diff -r 000000000000 -r 044383f39525 sbsv2/raptor/python/raptor_meta.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/sbsv2/raptor/python/raptor_meta.py Tue Oct 27 16:36:35 2009 +0000 @@ -0,0 +1,3245 @@ +# +# Copyright (c) 2007-2009 Nokia Corporation and/or its subsidiary(-ies). +# All rights reserved. +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Nokia Corporation - initial contribution. +# +# Contributors: +# +# Description: +# This module includes classes that process bld.inf and .mmp files to +# generate Raptor build specifications +# + +import copy +import re +import os.path +import shutil +import stat +import hashlib +import base64 + +import raptor +import raptor_data +import raptor_utilities +import raptor_xml +import generic_path +import subprocess +import zipfile +from mmpparser import * + +import time + + +PiggyBackedBuildPlatforms = {'ARMV5':['GCCXML']} + +PlatformDefaultDefFileDir = {'WINSCW':'bwins', + 'ARMV5' :'eabi', + 'ARMV5SMP' :'eabi', + 'GCCXML':'eabi', + 'ARMV6':'eabi', + 'ARMV7' : 'eabi', + 'ARMV7SMP' : 'eabi'} + +def getVariantCfgDetail(aEPOCROOT, aVariantCfgFile): + """Obtain pertinent build related detail from the Symbian variant.cfg file. + + This variant.cfg file, usually located relative to $(EPOCROOT), contains: + (1) The $(EPOCROOT) relative location of the primary .hrh file used to configure the specific OS variant build + (2) A flag determining whether ARMV5 represents an ABIV1 or ABIV2 build (currently unused by Raptor).""" + + variantCfgDetails = {} + variantCfgFile = None + + try: + variantCfgFile = open(str(aVariantCfgFile)) + except IOError, (number, message): + raise MetaDataError("Could not read variant configuration file "+str(aVariantCfgFile)+" ("+message+")") + + for line in variantCfgFile.readlines(): + if re.search('^(\s$|\s*#)', line): + continue + # Note that this detection of the .hrh file matches the command line build i.e. ".hrh" somewhere + # in the specified line + elif re.search('\.hrh', line, re.I): + variantHrh = line.strip() + if variantHrh.startswith('\\') or variantHrh.startswith('/'): + variantHrh = variantHrh[1:] + variantHrh = aEPOCROOT.Append(variantHrh) + variantCfgDetails['VARIANT_HRH'] = variantHrh + else: + lineContent = line.split() + + if len(lineContent) == 1: + variantCfgDetails[lineContent.pop(0)] = 1 + else: + variantCfgDetails[lineContent.pop(0)] = lineContent + + variantCfgFile.close() + + if not variantCfgDetails.has_key('VARIANT_HRH'): + raise MetaDataError("No variant file specified in "+str(aVariantCfgFile)) + if not variantHrh.isFile(): + raise MetaDataError("Variant file "+str(variantHrh)+" does not exist") + + return variantCfgDetails + +def getOsVerFromKifXml(aPathToKifXml): + """Obtain the OS version from the kif.xml file located at $EPOCROOT/epoc32/data/kif.xml. 
+ + If successful, the function returns a string such as "v95" to indicate 9.5; None is + returned if for any reason the function cannot determine the OS version.""" + + releaseTagName = "ki:release" + osVersion = None + + import xml.dom.minidom + + try: + # Parsed document object + kifDom = xml.dom.minidom.parse(str(aPathToKifXml)) + + # elements - the elements whose names are releaseTagName + elements = kifDom.getElementsByTagName(releaseTagName) + + # There should be exactly one of the elements whose name is releaseTagName + # If more than one, osVersion is left as None, since the version should be + # unique to the kif.xml file + if len(elements) == 1: + osVersionTemp = elements[0].getAttribute("version") + osVersion = "v" + osVersionTemp.replace(".", "") + + kifDom.unlink() # Clean up + + except: + # There's no documentation on which exceptions are raised by these functions. + # We catch everything and assume any exception means there was a failure to + # determine OS version. None is returned, and the code will fall back + # to looking at the buildinfo.txt file. + pass + + return osVersion + +def getOsVerFromBuildInfoTxt(aPathToBuildInfoTxt): + """Obtain the OS version from the buildinfo.txt file located at $EPOCROOT/epoc32/data/buildinfo.txt. + + If successful, the function returns a string such as "v95" to indicate 9.5; None is + returned if for any reason the function cannot determine the OS version. + + The file $EPOCROOT/epoc32/data/buildinfo.txt is presumed to exist. The client code should + handle existance/non-existance.""" + + pathToBuildInfoTxt = str(aPathToBuildInfoTxt) # String form version of path to buildinfo.txt + + # Open the file for reading; throw an exception if it could not be read - note that + # it should exist at this point. + try: + buildInfoTxt = open(pathToBuildInfoTxt) + except IOError, (number, message): + raise MetaDataError("Could not read buildinfo.txt file at" + pathToBuildInfoTxt + ": (" + message + ")") + + # Example buildinfo.txt contents: + # + # DeviceFamily 100 + # DeviceFamilyRev 0x900 + # ManufacturerSoftwareBuild M08765_Symbian_OS_v9.5 + # + # Regexp to match the line containing the OS version + # Need to match things like M08765_Symbian_OS_v9.5 and M08765_Symbian_OS_vFuture + # So for the version, match everything except whitespace after v. Whitespace + # signifies the end of the regexp. + osVersionMatcher = re.compile('.*_Symbian_OS_v([^\s]*)', re.I) + osVersion = None + + # Search for a regexp match over all the times in the file + # Note: if two or more lines match the search pattern then + # the latest match will overwrite the osVersion string. 
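+ # Worked example (illustrative): for the sample line above,
+ # "ManufacturerSoftwareBuild M08765_Symbian_OS_v9.5", the group captures "9.5",
+ # giving an osVersion of "v9.5" which becomes "v95" once the dot is removed.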
+ for line in buildInfoTxt: + matchResult = osVersionMatcher.match(line) + if matchResult: + result = matchResult.groups() + osVersion = "v" + str(reduce(lambda x, y: x + y, result)) + osVersion = osVersion.replace(".", "") + + buildInfoTxt.close() # Clean-up + + return osVersion + +def getBuildableBldInfBuildPlatforms(aBldInfBuildPlatforms, + aDefaultOSBuildPlatforms, + aBaseDefaultOSBuildPlatforms, + aBaseUserDefaultOSBuildPlatforms): + """Obtain a set of build platform names supported by a bld.inf file + + Build platform deduction is based on both the contents of the PRJ_PLATFORMS section of + a bld.inf file together with a hard-coded set of default build platforms supported by + the build system itself.""" + + expandedBldInfBuildPlatforms = [] + removePlatforms = set() + + for bldInfBuildPlatform in aBldInfBuildPlatforms: + if bldInfBuildPlatform.upper() == "DEFAULT": + expandedBldInfBuildPlatforms.extend(aDefaultOSBuildPlatforms.split()) + elif bldInfBuildPlatform.upper() == "BASEDEFAULT": + expandedBldInfBuildPlatforms.extend(aBaseDefaultOSBuildPlatforms.split()) + elif bldInfBuildPlatform.upper() == "BASEUSERDEFAULT": + expandedBldInfBuildPlatforms.extend(aBaseUserDefaultOSBuildPlatforms.split()) + elif bldInfBuildPlatform.startswith("-"): + removePlatforms.add(bldInfBuildPlatform.lstrip("-").upper()) + else: + expandedBldInfBuildPlatforms.append(bldInfBuildPlatform.upper()) + + if len(expandedBldInfBuildPlatforms) == 0: + expandedBldInfBuildPlatforms.extend(aDefaultOSBuildPlatforms.split()) + + # make a set of platforms that can be built + buildableBldInfBuildPlatforms = set(expandedBldInfBuildPlatforms) + + # Add platforms that are buildable by virtue of the presence of another + for piggyBackedPlatform in PiggyBackedBuildPlatforms: + if piggyBackedPlatform in buildableBldInfBuildPlatforms: + buildableBldInfBuildPlatforms.update(PiggyBackedBuildPlatforms.get(piggyBackedPlatform)) + + # Remove platforms that were negated + buildableBldInfBuildPlatforms -= removePlatforms + + return buildableBldInfBuildPlatforms + + +def getPreProcessorCommentDetail (aPreProcessorComment): + """Takes a preprocessor comment and returns an array containing the filename and linenumber detail.""" + + commentDetail = [] + commentMatch = re.search('# (?P\d+) "(?P.*)"', aPreProcessorComment) + + if commentMatch: + filename = commentMatch.group('FILENAME') + filename = os.path.abspath(filename) + filename = re.sub(r'\\\\', r'\\', filename) + filename = re.sub(r'//', r'/', filename) + filename = generic_path.Path(filename) + linenumber = int (commentMatch.group('LINENUMBER')) + + commentDetail.append(filename) + commentDetail.append(linenumber) + + return commentDetail + + +# Classes + +class MetaDataError(Exception): + """Fatal error wrapper, to be thrown directly back to whatever is calling.""" + + def __init__(self, aText): + self.Text = aText + def __str__(self): + return repr(self.Text) + + +class PreProcessedLine(str): + """Custom string class that accepts filename and line number information from + a preprocessed context.""" + + def __new__(cls, value, *args, **keywargs): + return str.__new__(cls, value) + + def __init__(self, value, aFilename, aLineNumber): + self.filename = aFilename + self.lineNumber = aLineNumber + + def getFilename (self): + return self.filename + + def getLineNumber (self): + return self.lineNumber + +class PreProcessor(raptor_utilities.ExternalTool): + """Preprocessor wrapper suitable for Symbian metadata file processing.""" + + def __init__(self, aPreProcessor, + 
aStaticOptions, + aIncludeOption, + aMacroOption, + aPreIncludeOption, + aRaptor): + raptor_utilities.ExternalTool.__init__(self, aPreProcessor) + self.__StaticOptions = aStaticOptions + self.__IncludeOption = aIncludeOption + self.__MacroOption = aMacroOption + self.__PreIncludeOption = aPreIncludeOption + + self.filename = "" + self.__Macros = [] + self.__IncludePaths = [] + self.__PreIncludeFile = "" + self.raptor = aRaptor + + def call(self, aArgs, sourcefilename): + """ Override call so that we can do our own error handling.""" + tool = self._ExternalTool__Tool + try: + commandline = tool + " " + aArgs + " " + str(sourcefilename) + + # the actual call differs between Windows and Unix + if raptor_utilities.getOSFileSystem() == "unix": + p = subprocess.Popen(commandline, \ + shell=True, bufsize=65535, \ + stdin=subprocess.PIPE, \ + stdout=subprocess.PIPE, \ + stderr=subprocess.PIPE, \ + close_fds=True) + else: + p = subprocess.Popen(commandline, \ + bufsize=65535, \ + stdin=subprocess.PIPE, \ + stdout=subprocess.PIPE, \ + stderr=subprocess.PIPE, \ + universal_newlines=True) + + # run the command and wait for all the output + (self._ExternalTool__Output, errors) = p.communicate() + + if self.raptor.debugOutput: + self.raptor.Debug("Preprocessing Start %s", str(sourcefilename)) + self.raptor.Debug("Output:\n%s", self._ExternalTool__Output) + self.raptor.Debug("Errors:\n%s", errors) + self.raptor.Debug("Preprocessing End %s", str(sourcefilename)) + + incRE = re.compile("In file included from") + fromRE = re.compile(r"\s+from") + warningRE = re.compile("warning:|pasting.+token|from.+:") + remarkRE = re.compile("no newline at end of file|does not give a valid preprocessing token") + + actualErr = False + if errors != "": + for error in errors.splitlines(): + if incRE.search(error) or fromRE.search(error): + continue + if not remarkRE.search(error): + if warningRE.search(error): + self.raptor.Warn("%s: %s", tool, error) + else: + self.raptor.Error("%s: %s", tool, error) + actualErr = True + if actualErr: + raise MetaDataError("Errors in %s" % str(sourcefilename)) + + except Exception,e: + raise MetaDataError("Preprocessor exception: %s" % str(e)) + + return 0 # all OK + + def setMacros(self, aMacros): + self.__Macros = aMacros + + def addMacro(self, aMacro): + self.__Macros.append(aMacro) + + def addMacros(self, aMacros): + self.__Macros.extend(aMacros) + + def getMacros(self): + return self.__Macros + + + def addIncludePath(self, aIncludePath): + p = str(aIncludePath) + if p == "": + self.raptor.Warn("attempt to set an empty preprocessor include path for %s" % str(self.filename)) + else: + self.__IncludePaths.append(p) + + def addIncludePaths(self, aIncludePaths): + for path in aIncludePaths: + self.addIncludePath(path) + + def setIncludePaths(self, aIncludePaths): + self.__IncludePaths = [] + self.addIncludePaths(aIncludePaths) + + def setPreIncludeFile(self, aPreIncludeFile): + self.__PreIncludeFile = aPreIncludeFile + + def preprocess(self): + preProcessorCall = self.__constructPreProcessorCall() + returnValue = self.call(preProcessorCall, self.filename) + + return self.getOutput() + + def __constructPreProcessorCall(self): + + call = self.__StaticOptions + + if self.__PreIncludeFile: + call += " " + self.__PreIncludeOption + call += " " + str(self.__PreIncludeFile) + + for macro in self.__Macros: + call += " " + self.__MacroOption + macro + + for includePath in self.__IncludePaths: + call += " " + self.__IncludeOption + call += " " + str(includePath) + + return call + + +class 
MetaDataFile(object): + """A generic representation of a Symbian metadata file + + Symbian metadata files are subject to preprocessing, primarily with macros based + on the selected build platform. This class provides a generic means of wrapping + up the preprocessing of such files.""" + + def __init__(self, aFilename, gnucpp, aRootLocation=None, log=None): + """ + @param aFilename An MMP, bld.inf or other preprocessable build spec file + @param aDefaultPlatform Default preprocessed version of this file + @param aCPP location of GNU CPP + @param log A class with Debug(), Info() and Error() methods + """ + self.filename = aFilename + self.__RootLocation = aRootLocation + # Dictionary with key of build platform and a text string of processed output as values + self.__PreProcessedContent = {} + self.log = log + + self.__gnucpp = gnucpp + if gnucpp is None: + raise ValueError('gnucpp must be set') + + def depspath(self, platform): + """ Where does dependency information go relative to platform's SBS_BUILD_DIR? + Subclasses should redefine this + """ + return str(platform['SBS_BUILD_DIR']) + "/" + str(self.__RootLocation) + "." + platform['key_md5'] + ".d" + + def getContent(self, aBuildPlatform): + + key = aBuildPlatform['key'] + + config_macros = [] + + adepfilename = self.depspath(aBuildPlatform) + generateDepsOptions = "" + if adepfilename: + + if raptor_utilities.getOSPlatform().startswith("win"): + metatarget = "$(PARSETARGET)" + else: + metatarget = "'$(PARSETARGET)'" + generateDepsOptions = "-MD -MF%s -MT%s" % (adepfilename, metatarget) + aBuildPlatform['METADEPS'].append((adepfilename, metatarget)) + try: + os.makedirs(os.path.dirname(adepfilename)) + except Exception, e: + self.log.Debug("Couldn't make bldinf outputpath for dependency generation") + + config_macros = (aBuildPlatform['PLATMACROS']).split() + + if not key in self.__PreProcessedContent: + + preProcessor = PreProcessor(self.__gnucpp, '-undef -nostdinc ' + generateDepsOptions + ' ', + '-I', '-D', '-include', self.log) + preProcessor.filename = self.filename + + # always have the current directory on the include path + preProcessor.addIncludePath('.') + + # the SYSTEMINCLUDE directories defined in the build config + # should be on the include path. This is added mainly to support + # Feature Variation as SYSTEMINCLUDE is usually empty at this point. 
+ systemIncludes = aBuildPlatform['SYSTEMINCLUDE'] + if systemIncludes: + preProcessor.addIncludePaths(systemIncludes.split()) + + preInclude = aBuildPlatform['VARIANT_HRH'] + + # for non-Feature Variant builds, the directory containing the HRH should + # be on the include path + if not aBuildPlatform['ISFEATUREVARIANT']: + preProcessor.addIncludePath(preInclude.Dir()) + + # and EPOCROOT/epoc32/include + preProcessor.addIncludePath(aBuildPlatform['EPOCROOT'].Append('epoc32/include')) + + # and the directory containing the bld.inf file + if self.__RootLocation is not None and str(self.__RootLocation) != "": + preProcessor.addIncludePath(self.__RootLocation) + + # and the directory containing the file we are processing + preProcessor.addIncludePath(self.filename.Dir()) + + # there is always a pre-include file + preProcessor.setPreIncludeFile(preInclude) + + macros = ["SBSV2"] + + if config_macros: + macros.extend(config_macros) + + if macros: + for macro in macros: + preProcessor.addMacro(macro + "=_____" +macro) + + # extra "raw" macros that do not need protecting + preProcessor.addMacro("__GNUC__=3") + + preProcessorOutput = preProcessor.preprocess() + + # Resurrect preprocessing replacements + pattern = r'([\\|/]| |) ?_____(('+macros[0]+')' + for macro in macros[1:]: + pattern += r'|('+macro+r')' + + pattern += r'\s*)' + # Work on all Macros in one substitution. + text = re.sub(pattern, r"\1\2", preProcessorOutput) + text = re.sub(r"\n[\t ]*", r"\n", text) + + self.__PreProcessedContent[key] = text + + return self.__PreProcessedContent[key] + +class MMPFile(MetaDataFile): + """A generic representation of a Symbian metadata file + + Symbian metadata files are subject to preprocessing, primarily with macros based + on the selected build platform. This class provides a generic means of wrapping + up the preprocessing of such files.""" + + def __init__(self, aFilename, gnucpp, bldinf, log=None): + """ + @param aFilename An MMP, bld.inf or other preprocessable build spec file + @param gnucpp location of GNU CPP + @param bldinf the bldinf file that this mmp comes from + @param log A class with Debug(), Info() and Error() methods + """ + super(MMPFile, self).__init__(aFilename, gnucpp, str(bldinf.filename.Dir()), log) + self.__bldinf = bldinf + + self.__gnucpp = gnucpp + if gnucpp is None: + raise ValueError('gnucpp must be set') + + def depspath(self, platform): + """ Where does dependency information go relative to platform's SBS_BUILD_DIR? + Subclasses should redefine this + """ + return self.__bldinf.outputpath(platform) + "/" + self.filename.File() + '.' + platform['key_md5'] + ".d" + +class Export(object): + """Single processed PRJ_EXPORTS or PRJ_TESTEXPORTS entry from a bld.inf file""" + + def getPossiblyQuotedStrings(cls,spec): + """ Split a string based on whitespace + but keep double quoted substrings together. 
+ """ + inquotes=False + intokengap=False + sourcedest=[] + word = 0 + for c in spec: + if c == '"': + if inquotes: + inquotes = False + word += 1 + intokengap = True + else: + inquotes = True + intokengap = False + pass + elif c == ' ' or c == '\t': + if inquotes: + if len(sourcedest) == word: + sourcedest.append(c) + else: + sourcedest[word] += c + else: + if intokengap: + # gobble unquoted spaces + pass + else: + word += 1 + intokengap=True + pass + else: + intokengap = False + if len(sourcedest) == word: + sourcedest.append(c) + else: + sourcedest[word] += c + + return sourcedest + + getPossiblyQuotedStrings = classmethod(getPossiblyQuotedStrings) + + + def __init__(self, aBldInfFile, aExportsLine, aType): + """ + Rules from the OS library for convenience: + + For PRJ_TESTEXPORTS + source_file_1 [destination_file] + source_file_n [destination_file] + If the source file is listed with a relative path, the path will + be considered relative to the directory containing the bld.inf file. + If a destination file is not specified, the source file will be copied + to the directory containing the bld.inf file. + If a relative path is specified with the destination file, the path + will be considered relative to directory containing the bld.inf file. + + For PRJ_EXPORTS + source_file_1 [destination_file] + source_file_n [destination_file] + :zip zip_file [destination_path] + + Note that: + If a source file is listed with a relative path, the path will be + considered relative to the directory containing the bld.inf file. + + If a destination file is not specified, the source file will be copied + to epoc32\include\. + + If a destination file is specified with the relative path, the path will + be considered relative to directory epoc32\include\. + + If a destination begins with a drive letter, then the file is copied to + epoc32\data\\. For example, + + mydata.dat e:\appdata\mydata.dat + copies mydata.dat to epoc32\data\e\appdata\mydata.dat. + You can use any driveletter between A and Z. + + A line can start with the preface :zip. This instructs the build tools + to unzip the specified zip file to the specified destination path. If a + destination path is not specified, the source file will be unzipped in + the root directory. + + + """ + + # Work out what action is required - unzip or copy? + action = "copy" + typematch = re.match(r'^\s*(?P:zip\s+)?(?P[^\s].*[^\s])\s*$',aExportsLine, re.I) + + spec = typematch.group('spec') + if spec == None: + raise ValueError('must specify at least a source file for an export') + + if typematch.group('type') is not None: + action = "unzip" + + # Split the spec into source and destination but take care + # to allow filenames with quoted strings. + exportEntries = Export.getPossiblyQuotedStrings(spec) + + # Get the source path as specified by the bld.inf + source_spec = exportEntries.pop(0).replace(' ','%20') + + # Resolve the source file + sourcepath = generic_path.Path(raptor_utilities.resolveSymbianPath(str(aBldInfFile), source_spec)) + + # Find it if the case of the filename is wrong: + # Carry on even if we don't find it + foundfile = sourcepath.FindCaseless() + if foundfile != None: + source = str(foundfile).replace(' ','%20') + else: + source = str(sourcepath).replace(' ','%20') + + + # Get the destination path as specified by the bld.inf + if len(exportEntries) > 0: + dest_spec = exportEntries.pop(0).replace(' ','%20') + else: + dest_spec = None + # Destination list - list of destinations. 
For the WINSCW resource building stage, + # files exported to the emulated drives and there are several locations, for example, + # PRJ_[TEST]EXPORTS + # 1234ABCD.SPD z:/private/10009876/policy/1234ABCD.spd + # needs to end up copied in + # epoc32/data/z/private/10009876/policy/1234ABCD.spd *and* in + # epoc32/release/winscw/udeb/z/private/10009876/policy/1234ABCD.spd *and* in + # epoc32/release/winscw/urel/z/private/10009876/policy/1234ABCD.spd + dest_list = [] + + # Resolve the destination if one is specified + if dest_spec: + # check for troublesome characters + if ':' in dest_spec and not re.search('^[a-z]:', dest_spec, re.I): + raise ValueError("invalid filename " + dest_spec) + + dest_spec = dest_spec.replace(' ','%20') + aSubType="" + if action == "unzip": + aSubType=":zip" + dest_spec = dest_spec.rstrip("\\/") + + # Get the export destination(s) - note this can be a list of strings or just a string. + dest_list = raptor_utilities.resolveSymbianPath(str(aBldInfFile), dest_spec, aType, aSubType) + + def process_dest(aDest): + if dest_spec.endswith('/') or dest_spec.endswith('\\'): + m = generic_path.Path(source) + aDest += '/'+m.File() + return aDest + + if isinstance(dest_list, list): + # Process each file in the list + dest_list = map(process_dest, dest_list) + else: + # Process the single destination + dest_list = process_dest(dest_list) + + else: + # No destination was specified so we assume an appropriate one + + dest_filename=generic_path.Path(source).File() + + if aType == "PRJ_EXPORTS": + if action == "copy": + destination = '$(EPOCROOT)/epoc32/include/'+dest_filename + elif action == "unzip": + destination = '$(EPOCROOT)' + elif aType == "PRJ_TESTEXPORTS": + d = aBldInfFile.Dir() + if action == "copy": + destination = str(d.Append(dest_filename)) + elif action == "unzip": + destination = "$(EPOCROOT)" + else: + raise ValueError("Export type should be 'PRJ_EXPORTS' or 'PRJ_TESTEXPORTS'. It was: "+str(aType)) + + + self.__Source = source + if len(dest_list) > 0: # If the list has length > 0, this means there are several export destinations. + self.__Destination = dest_list + else: # Otherwise the list has length zero, so there is only a single export destination. + self.__Destination = destination + self.__Action = action + + def getSource(self): + return self.__Source + + def getDestination(self): + return self.__Destination # Note that this could be either a list, or a string, depending on the export destination + + def getAction(self): + return self.__Action + +class ExtensionmakefileEntry(object): + def __init__(self, aGnuLine, aBldInfFile, tmp): + + self.__BldInfFile = aBldInfFile + bldInfLocation = self.__BldInfFile.Dir() + biloc = str(bldInfLocation) + extInfLocation = tmp.filename.Dir() + eiloc = str(extInfLocation) + + if eiloc is None or eiloc == "": + eiloc="." # Someone building with a relative raptor path + if biloc is None or biloc == "": + biloc="." 
# Someone building with a relative raptor path + + self.__StandardVariables = {} + # Relative step-down to the root - let's try ignoring this for now, as it + # should amount to the same thing in a world where absolute paths are king + self.__StandardVariables['TO_ROOT'] = "" + # Top-level bld.inf location + self.__StandardVariables['TO_BLDINF'] = biloc + self.__StandardVariables['EXTENSION_ROOT'] = eiloc + + # Get the directory and filename from the full path containing the extension makefile + self.__FullPath = generic_path.Join(eiloc,aGnuLine) + self.__FullPath = self.__FullPath.GetLocalString() + self.__Filename = os.path.split(self.__FullPath)[1] + self.__Directory = os.path.split(self.__FullPath)[0] + + def getMakefileName(self): + return self.__Filename + + def getMakeDirectory(self): + return self.__Directory + + def getStandardVariables(self): + return self.__StandardVariables + +class Extension(object): + """Single processed PRJ_EXTENSIONS or PRJ_TESTEXTENSIONS START EXTENSIONS...END block + from a bld.inf file""" + + def __init__(self, aBldInfFile, aStartLine, aOptionLines, aBuildPlatform, aRaptor): + self.__BldInfFile = aBldInfFile + self.__Options = {} + self.interface = "" + self.__Raptor = aRaptor + + makefile = "" + makefileMatch = re.search(r'^\s*START EXTENSION\s+(?P\S+)\s*(?P\S*)$', aStartLine, re.I) + + self.__RawMakefile = "" + + if (makefileMatch): + self.__RawMakefile = makefileMatch.group('MAKEFILE') + self.nametag = makefileMatch.group('NAMETAG').lower() + + # Ensure all \'s are translated into /'s if required + self.interface = self.__RawMakefile + self.interface = self.interface.replace("\\", "/").replace("/", ".") + + # To support standalone testing, '$(' prefixed TEMs are assumed to start with + # a makefile variable and hence be fully located in FLM operation + if self.__RawMakefile.startswith("$("): + self.__Makefile = self.__RawMakefile + ".mk" + else: + self.__Makefile = '$(MAKEFILE_TEMPLATES)/' + self.__RawMakefile + ".mk" + + for optionLine in aOptionLines: + optionMatch = re.search(r'^\s*(OPTION\s+)?(?P\S+)\s+(?P\S+.*)$',optionLine, re.I) + if optionMatch: + self.__Options[optionMatch.group('VARIABLE').upper()] = optionMatch.group('VALUE') + + bldInfLocation = self.__BldInfFile.Dir() + + biloc = str(bldInfLocation) + if biloc is None or biloc == "": + biloc="." # Someone building with a relative raptor path + + extInfLocation = aStartLine.filename.Dir() + + eiloc = str(extInfLocation) + if eiloc is None or eiloc == "": + eiloc="." # Someone building with a relative raptor path + + self.__StandardVariables = {} + # Relative step-down to the root - let's try ignoring this for now, as it + # should amount to the same thing in a world where absolute paths are king + self.__StandardVariables['TO_ROOT'] = "" + # Top-level bld.inf location + self.__StandardVariables['TO_BLDINF'] = biloc + # Location of bld.inf file containing the current EXTENSION block + self.__StandardVariables['EXTENSION_ROOT'] = eiloc + + # If the interface exists, this means it's not a Template Extension Makefile so don't look for a .meta file for it; + # so do nothing if it's not a template extension makefile + try: + self.__Raptor.cache.FindNamedInterface(str(self.interface), aBuildPlatform['CACHEID']) + except KeyError: # This means that this Raptor doesn't have the interface self.interface, so we are in a TEM + # Read extension meta file and get default options from it. 
The use of TEM meta file is compulsory if TEM is used + metaFilename = "%s/epoc32/tools/makefile_templates/%s.meta" % (aBuildPlatform['EPOCROOT'], self.__RawMakefile) + metaFile = None + try: + metaFile = open(metaFilename, "r") + except IOError, e: + self.__warn("Extension: %s - cannot open Meta file: %s" % (self.__RawMakefile, metaFilename)) + + if metaFile: + for line in metaFile.readlines(): + defaultOptionMatch = re.search(r'^OPTION\s+(?P\S+)\s+(?P\S+.*)$',line, re.I) + if defaultOptionMatch and defaultOptionMatch.group('VARIABLE').upper() not in self.__Options.keys(): + self.__Options[defaultOptionMatch.group('VARIABLE').upper()] = defaultOptionMatch.group('VALUE') + + metaFile.close() + + def __warn(self, format, *extras): + if (self.__Raptor): + self.__Raptor.Warn(format, *extras) + + def getIdentifier(self): + return re.sub (r'\\|\/|\$|\(|\)', '_', self.__RawMakefile) + + def getMakefile(self): + return self.__Makefile + + def getOptions(self): + return self.__Options + + def getStandardVariables(self): + return self.__StandardVariables + +class MMPFileEntry(object): + def __init__(self, aFilename, aTestOption, aARMOption): + self.filename = aFilename + self.testoption = aTestOption + if aARMOption: + self.armoption = True + else: + self.armoption = False + + +class BldInfFile(MetaDataFile): + """Representation of a Symbian bld.inf file""" + + def __init__(self, aFilename, gnucpp, log=None): + MetaDataFile.__init__(self, aFilename, gnucpp, None, log) + self.__Raptor = log + self.testManual = 0 + self.testAuto = 0 + # Generic + + def getBuildPlatforms(self, aBuildPlatform): + platformList = [] + + for platformLine in self.__getSection(aBuildPlatform, 'PRJ_PLATFORMS'): + for platformEntry in platformLine.split(): + platformList.append(platformEntry) + + return platformList + + # Build Platform Specific + def getMMPList(self, aBuildPlatform, aType="PRJ_MMPFILES"): + mmpFileList=[] + gnuList = [] + makefileList = [] + extFound = False + m = None + + hashValue = {'mmpFileList': [] , 'gnuList': [], 'makefileList' : []} + + for mmpFileEntry in self.__getSection(aBuildPlatform, aType): + + actualBldInfRoot = mmpFileEntry.getFilename() + n = re.match('\s*(?P(GNUMAKEFILE|N?MAKEFILE))\s+(?P[^ ]+)\s*(support|manual)?\s*(?P\S+.*)?\s*$',mmpFileEntry,re.I) + if n: + + if (n.groupdict()['invalid']): + self.log.Error("%s (%d) : invalid .mmp file qualifier \"%s\"", mmpFileEntry.filename, mmpFileEntry.getLineNumber(), n.groupdict()['invalid']) + if raptor_utilities.getOSFileSystem() == "unix": + self.log.Warn("NMAKEFILE/GNUMAKEFILE/MAKEFILE keywords not supported on Linux") + else: + extmakefilearg = n.groupdict()['extmakefile'] + bldInfDir = actualBldInfRoot.Dir() + extmakefilename = bldInfDir.Append(extmakefilearg) + extmakefile = ExtensionmakefileEntry(extmakefilearg, self.filename, mmpFileEntry) + + if (n.groupdict()['makefiletype']).upper() == "GNUMAKEFILE": + gnuList.append(extmakefile) + else: + makefileList.append(extmakefile) + else: + # Currently there is only one possible option - build as arm. 
+ # For TESTMMPFILES, the supported options are support, tidy, ignore, manual and build as arm + if aType.upper()=="PRJ_TESTMMPFILES": + if re.match('\s*(?P[^ ]+)\s*(?Pbuild_as_arm)?\s*(?Psupport)?\s*(?Pignore)?\s*(?Ptidy)?\s*(?Pmanual)?\s*(?P\S+.*)?\s*$', mmpFileEntry, re.I): + m = re.match('\s*(?P[^ ]+)\s*(?Pbuild_as_arm)?\s*(?Psupport)?\s*(?Pignore)?\s*(?Ptidy)?\s*(?Pmanual)?\s*(?P\S+.*)?\s*$', mmpFileEntry, re.I) + else: + if re.match('\s*(?P[^ ]+)\s*(?Pbuild_as_arm)?\s*(?P\S+.*)?\s*$', mmpFileEntry, re.I): + m = re.match('\s*(?P[^ ]+)\s*(?Pbuild_as_arm)?\s*(?P\S+.*)?\s*$', mmpFileEntry, re.I) + + if m: + if (m.groupdict()['invalid']): + self.log.Error("%s (%d) : invalid .mmp file qualifier \"%s\"", mmpFileEntry.filename, mmpFileEntry.getLineNumber(), m.groupdict()['invalid']) + + mmpFileName = m.groupdict()['name'] + testmmpoption = "auto" # Setup tests to be automatic by default + tokens = m.groupdict() + for key,item in tokens.iteritems(): + if key=="manual" and item=="manual": + testmmpoption = "manual" + elif key=="support" and item=="support": + testmmpoption = "support" + elif key=="ignore" and item=="ignore": + testmmpoption = "ignore" + + buildasarm = False + if m.groupdict()['baa']: + if m.groupdict()['baa'].lower() == 'build_as_arm': + buildasarm = True + + if not mmpFileName.lower().endswith('.mmp'): + mmpFileName += '.mmp' + bldInfDir = actualBldInfRoot.Dir() + try: + mmpFileName = bldInfDir.Append(mmpFileName) + mmpfe = MMPFileEntry(mmpFileName, testmmpoption, buildasarm) + mmpFileList.append(mmpfe) + except ValueError, e: + self.log.Error("invalid .mmp file name: %s" % str(e)) + + m = None + + + hashValue['mmpFileList'] = mmpFileList + hashValue['gnuList'] = gnuList + hashValue['makefileList'] = makefileList + + return hashValue + + # Return a list of gnumakefiles used in the bld.inf + def getExtensionmakefileList(self, aBuildPlatform, aType="PRJ_MMPFILES",aString = ""): + extMakefileList=[] + m = None + for extmakeFileEntry in self.__getSection(aBuildPlatform, aType): + + actualBldInfRoot = extmakeFileEntry.filename + if aType.upper()=="PRJ_TESTMMPFILES": + m = re.match('\s*GNUMAKEFILE\s+(?P[^ ]+)\s*(?Psupport)?\s*(?Pignore)?\s*(?Ptidy)?\s*(?Pmanual)?\s*(?P\S+.*)?\s*$',extmakeFileEntry,re.I) + else: + if aString == "gnumakefile": + m = re.match('\s*GNUMAKEFILE\s+(?P[^ ]+)\s*(?P\S+.*)?\s*$',extmakeFileEntry,re.I) + elif aString == "nmakefile": + m = re.match('\s*NMAKEFILE\s+(?P[^ ]+)\s*(?P\S+.*)?\s*$',extmakeFileEntry,re.I) + elif aString == "makefile": + m = re.match('\s*MAKEFILE\s+(?P[^ ]+)\s*(?P\S+.*)?\s*$',extmakeFileEntry,re.I) + if m: + if (m.groupdict()['invalid']): + self.log.Error("%s (%d) : invalid extension makefile qualifier \"%s\"", extmakeFileEntry.filename, extmakeFileEntry.getLineNumber(), m.groupdict()['invalid']) + + extmakefilearg = m.groupdict()['extmakefile'] + bldInfDir = actualBldInfRoot.Dir() + extmakefilename = bldInfDir.Append(extmakefilearg) + extmakefile = ExtensionmakefileEntry(extmakefilearg, self.filename, extmakeFileEntry) + extMakefileList.append(extmakefile) + m = None + + return extMakefileList + + def getTestExtensionmakefileList(self,aBuildPlatform,aString=""): + return self.getExtensionmakefileList(aBuildPlatform,"PRJ_TESTMMPFILES",aString) + + def getTestMMPList(self, aBuildPlatform): + return self.getMMPList(aBuildPlatform, "PRJ_TESTMMPFILES") + + def getRomTestType(self, aBuildPlatform): + testMMPList = self.getTestMMPList(aBuildPlatform) + for testMMPFileEntry in testMMPList['mmpFileList']: + if aBuildPlatform["TESTCODE"]: + # 
Calculate test type (manual or auto) + if testMMPFileEntry.testoption == "manual": + self.testManual += 1 + if not (testMMPFileEntry.testoption == "support" or testMMPFileEntry.testoption == "manual" or testMMPFileEntry.testoption == "ignore"): + self.testAuto += 1 + if self.testManual and self.testAuto: + return 'BOTH' + elif self.testAuto: + return 'AUTO' + elif self.testManual: + return 'MANUAL' + else: + return 'NONE' + + def getExports(self, aBuildPlatform, aType="PRJ_EXPORTS"): + exportList = [] + + for exportLine in self.__getSection(aBuildPlatform, aType): + + if not re.match(r'\S+', exportLine): + continue + + try: + exportList.append(Export(exportLine.getFilename(), exportLine, aType)) + except ValueError,e: + self.log.Error(str(e)) + + return exportList + + def getTestExports(self, aBuildPlatform): + return self.getExports(aBuildPlatform, "PRJ_TESTEXPORTS") + + def getExtensions(self, aBuildPlatform, aType="PRJ_EXTENSIONS"): + extensionObjects = [] + start = "" + options = [] + + for extensionLine in self.__getSection(aBuildPlatform, aType): + if (re.search(r'^\s*START ',extensionLine, re.I)): + start = extensionLine + elif re.search(r'^\s*END\s*$',extensionLine, re.I): + extensionObjects.append(Extension(self.filename, start, options, aBuildPlatform, self.__Raptor)) + start = "" + options = [] + elif re.search(r'^\s*$',extensionLine, re.I): + continue + elif start: + options.append(extensionLine) + + return extensionObjects + + def getTestExtensions(self, aBuildPlatform): + return self.getExtensions(aBuildPlatform, "PRJ_TESTEXTENSIONS") + + def __getSection(self, aBuildPlatform, aSection): + + activeSection = False + sectionContent = [] + lineContent = re.split(r'\n', self.getContent(aBuildPlatform)); + + currentBldInfFile = self.filename + currentLineNumber = 0 + + for line in lineContent: + if line.startswith("#"): + commentDetail = getPreProcessorCommentDetail(line) + currentBldInfFile = commentDetail[0] + currentLineNumber = commentDetail[1]-1 + continue + + currentLineNumber += 1 + + if not re.match(r'.*\S+', line): + continue + elif re.match(r'\s*' + aSection + r'\s*$', line, re.I): + activeSection = True + elif re.match(r'\s*PRJ_\w+\s*$', line, re.I): + activeSection = False + elif activeSection: + sectionContent.append(PreProcessedLine(line, currentBldInfFile, currentLineNumber)) + + return sectionContent + + @staticmethod + def outputPathFragment(bldinfpath): + """Return a relative path that uniquely identifies this bldinf file + whilst being short so that it can be appended to epoc32/build. + The build product of a particular bld.inf may be placed in here. + This affects its TEMs and its MMPs""" + + absroot_str = os.path.abspath(str(bldinfpath)).lower().replace("\\","/") + + uniqueid = hashlib.md5() + uniqueid.update(absroot_str) + + specnamecomponents = (re.sub("^[A-Za-z]:", "", absroot_str)).split('/') # split, removing any drive identifier (if present) + + pathlist=[] + while len(specnamecomponents) > 0: + top = specnamecomponents.pop() + if top.endswith('.inf'): + continue + elif top == 'group': + continue + else: + pathlist = [top] + break + + pathlist.append("c_"+uniqueid.hexdigest()[:16]) + return "/".join(pathlist) + + def outputpath(self, platform): + """ The full path where product from this bldinf is created.""" + return str(platform['SBS_BUILD_DIR']) + "/" + BldInfFile.outputPathFragment(self.filename) + + def depspath(self, platform): + """ Where does dependency information go relative to platform's SBS_BUILD_DIR? 
+ Subclasses should redefine this + """ + return self.outputpath(platform) + "/bldinf." + platform['key_md5'] + ".d" + + + +class MMPRaptorBackend(MMPBackend): + """A parser "backend" for the MMP language + + This is used to map recognised MMP syntax onto a buildspec """ + + # Support priorities, with case-fixed mappings for use + epoc32priorities = { + 'low':'Low', + 'background':'Background', + 'foreground':'Foreground', + 'high':'High', + 'windowserver':'WindowServer', + 'fileserver':'FileServer', + 'realtimeserver':'RealTimeServer', + 'supervisor':'SuperVisor' + } + + # Known capability flags with associated bitwise operations + supportedCapabilities = { + 'tcb':(1<<0), + 'commdd':(1<<1), + 'powermgmt':(1<<2), + 'multimediadd':(1<<3), + 'readdevicedata':(1<<4), + 'writedevicedata':(1<<5), + 'drm':(1<<6), + 'trustedui':(1<<7), + 'protserv':(1<<8), + 'diskadmin':(1<<9), + 'networkcontrol':(1<<10), + 'allfiles':(1<<11), + 'swevent':(1<<12), + 'networkservices':(1<<13), + 'localservices':(1<<14), + 'readuserdata':(1<<15), + 'writeuserdata':(1<<16), + 'location':(1<<17), + 'surroundingsdd':(1<<18), + 'userenvironment':(1<<19), + # Old capability names have zero value + 'root':0, + 'mediadd':0, + 'readsystemdata':0, + 'writesystemdata':0, + 'sounddd':0, + 'uidd':0, + 'killanyprocess':0, + 'devman':0, + 'phonenetwork':0, + 'localnetwork':0 + } + + library_re = re.compile(r"^(?P[^{]+?)(?P{(?P[0-9]+)\.(?P[0-9]+)})?(\.(lib|dso))?$",re.I) + + + def __init__(self, aRaptor, aMmpfilename, aBldInfFilename): + super(MMPRaptorBackend,self).__init__() + self.platformblock = None + self.__Raptor = aRaptor + self.BuildVariant = raptor_data.Variant() + self.ResourceVariants = [] + self.BitmapVariants = [] + self.StringTableVariants = [] + self.__bldInfFilename = aBldInfFilename + self.__targettype = "UNKNOWN" + self.__currentMmpFile = aMmpfilename + self.__defFileRoot = self.__currentMmpFile + self.__currentLineNumber = 0 + self.__sourcepath = raptor_utilities.resolveSymbianPath(self.__currentMmpFile, "") + self.__userinclude = "" + self.__systeminclude = "" + self.__bitmapSourcepath = self.__sourcepath + self.__current_resource = "" + self.__capabilities = [] + self.__resourceFiles = [] + self.__pageConflict = [] + self.__debuggable = "" + self.sources = [] + + self.__TARGET = "" + self.__TARGETEXT = "" + self.deffile = "" + self.__LINKAS = "" + self.nostrictdef = False + self.featureVariant = False + + self.__currentResourceVariant = None + self.__currentStringTableVariant = None + self.__explicitversion = False + self.__versionhex = "" + + # "ALL" capability calculated based on the total capabilities currently supported + allCapabilities = 0 + for supportedCapability in MMPRaptorBackend.supportedCapabilities.keys(): + allCapabilities = allCapabilities | MMPRaptorBackend.supportedCapabilities[supportedCapability] + MMPRaptorBackend.supportedCapabilities['all'] = allCapabilities + + # Permit unit-testing output without a Raptor context + def __debug(self, format, *extras): + if (self.__Raptor): + self.__Raptor.Debug(format, *extras) + + def __warn(self, format, *extras): + if (self.__Raptor): + self.__Raptor.Warn(format, *extras) + + def doPreProcessorComment(self,s,loc,toks): + commentDetail = getPreProcessorCommentDetail(toks[0]) + self.__currentMmpFile = commentDetail[0].GetLocalString() + self.__currentLineNumber = commentDetail[1] + self.__debug("Current file %s, line number %s\n" % (self.__currentMmpFile,str(self.__currentLineNumber))) + return "OK" + + def doBlankLine(self,s,loc,toks): + 
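+ # A blank line still advances the running line count, so later
+ # diagnostics point at the correct line of the current .mmp file.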
self.__currentLineNumber += 1 + + def doStartPlatform(self,s,loc,toks): + self.__currentLineNumber += 1 + self.__debug( "Start Platform block "+toks[0]) + self.platformblock = toks[0] + return "OK" + + def doEndPlatform(self,s,loc,toks): + self.__currentLineNumber += 1 + self.__debug( "Finalise platform " + self.platformblock) + return "OK" + + def doSetSwitch(self,s,loc,toks): + self.__currentLineNumber += 1 + prefix="" + varname = toks[0].upper() + + # A bright spark made the optionname the same as + # the env variable. One will override the other if we pass this + # on to make. Add a prefix to prevent the clash. + if varname=='ARMINC': + prefix="SET_" + self.__debug( "Set switch "+toks[0]+" ON") + self.BuildVariant.AddOperation(raptor_data.Set(prefix+varname, "1")) + + elif varname=='NOSTRICTDEF': + self.nostrictdef = True + self.__debug( "Set switch "+toks[0]+" ON") + self.BuildVariant.AddOperation(raptor_data.Set(prefix+varname, "1")) + + elif varname == 'PAGED': + self.BuildVariant.AddOperation(raptor_data.Set(varname, "1")) + self.__debug( "Set switch PAGE ON") + self.BuildVariant.AddOperation(raptor_data.Set("PAGEDCODE_OPTION", "paged")) + self.__debug( "Set switch PAGEDCODE ON") + self.BuildVariant.AddOperation(raptor_data.Set("PAGEDDATA_OPTION", "paged")) + self.__debug( "Set data PAGEDDATA ON") + self.__pageConflict.append("PAGEDCODE") + self.__pageConflict.append("PAGEDDATA") + + elif varname == 'UNPAGED': + self.BuildVariant.AddOperation(raptor_data.Set("PAGED", "0")) + self.__debug( "Set switch PAGED OFF") + self.BuildVariant.AddOperation(raptor_data.Set("PAGEDCODE_OPTION", "unpaged")) + self.__debug( "Set switch PAGEDCODE OFF") + self.BuildVariant.AddOperation(raptor_data.Set("PAGEDDATA_OPTION", "unpaged")) + self.__debug( "Set data PAGEDDATA OFF") + self.__pageConflict.append("UNPAGEDCODE") + self.__pageConflict.append("UNPAGEDDATA") + + elif varname == 'PAGEDCODE': + self.BuildVariant.AddOperation(raptor_data.Set("PAGEDCODE_OPTION", "paged")) + self.__debug( "Set switch " + varname + " ON") + self.__pageConflict.append(varname) + + elif varname == 'PAGEDDATA': + self.BuildVariant.AddOperation(raptor_data.Set("PAGEDDATA_OPTION", "paged")) + self.__debug( "Set switch " + varname + " ON") + self.__pageConflict.append(varname) + + elif varname == 'UNPAGEDCODE': + self.BuildVariant.AddOperation(raptor_data.Set("PAGEDCODE_OPTION", "unpaged")) + self.__debug( "Set switch " + varname + " ON") + self.__pageConflict.append(varname) + elif varname == 'UNPAGEDDATA': + self.BuildVariant.AddOperation(raptor_data.Set("PAGEDDATA_OPTION", "unpaged")) + self.__debug( "Set switch " + varname + " ON") + self.__pageConflict.append(varname) + + elif varname == 'NOLINKTIMECODEGENERATION': + self.BuildVariant.AddOperation(raptor_data.Set("LTCG","")) + self.__debug( "Set switch " + varname + " OFF") + elif varname == 'NOMULTIFILECOMPILATION': + self.BuildVariant.AddOperation(raptor_data.Set("MULTIFILE_ENABLED","")) + self.__debug( "Set switch " + varname + " OFF") + + elif varname == 'DEBUGGABLE': + if self.__debuggable != "udeb": + self.__debuggable = "udeb urel" + else: + self.__Raptor.Warn("DEBUGGABLE keyword ignored as DEBUGGABLE_UDEBONLY is already specified") + elif varname == 'DEBUGGABLE_UDEBONLY': + if self.__debuggable != "": + self.__Raptor.Warn("DEBUGGABLE keyword has no effect as DEBUGGABLE or DEBUGGABLE_UDEBONLY is already set") + self.__debuggable = "udeb" + elif varname == 'FEATUREVARIANT': + self.BuildVariant.AddOperation(raptor_data.Set(varname,"1")) + self.featureVariant = True + 
else: + self.__debug( "Set switch "+toks[0]+" ON") + self.BuildVariant.AddOperation(raptor_data.Set(prefix+varname, "1")) + + return "OK" + + def doAssignment(self,s,loc,toks): + self.__currentLineNumber += 1 + varname = toks[0].upper() + if varname=='TARGET': + (self.__TARGET, self.__TARGETEXT) = os.path.splitext(toks[1]) + self.__TARGETEXT = self.__TARGETEXT.lstrip('.') + + self.BuildVariant.AddOperation(raptor_data.Set("REQUESTEDTARGETEXT", self.__TARGETEXT.lower())) + + lowercase_TARGET = self.__TARGET.lower() + self.__debug("Set "+toks[0]+" to " + lowercase_TARGET) + self.__debug("Set REQUESTEDTARGETEXT to " + self.__TARGETEXT.lower()) + + self.BuildVariant.AddOperation(raptor_data.Set("TARGET", self.__TARGET)) + self.BuildVariant.AddOperation(raptor_data.Set("TARGET_lower", lowercase_TARGET)) + if lowercase_TARGET != self.__TARGET: + self.__debug("TARGET is not lowercase: '%s' - might cause BC problems." % self.__TARGET) + elif varname=='TARGETTYPE': + self.__debug("Set "+toks[0]+" to " + str(toks[1])) + self.__targettype=toks[1] + if self.__targettype.lower() == "none": + self.BuildVariant.AddOperation(raptor_data.Set("TARGET", "")) + self.BuildVariant.AddOperation(raptor_data.Set("TARGET_lower","")) + self.BuildVariant.AddOperation(raptor_data.Set("REQUESTEDTARGETEXT", "")) + self.BuildVariant.AddOperation(raptor_data.Set(varname,toks[1].lower())) + + elif varname=='TARGETPATH': + value = toks[1].lower().replace('\\','/') + self.__debug("Set "+varname+" to " + value) + self.BuildVariant.AddOperation(raptor_data.Set(varname, value)) + + elif varname=='OPTION' or varname=='LINKEROPTION': + self.__debug("Set "+toks[1]+varname+" to " + str(toks[2])) + self.BuildVariant.AddOperation(raptor_data.Append(varname+"_"+toks[1].upper()," ".join(toks[2]))) + + # Warn about OPTION ARMASM + if "armasm" in toks[1].lower(): + self.__Raptor.Warn(varname+" ARMASM has no effect (use OPTION ARMCC).") + + elif varname=='OPTION_REPLACE': + # Warn about OPTION_REPLACE ARMASM + if "armasm" in toks[1].lower(): + self.__Raptor.Warn("OPTION_REPLACE ARMASM has no effect (use OPTION_REPLACE ARMCC).") + else: + args = " ".join(toks[2]) + + searchReplacePairs = self.resolveOptionReplace(args) + + for searchReplacePair in searchReplacePairs: + self.__debug("Append %s to OPTION_REPLACE_%s", searchReplacePair, toks[1].upper()) + self.BuildVariant.AddOperation(raptor_data.Append(varname+"_"+toks[1].upper(),searchReplacePair)) + + elif varname=='SYSTEMINCLUDE' or varname=='USERINCLUDE': + for path in toks[1]: + resolved = raptor_utilities.resolveSymbianPath(self.__currentMmpFile, path) + self.BuildVariant.AddOperation(raptor_data.Append(varname,resolved)) + + if varname=='SYSTEMINCLUDE': + self.__systeminclude += ' ' + resolved + self.__debug(" %s = %s",varname, self.__systeminclude) + else: + self.__userinclude += ' ' + resolved + self.__debug(" %s = %s",varname, self.__userinclude) + + self.__debug("Appending %s to %s",resolved, varname) + + self.__systeminclude = self.__systeminclude.strip() + self.__systeminclude = self.__systeminclude.rstrip('\/') + self.__userinclude = self.__userinclude.strip() + self.__userinclude = self.__userinclude.rstrip('\/') + + elif varname=='EXPORTLIBRARY': + # Remove extension from the EXPORTLIBRARY name + libName = toks[1].rsplit(".", 1)[0] + self.__debug("Set "+varname+" to " + libName) + self.BuildVariant.AddOperation(raptor_data.Set(varname,"".join(libName))) + + elif varname=='CAPABILITY': + for cap in toks[1]: + self.BuildVariant.AddOperation(raptor_data.Append(varname,cap," ")) 
+ self.__debug("Setting "+toks[0]+": " + cap) + self.__capabilities.append(cap.lower()) + elif varname=='DEFFILE': + self.__defFileRoot = self.__currentMmpFile + self.deffile = toks[1] + elif varname=='LINKAS': + self.__debug("Set "+toks[0]+" OPTION to " + str(toks[1])) + self.__LINKAS = toks[1] + self.BuildVariant.AddOperation(raptor_data.Set(varname, toks[1])) + elif varname=='SECUREID' or varname=='VENDORID': + hexoutput = MMPRaptorBackend.canonicalUID(toks[1]) + self.__debug("Set "+toks[0]+" OPTION to " + hexoutput) + self.BuildVariant.AddOperation(raptor_data.Set(varname, hexoutput)) + elif varname=='VERSION': + if toks[-1] == "EXPLICIT": + self.__explicitversion = True + self.BuildVariant.AddOperation(raptor_data.Set("EXPLICITVERSION", "1")) + + vm = re.match(r'^(\d+)(\.(\d+))?$', toks[1]) + if vm is not None: + version = vm.groups() + # the major version number + major = int(version[0],10) + + # add in the minor number + minor = 0 + if len(version) > 1: + minor = int(version[2],10) + + self.__versionhex = "%04x%04x" % (major, minor) + self.BuildVariant.AddOperation(raptor_data.Set(varname, "%d.%d" %(major, minor))) + self.BuildVariant.AddOperation(raptor_data.Set(varname+"HEX", self.__versionhex)) + self.__debug("Set "+toks[0]+" OPTION to " + toks[1]) + self.__debug("Set "+toks[0]+"HEX OPTION to " + "%04x%04x" % (major,minor)) + + else: + self.__Raptor.Warn("Invalid version supplied to VERSION (%s), using default value" % toks[1]) + + elif varname=='EPOCHEAPSIZE': + # Standardise on sending hex numbers to the FLMS. + + if toks[1].lower().startswith('0x'): + min = long(toks[1],16) + else: + min = long(toks[1],10) + + if toks[2].lower().startswith('0x'): + max = long(toks[2],16) + else: + max = long(toks[2],10) + + self.BuildVariant.AddOperation(raptor_data.Set(varname+"MIN", "%x" % min)) + self.__debug("Set "+varname+"MIN OPTION to '%x' (hex)" % min ) + self.BuildVariant.AddOperation(raptor_data.Set(varname+"MAX", "%x" % max)) + self.__debug("Set "+varname+"MAX OPTION to '%x' (hex)" % max ) + + # Some toolchains require decimal versions of the min/max values, converted to KB and + # rounded up to the next 1KB boundary + min_dec_kb = (int(min) + 1023) / 1024 + max_dec_kb = (int(max) + 1023) / 1024 + self.BuildVariant.AddOperation(raptor_data.Set(varname+"MIN_DEC_KB", "%d" % min_dec_kb)) + self.__debug("Set "+varname+"MIN OPTION KB to '%d' (dec)" % min_dec_kb ) + self.BuildVariant.AddOperation(raptor_data.Set(varname+"MAX_DEC_KB", "%d" % max_dec_kb)) + self.__debug("Set "+varname+"MAX OPTION KB to '%d' (dec)" % max_dec_kb ) + + elif varname=='EPOCSTACKSIZE': + if toks[1].lower().startswith('0x'): + stack = long(toks[1],16) + else: + stack = long(toks[1],10) + self.BuildVariant.AddOperation(raptor_data.Set(varname, "%x" % stack)) + self.__debug("Set "+varname+" OPTION to '%x' (hex)" % stack ) + elif varname=='EPOCPROCESSPRIORITY': + # low, background, foreground, high, windowserver, fileserver, realtimeserver or supervisor + # These are case insensitive in metadata entries, but must be mapped to a static case pattern for use + prio = toks[1].lower() + + # NOTE: Original validation here didn't actually work. This has been corrected to provide an error, but probably needs re-examination. 
+ if not MMPRaptorBackend.epoc32priorities.has_key(prio): + self.__Raptor.Error("Priority setting '%s' is not a valid priority - should be one of %s.", prio, MMPRaptorBackend.epoc32priorities.values()) + else: + self.__debug("Set "+toks[0]+" to " + MMPRaptorBackend.epoc32priorities[prio]) + self.BuildVariant.AddOperation(raptor_data.Set(varname,MMPRaptorBackend.epoc32priorities[prio])) + elif varname=='ROMTARGET' or varname=='RAMTARGET': + if len(toks) == 1: + self.__debug("Set "+toks[0]+" to " ) + self.BuildVariant.AddOperation(raptor_data.Set(varname,"")) + else: + toks1 = str(toks[1]).replace("\\","/") + if toks1.find(","): + toks1 = re.sub("[,'\[\]]", "", toks1).replace("//","/") + self.__debug("Set "+toks[0]+" to " + toks1) + self.BuildVariant.AddOperation(raptor_data.Set(varname,toks1)) + + else: + self.__debug("Set "+toks[0]+" to " + str(toks[1])) + self.BuildVariant.AddOperation(raptor_data.Set(varname,"".join(toks[1]))) + + if varname=='LINKAS': + self.__LINKAS = toks[1] + + return "OK" + + def doAppend(self,s,loc,toks): + self.__currentLineNumber += 1 + """MMP command + """ + name=toks[0].upper() + if len(toks) == 1: + # list can be empty e.g. MACRO _FRED_ when fred it defined in the HRH + # causes us to see just "MACRO" in the input - it is valid to ignore this + self.__debug("Empty append list for " + name) + return "OK" + self.__debug("Append to "+name+" the values: " +str(toks[1])) + + if name=='MACRO': + name='MMPDEFS' + elif name=='LANG': + # don't break the environment variable + name='LANGUAGES' + + for item in toks[1]: + if name=='MMPDEFS': + # Unquote any macros since the FLM does it anyhow + if item.startswith('"') and item.endswith('"') \ + or item.startswith("'") and item.endswith("'"): + item = item.strip("'\"") + if name=='LIBRARY' or name=='DEBUGLIBRARY': + im = MMPRaptorBackend.library_re.match(item) + if not im: + self.__error("LIBRARY: %s Seems to have an invalid name.\nExpected xxxx.lib or xxxx.dso\n where xxxx might be\n\tname or \n\tname(n,m) where n is a major version number and m is a minor version number\n" %item) + d = im.groupdict() + + item = d['name'] + if d['version'] is not None: + item += "{%04x%04x}" % (int(d['major']), int(d['minor'])) + item += ".dso" + elif name=='STATICLIBRARY': + # the FLM will decide on the ending appropriate to the platform + item = re.sub(r"^(.*)\.[Ll][Ii][Bb]$",r"\1", item) + elif name=="LANGUAGES": + item = item.lower() + elif (name=="WIN32_LIBRARY" and (item.startswith(".") or re.search(r'[\\|/]',item))) \ + or (name=="WIN32_RESOURCE"): + # Relatively pathed win32 libraries, and all win32 resources, are resolved in relation + # to the wrapper bld.inf file in which their .mmp file is specified. This equates to + # the current working directory in ABLD operation. 
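+ # e.g. a hypothetical entry "..\libs\foo.lib" would be resolved against the
+ # location of the wrapper bld.inf rather than the .mmp file itself.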
+ item = raptor_utilities.resolveSymbianPath(self.__bldInfFilename, item) + + self.BuildVariant.AddOperation(raptor_data.Append(name,item," ")) + + # maintain a debug library list, the same as LIBRARY but with DEBUGLIBRARY values + # appended as they are encountered + if name=='LIBRARY' or name=='DEBUGLIBRARY': + self.BuildVariant.AddOperation(raptor_data.Append("LIBRARY_DEBUG",item," ")) + + return "OK" + + def canonicalUID(number): + """ convert a UID string into an 8 digit hexadecimal string without leading 0x """ + if number.lower().startswith("0x"): + n = int(number,16) + else: + n = int(number,10) + + return "%08x" % n + + canonicalUID = staticmethod(canonicalUID) + + def doUIDAssignment(self,s,loc,toks): + """A single UID command results in a number of spec variables""" + self.__currentLineNumber += 1 + + hexoutput = MMPRaptorBackend.canonicalUID(toks[1][0]) + self.__debug( "Set UID2 to %s" % hexoutput ) + self.BuildVariant.AddOperation(raptor_data.Set("UID2", hexoutput)) + + if len(toks[1]) > 1: + hexoutput = MMPRaptorBackend.canonicalUID(toks[1][1]) + self.__debug( "Set UID3 to %s" % hexoutput) + self.BuildVariant.AddOperation(raptor_data.Set("UID3", hexoutput)) + + self.__debug( "done set UID") + return "OK" + + def doSourcePathAssignment(self,s,loc,toks): + self.__currentLineNumber += 1 + self.__sourcepath = raptor_utilities.resolveSymbianPath(self.__currentMmpFile, toks[1]) + self.__debug( "Remembering self.sourcepath state: "+str(toks[0])+" is now " + self.__sourcepath) + self.__debug("selfcurrentMmpFile: " + self.__currentMmpFile) + return "OK" + + + def doSourceAssignment(self,s,loc,toks): + self.__currentLineNumber += 1 + self.__debug( "Setting "+toks[0]+" to " + str(toks[1])) + for file in toks[1]: + # file is always relative to sourcepath but some MMP files + # have items that begin with a slash... + file = file.lstrip("/") + source = generic_path.Join(self.__sourcepath, file) + + # If the SOURCEPATH itself begins with a '/', then dont look up the caseless version, since + # we don't know at this time what $(EPOCROOT) will evaluate to. + if source.GetLocalString().startswith('$(EPOCROOT)'): + self.sources.append(str(source)) + self.__debug("Append SOURCE " + str(source)) + + else: + foundsource = source.FindCaseless() + if foundsource == None: + # Hope that the file will be generated later + self.__debug("Sourcefile not found: %s" % source) + foundsource = source + + self.sources.append(str(foundsource)) + self.__debug("Append SOURCE " + str(foundsource)) + + + self.__debug(" sourcepath: " + self.__sourcepath) + return "OK" + + # Resource + + def doOldResourceAssignment(self,s,loc,toks): + # Technically deprecated, but still used, so... 
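+ # Illustrative outcome: an entry "foo.rss" gives TARGET "foo" and HEADER "foo.rsg",
+ # and SYSTEMRESOURCE entries additionally receive the default system resource TARGETPATH.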
+ self.__currentLineNumber += 1 + self.__debug("Processing old-style "+toks[0]+" "+str(toks[1])) + + sysRes = (toks[0].lower() == "systemresource") + + for rss in toks[1]: + variant = raptor_data.Variant() + + source = generic_path.Join(self.__sourcepath, rss) + variant.AddOperation(raptor_data.Set("SOURCE", str(source))) + self.__resourceFiles.append(str(source)) + + target = source.File().rsplit(".", 1)[0] # remove the extension + variant.AddOperation(raptor_data.Set("TARGET", target)) + variant.AddOperation(raptor_data.Set("TARGET_lower", target.lower())) + + header = target.lower() + ".rsg" # filename policy + variant.AddOperation(raptor_data.Set("HEADER", header)) + + if sysRes: + dsrtp = self.getDefaultSystemResourceTargetPath() + variant.AddOperation(raptor_data.Set("TARGETPATH", dsrtp)) + + self.ResourceVariants.append(variant) + + return "OK" + + def getDefaultSystemResourceTargetPath(self): + # the default systemresource TARGETPATH value should come from the + # configuration rather than being hard-coded here. Then again, this + # should really be deprecated away into oblivion... + return "system/data" + + + def getDefaultResourceTargetPath(self, targettype): + # the different default TARGETPATH values should come from the + # configuration rather than being hard-coded here. + if targettype == "plugin": + return "resource/plugins" + if targettype == "pdl": + return "resource/printers" + return "" + + def resolveOptionReplace(self, content): + """ + Constructs search/replace pairs based on .mmp OPTION_REPLACE entries for use on tool command lines + within FLMS. + + Depending on what's supplied to OPTION_REPLACE , the core part of the command line + in the relevant FLM will have search and replace actions performed on it post-expansion (but pre- + any OPTION additions). + + In terms of logic, we try to follow what ABLD does, as the current behaviour is undocumented. + What happens is a little inconsistent, and best described by some generic examples: + + OPTION_REPLACE TOOL existing_option replacement_value + + Replace all instances of "option existing_value" with "option replacement_value" + + OPTION_REPLACE TOOL existing_option replacement_option + + Replace all instances of "existing_option" with "replacement_option". + + If "existing_option" is present in isolation then a removal is performed. + + Any values encountered that don't follow an option are ignored. + Options are identified as being prefixed with either '-' or '--'. + + The front-end processes each OPTION_REPLACE entry and then appends one or more search/replace pairs + to an OPTION_REPLACE_ variable in the following format: + + search<->replace + """ + # Note that, for compatibility reasons, the following is mostly a port to Python of the corresponding + # ABLD Perl, and hence maintains ABLD's idiosyncrasies in what it achieves + + searchReplacePairs = [] + matches = re.findall("-{1,2}\S+\s*(?!-)\S*",content) + + if matches: + # reverse so we can process as a stack whilst retaining original order + matches.reverse() + + while (len(matches)): + match = matches.pop() + + standaloneMatch = re.match('^(?P