Merge wip
author	raptorbot <raptorbot@systemstesthead.symbian.intra>
date	Sun, 17 Jan 2010 23:00:39 +0000
branch	wip
changeset 127 874022828cfd
parent 126 7309affc5a05 (diff)
parent 112 0a90a68583c7 (current diff)
child 128 d7a63891e2e6
Merge
--- a/sbsv2/raptor/RELEASE-NOTES.txt	Fri Jan 15 16:09:36 2010 +0000
+++ b/sbsv2/raptor/RELEASE-NOTES.txt	Sun Jan 17 23:00:39 2010 +0000
@@ -1,6 +1,37 @@
 
 Release Notes for Symbian Build System v2
 
+Next version
+
+- Combine Linking and Postlinking into a single step
+- New 'Patchable Constants' target type: TARGETTYPE pdll
+- Support exit codes from the next version of the trace compiler, which will issue them
+- New sbs_filter script and batch file to ease the use of sbs_filter.py.
+    Allows filters to be executed over a log after a build has been done.
+    e.g.
+  	sbs_filter --filters=FilterWhat < logfile.log 
+    (This runs a "--what" without regenerating any makefiles or reparsing
+    the metadata.)
+- New (beta) FilterWhatComp filter.  Simulates abld log output for
+   use with parse_what.pl when packing up zips by component.  The output
+   uses the incoming EPOCROOT value, i.e. if EPOCROOT is relative then so is
+   the what output.  e.g. if EPOCROOT=\ then the output is of the form
+   "\epoc32\release\armv5\...", if it is "..\myepocroot" then the output is
+   "..\myepocroot\epoc32\release\armv5\...", and if it is absolute then the
+   what output is also absolute.
+- New FilterCheck filter.  This can be used with sbs_filter to perform the 
+   equivalent of --check using the log output from a build.  It is more
+   efficient than --check because the metadata is not parsed and no makefiles
+   are generated. e.g.
+  	sbs_filter --filters=FilterCheck < logfile.log 
+- New (beta) graphical build visualisation tool (bin/timelines.py). Requires pygame
+   and PyOpenGL. e.g.  python timelines.py < filename.log
+- New (beta) log analyser (recipestats.py) for recording the total time spent in
+   each type of recipe in the build.  The output is in CSV format.
+   e.g. python recipestats.py < filename.log > stats.csv
+
+
+
 version 2.11.3
 
 Defect Fixes:
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sbsv2/raptor/bin/recipestats.py	Sun Jan 17 23:00:39 2010 +0000
@@ -0,0 +1,117 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2007-2009 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of the License "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description: 
+# 
+# display summary information about recipes from raptor logs
+# e.g. total times and so on.
+
+import time
+
+class RecipeStats(object):
+	STAT_OK = 0
+
+
+	def __init__(self):
+		self.stats = {}
+		self.failcount = 0
+		self.failtime = 0.0
+		self.failtypes = {}
+		self.retryfails = 0
+		
+	def add(self, starttime, duration, name, status):
+		if status != RecipeStats.STAT_OK:
+			self.failcount += 1
+			if name in self.failtypes:
+				self.failtypes[name] += 1
+			else:
+				self.failtypes[name] = 1
+
+			if status == 128:
+				self.retryfails += 1
+			return
+			
+		if name in self.stats:
+			(count, time) = self.stats[name]
+			self.stats[name] = (count + 1, time + duration)
+		else:
+			self.stats[name] = (1,duration)
+
+	def recipe_csv(self):
+		s = "# name, time, count\n"
+		for (name,(count,time)) in self.stats.iteritems():
+			s += '"%s",%s,%d\n' % (name, str(time), count)
+		return s
+
+
+
+import sys
+import re
+
+def main():
+
+	f = sys.stdin
+	st = RecipeStats()
+
+	recipe_re = re.compile(".*<recipe name='([^']+)'.*")
+	time_re = re.compile(".*<time start='([0-9]+\.[0-9]+)' *elapsed='([0-9]+\.[0-9]+)'.*")
+	status_re = re.compile(".*<status exit='(?P<exit>(ok|failed))'( *code='(?P<code>[0-9]+)')?.*")
+
+	alternating = 0
+	start_time = 0.0
+
+	
+	for l in f.xreadlines():
+		l2 = l.rstrip("\n\r")
+		rm = recipe_re.match(l2)
+
+		if rm is not None:
+			rname = rm.groups()[0]
+			continue
+
+
+		tm = time_re.match(l2)
+		if tm is not None:
+			try:
+				s = float(tm.groups()[0])
+				elapsed = float(tm.groups()[1])
+
+				if start_time == 0.0:
+					start_time = s
+
+				s -= start_time
+
+				continue
+			except ValueError, e:
+				raise Exception("Parse problem: float conversion on these groups: %s\n%s" %(str(tm.groups()), str(e)))
+		else:
+			if l2.find("<time") != -1:
+				raise Exception("unparsed timing status: %s\n"%l2)
+
+		sm = status_re.match(l2)
+
+		if sm is None:
+			continue
+
+		if sm.groupdict()['exit'] == 'ok':
+			status = 0
+		else:
+			status = int(sm.groupdict()['code'])
+
+		st.add(s, elapsed, rname, status)
+
+	print st.recipe_csv()
+
+
+if __name__ == '__main__': main()
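
The RecipeStats class above can also be exercised directly. A minimal sketch,
assuming recipestats.py is importable from the Python path; the recipe names
and timings below are invented for illustration:

from recipestats import RecipeStats   # assumes bin/recipestats.py is on PYTHONPATH

stats = RecipeStats()
stats.add(0.0, 1.5, "compile", RecipeStats.STAT_OK)   # first compile took 1.5s
stats.add(1.5, 0.7, "compile", RecipeStats.STAT_OK)   # second compile took 0.7s
stats.add(2.2, 3.1, "linkandpostlink", 1)             # failures are only counted, not timed
print stats.recipe_csv()
# expected output (row order is not guaranteed):
# # name, time, count
# "compile",2.2,2
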
--- a/sbsv2/raptor/bin/sbs	Fri Jan 15 16:09:36 2010 +0000
+++ b/sbsv2/raptor/bin/sbs	Sun Jan 17 23:00:39 2010 +0000
@@ -79,7 +79,6 @@
 	__PYTHON__=$($u "$__PYTHON__")
 
 	export SBS_HOME=$($u "$SBS_HOME")
-	export EPOCROOT=$($u "$EPOCROOT")
 
 	export PATH=${__MINGW__}/bin:${__CYGWIN__}/bin:$SBS_HOME/$HOSTPLATFORM_DIR/bin:$PATH
 
--- a/sbsv2/raptor/bin/sbs.bat	Fri Jan 15 16:09:36 2010 +0000
+++ b/sbsv2/raptor/bin/sbs.bat	Sun Jan 17 23:00:39 2010 +0000
@@ -43,7 +43,11 @@
 @IF "%__CYGWIN__%"=="" SET __CYGWIN__=%SBS_HOME%\win32\cygwin
 
 @REM add to the search path
-@SET PATH=%__MINGW__%\bin;%__CYGWIN__%\bin;%SBS_HOME%\win32\bin;%PATH%
+@REM (make sure that we don't get into trouble if there are Path and PATH variables)
+@SET PATH_TEMP=%__MINGW__%\bin;%__CYGWIN__%\bin;%SBS_HOME%\win32\bin;%PATH%
+@SET PATH=
+@SET PATH=%PATH_TEMP%
+@SET PATH_TEMP=
 
 @REM Make sure that /tmp is not set incorrectly for sbs
 @umount -u /tmp >NUL  2>NUL
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sbsv2/raptor/bin/sbs_filter	Sun Jan 17 23:00:39 2010 +0000
@@ -0,0 +1,91 @@
+#!/bin/bash
+# Copyright (c) 2007-2009 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of the License "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# raptor script
+# add mingw to the PATH if we are running Cygwin on Windows
+#
+
+# If SBS_HOME is not set in the environment then work it out
+# from the path to this batch file
+if [ -z "$SBS_HOME" ] ; then
+	temp=$0
+	SBS_HOME=$(cd ${temp%/*} && echo $PWD)
+	export SBS_HOME=${SBS_HOME%/bin}
+fi
+
+# Ensure that the host type is set for Raptor:
+eval $($SBS_HOME/bin/gethost.sh -e)
+
+if [ -z "$HOSTPLATFORM" ]; then
+	echo "Error: HOSTPLATFORM could not be determined." 1>&2
+	exit 1
+fi
+
+if [ ! -d "$SBS_HOME/$HOSTPLATFORM_DIR" ]; then
+cat 1>&2 <<EOERROR
+Error: sbs has not been installed with support for your platform: "${HOSTPLATFORM}".
+EOERROR
+	exit 1
+fi
+
+if [ "$OSTYPE" == "cygwin" ]; then
+
+	SBS_HOME=${SBS_HOME//\\//}
+
+	__MINGW__=${SBS_MINGW:-$SBS_HOME/$HOSTPLATFORM_DIR/mingw}
+	__CYGWIN__=${SBS_CYGWIN:-$SBS_HOME/$HOSTPLATFORM_DIR/cygwin}
+	__PYTHON__=${SBS_PYTHON:-$SBS_HOME/$HOSTPLATFORM_DIR/python252/python.exe}
+	export PYTHONPATH=${SBS_PYTHONPATH:-$SBS_HOME/$HOSTPLATFORM_DIR/python252}
+
+    # Command for unifying path strings. For example, "c:\some\path" and
+    # "/cygdrive/c/some/path" will both be converted into "c:/some/path".
+	u="$__CYGWIN__/bin/cygpath.exe -m"
+
+	__MINGW__=$($u "$__MINGW__")
+	__CYGWIN__=$($u "$__CYGWIN__")
+	__PYTHON__=$($u "$__PYTHON__")
+
+	export SBS_HOME=$($u "$SBS_HOME")
+
+	export PATH=${__MINGW__}/bin:${__CYGWIN__}/bin:$SBS_HOME/$HOSTPLATFORM_DIR/bin:$PATH
+
+	# Tell Cygwin not to map unix security attributes to windows to
+	# prevent raptor from potentially creating read-only files:
+	export CYGWIN='nontsec nosmbntsec'
+
+else
+	export PYTHONPATH=${SBS_PYTHONPATH:-$SBS_HOME/$HOSTPLATFORM_DIR/python262/lib}
+	PATH=$SBS_HOME/$HOSTPLATFORM_DIR/python262/bin:$SBS_HOME/$HOSTPLATFORM_DIR/bin:$PATH
+	LD_LIBRARY_PATH=$SBS_HOME/$HOSTPLATFORM_DIR/python262/lib:$SBS_HOME/$HOSTPLATFORM_DIR/bv/lib:$LD_LIBRARY_PATH
+
+	export PATH LD_LIBRARY_PATH
+	__PYTHON__=python
+fi
+
+
+# call sbs_filter.py with the arguments
+
+FILTER_START="$SBS_HOME/bin/sbs_filter.py"
+
+if [ -e "$FILTER_START" ]; then
+	# run the source version
+	${__PYTHON__} "$FILTER_START" "$@"
+elif [ -e "$FILTER_START"c ]; then
+	# run the compiled version
+	${__PYTHON__} "$FILTER_START"c "$@"
+else
+	echo "Cannot start sbs_filter - $FILTER_START not found." 1>&2
+	echo "Check your SBS_HOME environment variable." 1>&2
+fi
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sbsv2/raptor/bin/sbs_filter.bat	Sun Jan 17 23:00:39 2010 +0000
@@ -0,0 +1,47 @@
+@rem
+@rem Copyright (c) 2005-2009 Nokia Corporation and/or its subsidiary(-ies).
+@rem All rights reserved.
+@rem This component and the accompanying materials are made available
+@rem under the terms of the License "Eclipse Public License v1.0"
+@rem which accompanies this distribution, and is available
+@rem at the URL "http://www.eclipse.org/legal/epl-v10.html".
+@rem
+@rem Initial Contributors:
+@rem Nokia Corporation - initial contribution.
+@rem
+@rem Contributors:
+@rem
+@rem Description: 
+@rem
+
+@REM Automatically find SBS_HOME if it is not set
+@IF NOT "%SBS_HOME%"==""  goto foundhome
+@SET RAPTORBINDIR=%~dp0
+@SET WD=%cd%
+@cd %RAPTORBINDIR%\..
+@SET SBS_HOME=%cd%
+@cd %WD%
+:foundhome 
+
+@REM Use the cygwin set by the environment if possible
+@SET __CYGWIN__=%SBS_CYGWIN%
+@IF "%__CYGWIN__%"=="" SET __CYGWIN__=%SBS_HOME%\win32\cygwin
+
+@REM add to the search path
+@SET PATH=%__CYGWIN__%\bin;%PATH%
+
+@REM Make sure that /tmp is not set incorrectly for sbs
+@umount -u /tmp >NUL  2>NUL
+@mount -u %TEMP% /tmp >NUL 2>NUL
+@umount -u / >NUL  2>NUL
+@mount -u %__CYGWIN__% / >NUL 2>NUL
+
+@REM Tell CYGWIN not to map unix security attributes to windows to
+@REM prevent raptor from potentially creating read-only files:
+@set CYGWIN=nontsec nosmbntsec
+
+@REM Run with all the arguments.
+@bash %SBS_HOME%\bin\sbs_filter %*
+
+@ENDLOCAL
+@cmd /c exit /b %ERRORLEVEL%
--- a/sbsv2/raptor/bin/sbs_filter.py	Fri Jan 15 16:09:36 2010 +0000
+++ b/sbsv2/raptor/bin/sbs_filter.py	Sun Jan 17 23:00:39 2010 +0000
@@ -61,22 +61,29 @@
 	the_raptor.out.open(raptor_params, the_raptor.filterList.split(','), pbox)
 	
 except Exception, e:
-	sys.stderr.write("filter exception: %s\n" % str(e))
-	traceback.print_ex()
+	sys.stderr.write("error: problem while creating filters %s\n" % str(e))
+	traceback.print_exc()
 	sys.exit(1)
 		
 # read stdin a line at a time and pass it to the Raptor object
-line = " "
-while line:
-	line = sys.stdin.readline()
-	the_raptor.out.write(line)
+try:
+	line = " "
+	while line:
+		line = sys.stdin.readline()
+		the_raptor.out.write(line)
+except Exception, e:
+	sys.stderr.write("error: problem while filtering: %s\n" % str(e))
+	traceback.print_exc()
+	sys.exit(1)
 
-# from Raptor.CloseLog()
-if not the_raptor.out.summary():
-	the_raptor.errorCode = 1
+the_raptor_errorCode = 0
+
+# Print the summary (this can't return errors)
+the_raptor.out.summary()
 	
 if not the_raptor.out.close():
-	the_raptor.errorCode = 1
+	print "BADCLOSE"
+	the_raptor.errorCode = 2
 	
 # return the error code
 sys.exit(the_raptor.errorCode)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sbsv2/raptor/bin/timelines.py	Sun Jan 17 23:00:39 2010 +0000
@@ -0,0 +1,224 @@
+#
+# Copyright (c) 2007-2009 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of the License "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description: 
+# 
+# Raptor log visualisation program. Takes a raptor log as standard input
+# and displays timelines that represent build progress and 
+# how much actual parallelism there is in the build.
+# This program requires the pygame and PyOpenGL modules.
+
+from OpenGL.GL import *
+from OpenGL.GLU import *
+import pygame
+from pygame.locals import *
+import time
+
+class Timeline(object):
+	"""A bar representing a number of recipes which were executed in 
+	   time sequence.  There is no guarantee about what host but in 
+	   theory they could have been executed on the same host."""
+
+	globalmax = 2.0
+
+	def __init__(self,ylevel):
+		self.maxtime = 0.0
+		self.recipes = []
+		self.ylevel = ylevel
+
+	def append(self, recipe):
+		"" add this recipe to this timeline if it happens after the latest recipe already in the timeline ""
+		if recipe.starttime + recipe.duration > self.maxtime:
+			self.maxtime = recipe.starttime + recipe.duration
+			if self.maxtime > Timeline.globalmax:
+				Timeline.globalmax = self.maxtime 
+		else:
+			pass
+
+		self.recipes.append(recipe)
+
+	def draw(self):
+		glLoadIdentity()
+		self.xscale = 4.0 / Timeline.globalmax
+
+		glTranslatef(-2.0, -1.5, -6.0)
+		count = 0
+		for r in self.recipes:
+			if count % 2 == 0:
+				coloff=0.8
+			else:
+				coloff = 1.0
+
+			count += 1
+			r.draw(self.xscale, self.ylevel, coloff)
+
+class Recipe(object):
+	"""Represents a task completed in a raptor build. 
+	   Drawn as a colour-coded bar with different 
+	   colours for the various recipe types."""
+	STAT_OK = 0
+	colours = {
+		'compile': (0.5,0.5,1.0),
+		'compile2object': (0.5,0.5,1.0),
+		'win32compile2object': (0.5,0.5,1.0),
+		'tools2linkexe': (0.5,1.0,0.5),
+		'link': (0.5,1.0,0.5),
+		'linkandpostlink': (0.5,1.0,0.5),
+		'win32stageonelink': (0.5,1.0,0.5),
+		'tools2lib': (0.5,1.0,1.0),
+		'win32stagetwolink': (1.0,0.1,1.0),
+		'postlink': (1.0,0.5,1.0)
+		}
+
+	def __init__(self, starttime, duration, name, status):
+		self.starttime = starttime
+		self.duration = duration
+		self.status = status
+		self.colour = (1.0, 1.0, 1.0)
+		if name in Recipe.colours:
+			self.colour = Recipe.colours[name]
+		else:
+			self.colour = (1.0,1.0,1.0)
+		self.name = name 
+
+	def draw(self, scale, ylevel, coloff):
+		if self.status == Recipe.STAT_OK:
+			glColor4f(self.colour[0]*coloff, self.colour[1]*coloff, self.colour[2]*coloff,0.2)
+		else:
+			glColor4f(1.0*coloff, 0.6*coloff, 0.6*coloff,0.2)
+
+
+		x = self.starttime * scale
+		y = ylevel
+		x2 = x + self.duration * scale
+		y2 = ylevel + 0.2
+		glBegin(GL_QUADS)
+		glVertex3f(x, y, 0)
+		glVertex3f(x, y2, 0)
+		glVertex3f(x2, y2, 0)
+		glVertex3f(x2, y, 0)
+		glEnd()
+
+
+def resize((width, height)):
+	if height==0:
+		height=1
+	glViewport(0, 0, width, height)
+	glMatrixMode(GL_PROJECTION)
+	glLoadIdentity()
+	gluPerspective(45, 1.0*width/height, 0.1, 100.0)
+	glMatrixMode(GL_MODELVIEW)
+	glLoadIdentity()
+
+def init():
+	glShadeModel(GL_SMOOTH)
+	glClearColor(0.0, 0.0, 0.0, 0.0)
+	glClearDepth(1.0)
+	glEnable(GL_DEPTH_TEST)
+	glDepthFunc(GL_LEQUAL)
+	glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST)
+
+
+import sys
+import re
+
+def main():
+
+	video_flags = OPENGL|DOUBLEBUF
+
+	pygame.init()
+	pygame.display.set_mode((800,600), video_flags)
+
+	resize((800,600))
+	init()
+
+	frames = 0
+	ticks = pygame.time.get_ticks()
+
+
+	lines = 4
+	timelines = []
+	ylevel = 0.0
+	for i in xrange(0,4):
+		ylevel += 0.6 
+		timelines.append(Timeline(ylevel))
+
+	f = sys.stdin
+
+	recipe_re = re.compile(".*<recipe name='([^']+)'.*")
+	time_re = re.compile(".*<time start='([0-9]+\.[0-9]+)' *elapsed='([0-9]+\.[0-9]+)'.*")
+	status_re = re.compile(".*<status exit='([^']*)'.*")
+
+	alternating = 0
+	start_time = 0.0
+
+	
+	for l in f.xreadlines():
+		l2 = l.rstrip("\n")
+		rm = recipe_re.match(l2)
+
+		if rm is not None:
+			rname = rm.groups()[0]
+			continue
+
+
+		tm = time_re.match(l2)
+		if tm is not None:
+			s = float(tm.groups()[0])
+			elapsed = float(tm.groups()[1])
+
+			if start_time == 0.0:
+				start_time = s
+
+			s -= start_time
+
+			continue
+
+		sm = status_re.match(l2)
+
+		if sm is None:
+			continue
+
+		if sm.groups()[0] == 'ok':
+			status = 0
+		else:
+			status = 1 # non-zero marks the recipe as failed
+
+		olddiff = 999999999.0
+		tnum = 0
+		for t in timelines:
+			newdiff = s - t.maxtime
+			if newdiff < 0.0:
+				continue
+			if olddiff > newdiff:
+				dest_timeline = t
+				olddiff = newdiff
+			tnum += 1
+
+		dest_timeline.append(Recipe(s, elapsed, rname, status))
+		event = pygame.event.poll()
+		if event.type == QUIT or (event.type == KEYDOWN and event.key == K_ESCAPE):
+			break
+
+		glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT)
+		for t in timelines:
+			t.draw()
+		pygame.display.flip()
+
+		frames = frames+1
+
+	print "fps:  %de" % ((frames*1000)/(pygame.time.get_ticks()-ticks))
+	event = pygame.event.wait()
+
+
+if __name__ == '__main__': main()
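
Independently of the OpenGL drawing, the way recipes are binned into Timeline
objects can be tried out on its own. A minimal sketch, assuming timelines.py is
importable (this still needs pygame and PyOpenGL installed, because the module
imports them at the top); the recipe data is invented:

from timelines import Timeline, Recipe

slots = [Timeline(0.6), Timeline(1.2)]
for start, duration, name, status in [(0.0, 2.0, "compile", 0),
                                      (0.5, 1.0, "compile", 0),
                                      (2.1, 0.4, "linkandpostlink", 0)]:
    # pick the timeline whose last recipe finished closest to this start time,
    # mirroring the selection loop in main() above
    best = min((t for t in slots if start >= t.maxtime),
               key=lambda t: start - t.maxtime)
    best.append(Recipe(start, duration, name, status))

print [len(t.recipes) for t in slots]   # prints [2, 1] for this data
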
--- a/sbsv2/raptor/lib/config/interfaces.xml	Fri Jan 15 16:09:36 2010 +0000
+++ b/sbsv2/raptor/lib/config/interfaces.xml	Sun Jan 17 23:00:39 2010 +0000
@@ -5,12 +5,13 @@
 	<var name="default.interfaces">
 		<!-- interfaces corresponding to target types -->
 
-		<set name="INTERFACE_TYPES" value="exe stdexe ext_makefile dll stddll lib stdlib export extension ani plugin textnotifier2 implib var var2 exexp kexe kdll kext klib ldd pdd pdl fsy resource none stringtable bitmap"/>
+		<set name="INTERFACE_TYPES" value="exe stdexe ext_makefile dll stddll lib stdlib export extension ani plugin textnotifier2 implib var var2 exexp kexe kdll kext klib pdll ldd pdd pdl fsy resource none stringtable bitmap"/>
 		<set name="INTERFACE.exe" value="Symbian.exe"/>
 		<set name="INTERFACE.stdexe" value="Symbian.stdexe"/>
 		<set name="INTERFACE.stddll" value="Symbian.stddll"/>
 		<set name="INTERFACE.stdlib" value="Symbian.stdlib"/>
 		<set name="INTERFACE.dll" value="Symbian.dll"/>
+		<set name="INTERFACE.pdll" value="Symbian.pdll"/>
 		<set name="INTERFACE.lib" value="Symbian.lib"/>
 		<set name="INTERFACE.ani" value="Symbian.ani"/>
 		<set name="INTERFACE.plugin" value="Symbian.plugin"/>
--- a/sbsv2/raptor/lib/config/variants.xml	Fri Jan 15 16:09:36 2010 +0000
+++ b/sbsv2/raptor/lib/config/variants.xml	Sun Jan 17 23:00:39 2010 +0000
@@ -187,7 +187,6 @@
 		<set name="OWN_LIBRARY_OPTION" value="--library_interface=aeabi_clib"/>
 		<set name="RELOCATABLE_IMAGE_OPTION" value=""/>
 		<set name="SPLIT_OPTION" value=""/>
-		<set name="STDLIB_OPTION" value=""/>
 		<set name="PLATMACROS.VAR" value="ARMCC_4 ARMCC_4_0"/>
 		<set name="ARMMACROS.VAR" value="__ARMCC_4__ __ARMCC_4_0__"/>
 		<set name="SYMBIAN_LINK_FLAGS.VAR" value="--override_visibility"/>
--- a/sbsv2/raptor/lib/config/winscw.xml	Fri Jan 15 16:09:36 2010 +0000
+++ b/sbsv2/raptor/lib/config/winscw.xml	Sun Jan 17 23:00:39 2010 +0000
@@ -26,6 +26,7 @@
 		<set name="INTERFACE.exe" value="Emulator.exe"/>
 		<set name="INTERFACE.stdexe" value="Emulator.stdexe"/>
 		<set name="INTERFACE.dll" value="Emulator.dll"/>
+		<set name="INTERFACE.pdll" value="Emulator.pdll"/>
 		<set name="INTERFACE.stddll" value="Emulator.stddll"/>
 		<set name="INTERFACE.exexp" value="Emulator.exexp"/>
 		<set name="INTERFACE.fsy" value="Emulator.fsy"/>
--- a/sbsv2/raptor/lib/flm/e32abiv2.flm	Fri Jan 15 16:09:36 2010 +0000
+++ b/sbsv2/raptor/lib/flm/e32abiv2.flm	Sun Jan 17 23:00:39 2010 +0000
@@ -384,7 +384,24 @@
 # Generating the import library is enough if TARGETTYPE=implib #############
 
 ifneq ($(DOPOSTLINK),)
-include $(FLMHOME)/e32postlink.mk
+# Capabilities
+ADDED_CAPABILITIES:=$(subst $(CHAR_SPACE),+,$(filter-out -%,$(CAPABILITY)))
+SUBTRACTED_CAPABILITIES:=$(subst $(CHAR_SPACE),,$(filter -%,$(CAPABILITY)))
+FINAL_CAPABILITIES:=$(if $(ADDED_CAPABILITIES),$(ADDED_CAPABILITIES)$(SUBTRACTED_CAPABILITIES),NONE)
+
+# Paging options for the old postlinker
+POSTLINKER_PAGEDOPTION:=--defaultpaged
+ifeq ($(PAGED),1)
+  POSTLINKER_PAGEDOPTION:=--paged
+endif
+ifeq ($(PAGED),0)
+  POSTLINKER_PAGEDOPTION:=--unpaged
+endif
+
+CLEANTARGETS:=$(CLEANTARGETS) $(E32TARGET)
+CLEANTARGETS:=$(CLEANTARGETS) $(GENERATED_DEFFILE)
+CLEANTARGETS:=$(CLEANTARGETS) $(GENERATED_DSO)
+
 endif # ifneq ($(DOPOSTLINK),)
 
 ifneq ($(TARGETTYPE),implib)
@@ -592,10 +609,10 @@
 # outside the relevant tags but it is also unavoidable.
 define linktarget_func
 ## The actual link target, dependencies and build step
-$(LINK_TARGET): $(if $(MULTIFILE_ENABLED),$(MULTIFILEOBJECT) $(CIAFILES_LINKOBJECTS),$(LINKOBJECTS)) $(escaped_e32abiv2_LIBS) $(LINKER_ENTRYPOINT_LIBDEP) $(if $(SUPPORTS_STDCPP_NEWLIB),$(CHECKLIB)) $(if $(LINKERFEEDBACK_STAGE2),$(FEEDBACKFILE),)
+$(E32TARGET): $(POSTLINKDEFFILE) $(ELF2E32) $(if $(MULTIFILE_ENABLED),$(MULTIFILEOBJECT) $(CIAFILES_LINKOBJECTS),$(LINKOBJECTS)) $(escaped_e32abiv2_LIBS) $(LINKER_ENTRYPOINT_LIBDEP) $(if $(SUPPORTS_STDCPP_NEWLIB),$(CHECKLIB)) $(if $(LINKERFEEDBACK_STAGE2),$(FEEDBACKFILE),) $(if $(HAVE_ORDERONLY),|,) $(EPOCROOT)/epoc32/build/TEM_LIB
 	$(if $(MULTIFILE_ENABLED),,@echo -n "" > $(VIAFILE);
 	$(call groupin10,$(LINKOBJECTS)) ;)
-	$(call startrule,link) \
+	$(call startrule,linkandpostlink) \
 	$(if $(PERTURBSTARTTIME),$(RANSLEEP) $(PERTURBMSECS) ;,) \
 	$(if $(SUPPORTS_STDCPP_NEWLIB),$(if $(located_STATICLIBRARIES),$(CHECKLIB) $(CHECKLIB_TYPE) --elf $(call dblquote,$(located_STATICLIBRARIES)) &&,),) \
 	$(LD) $(LINKER_MISC_FLAGS) $(LINKER_DEFAULT_LIB_PATHS) $(SYMBIAN_LINK_FLAGS) $(if $(DEBUG_INFO),$(LINKER_DEBUG_OPTION),$(LINKER_NODEBUG_OPTION)) \
@@ -605,19 +622,61 @@
 	  $(LINKER_ARCH_OPTION) \
 	  $(SYMVER_OPTION) $(SO_NAME_OPTION)=$(call dblquote,$(LINKASVERSIONED)) \
 	  $(LINKER_ENTRYPOINT_SETTING) \
-	  -o $$(call dblquote,$$@) \
+	  -o $$(call dblquote,$(LINK_TARGET)) \
 	  $(if $(LTCG),$(LTCG_OPTION),) \
 	  $(LINKER_SYMBOLS_OPTION) $(LINKER_SYMBOLS_FILE_OPTION)=$(call dblquote,$(MAPFILE)) \
   	  $(LINKEROPTION) \
 	  $(if $(MULTIFILE_ENABLED),$(call dblquote,$(MULTIFILEOBJECT) $(CIAFILES_LINKOBJECTS)),$(COMMANDFILE_OPTION)$(call dblquote,$(VIAFILE))) \
-          $(if $(GENERATELINKERFEEDBACK),$(FEEDBACK_OPTION)$(call dblquote,$(FEEDBACKFILE))) \
+      $(if $(GENERATELINKERFEEDBACK),$(FEEDBACK_OPTION)$(call dblquote,$(FEEDBACKFILE))) \
 	  $(if $(LINKER_ADD_STATIC_RUNTIME),$(if $(STATIC_RUNTIME_LIB),$(LINKER_GROUP_START_OPTION) $(STATIC_RUNTIME_DIR)/$(STATIC_RUNTIME_LIB) $(LINKER_GROUP_END_OPTION),)) \
-	  $(quoted_e32abiv2_LIBS) $(LINKER_DEFAULT_LIBS)\
-	$(call endrule,link)
+	  $(quoted_e32abiv2_LIBS) $(LINKER_DEFAULT_LIBS) && \
+	  $(ELF2E32) \
+	  --sid=0x$(if $(SID),$(SID),$(if $(UID3),$(UID3),0)) \
+	  --version=$(VERSION) \
+	  --capability=$(FINAL_CAPABILITIES) \
+	  --linkas=$(call dblquote,$(LINKASVERSIONED)) \
+	  --fpu=$(if $(ARMFPU),$(ARMFPU),$(POSTLINKER_FPU_DEFAULT)) \
+	  --targettype=$(POSTLINKTARGETTYPE) \
+	  --output=$$(call dblquote,$$@) \
+	  --elfinput=$(call dblquote,$(LINK_TARGET)) \
+	  $(if $(UID1),--uid1=0x$(UID1),) \
+	  $(if $(UID2),--uid2=0x$(UID2),) \
+	  $(if $(UID3),--uid3=0x$(UID3),) \
+	  $(if $(VENDORID),--vid=0x$(VENDORID),) \
+	  $(if $(EXPTARGET),--customdlltarget,) \
+	  $(if $(ARMLIBS),--excludeunwantedexports,) \
+	  $(if $(EPOCALLOWDLLDATA),--dlldata,) \
+	  $(if $(EPOCPROCESSPRIORITY),--priority=$(EPOCPROCESSPRIORITY),) \
+	  $(if $(EPOCSTACKSIZE),--stack=0x$(EPOCSTACKSIZE),) \
+	  $(if $(EPOCHEAPSIZEMIN),--heap=0x$(EPOCHEAPSIZEMIN)$(CHAR_COMMA)0x$(EPOCHEAPSIZEMAX),) \
+	  $(if $(EPOCFIXEDPROCESS),--fixedaddress,) \
+	  $(if $(EPOCDATALINKADDRESS),--datalinkaddress=$(EPOCDATALINKADDRESS),) \
+	  $(if $(NAMEDSYMLKUP),--namedlookup,) \
+	  $(if $(SMPSAFE),--smpsafe,) \
+	  $(if $(POSTLINKDEFFILE),--definput=$(POSTLINKDEFFILE),) \
+	  $(if $(EXPORTUNFROZEN),--unfrozen,) \
+	  $(if $(AUTOEXPORTS),--sysdef=$(call dblquote,$(AUTOEXPORTS)),) \
+	  $(if $(CANIGNORENONCALLABLE), \
+	    $(if $(IMPORTLIBRARYREQUIRED),,--ignorenoncallable),) \
+	  $(if $(CANHAVEEXPORTS), --defoutput=$(call dblquote,$(GENERATED_DEFFILE)) --dso=$(GENERATED_DSO)) \
+	  $(if $(filter $(VARIANTTYPE),$(DEBUGGABLE)),--debuggable,) \
+	  $(if $(POSTLINKER_SUPPORTS_WDP), \
+	    --codepaging=$(PAGEDCODE_OPTION) --datapaging=$(PAGEDDATA_OPTION), \
+	    $(POSTLINKER_PAGEDOPTION)) \
+	  $(if $(NOCOMPRESSTARGET),--uncompressed, \
+	    $(if $(INFLATECOMPRESSTARGET),--compressionmethod=inflate, \
+	      $(if $(BYTEPAIRCOMPRESSTARGET),--compressionmethod=bytepair, \
+	        --compressionmethod=$(POSTLINKER_COMPRESSION_DEFAULT)))) \
+	  --libpath="$(call concat,$(PATHSEP)$(CHAR_SEMIC),$(strip $(RUNTIME_LIBS_PATH) $(STATIC_LIBS_PATH)))" \
+	  $(if $(SAVESPACE),$(if $(EXPORTUNFROZEN),,&& { $(GNURM) -rf $(INTERMEDIATEPATH); true; })) \
+	$(call endrule,linkandpostlink)
 
-$(MAPFILE): $(LINK_TARGET)
+$(MAPFILE): $(E32TARGET)
+$(LINK_TARGET): $(E32TARGET)
 endef
+ifneq ($(DOPOSTLINK),)
 $(eval $(linktarget_func))
+endif # ifneq ($(DOPOSTLINK),)
 
 CLEANTARGETS:=$(CLEANTARGETS) $(VIAFILE) $(if $(GENERATELINKERFEEDBACK),$(FEEDBACKFILE)) $(if $(MULTIFILE_ENABLED),$(MULTIFILEOBJECT))
 WHATRELEASE:=$(WHATRELEASE) $(MAPFILE)
--- a/sbsv2/raptor/lib/flm/e32abiv2defaults.mk	Fri Jan 15 16:09:36 2010 +0000
+++ b/sbsv2/raptor/lib/flm/e32abiv2defaults.mk	Sun Jan 17 23:00:39 2010 +0000
@@ -59,7 +59,7 @@
 	LINKER_ENTRYPOINT_ADORNMENT:=(uc_exe_.o)
   endif
 
-  ifeq ($(call isoneof,$(TARGETTYPE),ani textnotifier2 stddll plugin fsy pdl dll),1)
+  ifeq ($(call isoneof,$(TARGETTYPE),ani textnotifier2 stddll plugin fsy pdl dll pdll),1)
 	LINKER_ENTRYPOINT_ADORNMENT:=(uc_dll_.o)
   endif
 
--- a/sbsv2/raptor/lib/flm/e32abiv2dll.flm	Fri Jan 15 16:09:36 2010 +0000
+++ b/sbsv2/raptor/lib/flm/e32abiv2dll.flm	Sun Jan 17 23:00:39 2010 +0000
@@ -55,6 +55,6 @@
 $(call vrestore)
 
 else
-$(error $e32abiv2dll.flm called with wrong TARGETTYPE (should be 'dll' but is '$(TARGETTYPE)'))
+$(error e32abiv2dll.flm called with wrong TARGETTYPE (should be 'dll' but is '$(TARGETTYPE)'))
 endif
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sbsv2/raptor/lib/flm/e32abiv2pdll.flm	Sun Jan 17 23:00:39 2010 +0000
@@ -0,0 +1,61 @@
+# Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of the License "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# ARMv5 PDLL ABIv2 Function Like Makefile (FLM)
+# Build an e32 PDLL (Patchable constants DLL)
+# 
+#
+
+ifeq ($(TARGETTYPE),pdll)
+include $(FLMHOME)/e32abiv2defaults.mk
+
+# What we need to build a DLL
+ifeq ($(NOEXPORTLIBRARY),)
+IMPORTLIBRARYREQUIRED:=1
+endif
+POSTLINKDEFFILE:=$(DEFFILE)
+SUPPORT_FREEZE:=1
+
+# Default Linker settings for this target type
+LINKER_ENTRYPOINT_LIBDEP:=$(STATIC_RUNTIME_DIR)/edll.lib
+LINKER_ENTRYPOINT_SETTING:=$(LINKER_ENTRY_OPTION)=_E32Dll $(LINKER_ENTRYPOINT_DECORATION)$(LINKER_SEPARATOR)$(call dblquote,$(STATIC_RUNTIME_DIR)/edll.lib$(LINKER_ENTRYPOINT_ADORNMENT))
+
+ifeq ("$(NEED_ENTRYPOINT_LIBRARY)","True")
+LINKER_ENTRYPOINT_SETTING:=$(LINKER_ENTRYPOINT_SETTING) $(LINKER_ENTRYPOINT_LIBDEP)
+endif
+
+LINKER_STUB_LIBRARY:=$(STATIC_RUNTIME_DIR)/edllstub.lib
+STATIC_RUNTIME_LIB:=$(USER_STATIC_RUNTIME_LIB)
+
+
+# Default Postlinker settings
+CANHAVEEXPORTS:=1
+POSTLINKTARGETTYPE:=DLL
+POSTLINKFILETYPE:=dll
+DOPOSTLINK:=1
+CANIGNORENONCALLABLE:=1
+
+# Use the general EABI FLM 
+# We are appending to CDEFS but we don't want this to affect
+# other invocations so we are going to save it on a stack
+# and restore it afterwards
+$(call vsave,CDEFS)
+CDEFS:=$(CDEFS) __DLL__
+include $(FLMHOME)/e32abiv2.flm
+$(call vrestore)
+
+else
+$(error e32abiv2pdll.flm called with wrong TARGETTYPE (should be 'pdll' but is '$(TARGETTYPE)'))
+endif
+
+
--- a/sbsv2/raptor/lib/flm/e32postlink.mk	Fri Jan 15 16:09:36 2010 +0000
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,140 +0,0 @@
-#
-# Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies).
-# All rights reserved.
-# This component and the accompanying materials are made available
-# under the terms of the License "Eclipse Public License v1.0"
-# which accompanies this distribution, and is available
-# at the URL "http://www.eclipse.org/legal/epl-v10.html".
-#
-# Initial Contributors:
-# Nokia Corporation - initial contribution.
-#
-# Contributors:
-#
-# Description: 
-# ARMv5 e32 postlinking FLM
-# Knows how to postlink all possible ABIV2 executables for ARM
-#
-
-# Interface
-#
-#  Metadata supplied (or deduced from)
-#
-#   ARMFPU
-#   BYTEPAIRCOMPRESSTARGET
-#   CAPABILITY
-#   COMPRESSTARGET                    Not directly referenced, at least with the current approach to compression keywords
-#   DEBUGGABLE                        Can be "udeb" or "urel" or "udeb urel" or ""
-#   E32TARGET
-#   EPOCALLOWDLLDATA
-#   EPOCFIXEDPROCESS
-#   EPOCHEAPSIZEMAX
-#   EPOCHEAPSIZEMIN
-#   EPOCPROCESSPRIORITY
-#   EPOCSTACKSIZE
-#   EXPORTUNFROZEN
-#   INFLATECOMPRESSTARGET
-#   NOCOMPRESSTARGET
-#   POSTLINKTARGETTYPE
-#   SID
-#   SMPSAFE
-#   UID2
-#   UID3
-#   VERSION
-#   VENDORID
-#
-#  Other
-#
-#   ARMLIBS
-#   AUTOEXPORTS                       Symbols that must be assumed to exist for this TARGETTYPE in the format: export,ordinal;export,ordinal;..
-#   CANIGNORENONCALLABLE              If the TARGETTYPE allows it, disregard non-callable exports (v-tables, type information, etc.)
-#   CANHAVEEXPORTS
-#   CLEANTARGETS
-#   ELF2E32
-#   EPOCDATALINKADDRESS               Redundant?
-#   EPOCROOT
-#   EXPTARGET
-#   GENERATED_DEFFILE
-#   GENERATED_DSO
-#   HAVE_ORDERONLY
-#   IMPORTLIBRARYREQUIRED
-#   INTERMEDIATEPATH
-#   LINKASVERSIONED
-#   LINK_TARGET                       Postlinker elf input
-#   NAMEDSYMLKUP
-#   PAGEDCODE_OPTION
-#   POSTLINKDEFFILE
-#   POSTLINKER_COMPRESSION_DEFAULT    Default compression when either COMPRESSTARGET or no compression .mmp keyword is used
-#   POSTLINKER_FPU_DEFAULT
-#   POSTLINKER_SUPPORTS_WDP
-#   RUNTIME_LIBS_PATH
-#   SAVESPACE
-#   STATIC_LIBS_PATH
-#   UID1
-#   VARIANTTYPE
-
-
-# Capabilities
-ADDED_CAPABILITIES:=$(subst $(CHAR_SPACE),+,$(filter-out -%,$(CAPABILITY)))
-SUBTRACTED_CAPABILITIES:=$(subst $(CHAR_SPACE),,$(filter -%,$(CAPABILITY)))
-FINAL_CAPABILITIES:=$(if $(ADDED_CAPABILITIES),$(ADDED_CAPABILITIES)$(SUBTRACTED_CAPABILITIES),NONE)
-
-# Paging options for the old postlinker
-POSTLINKER_PAGEDOPTION:=--defaultpaged
-ifeq ($(PAGED),1)
-  POSTLINKER_PAGEDOPTION:=--paged
-endif
-ifeq ($(PAGED),0)
-  POSTLINKER_PAGEDOPTION:=--unpaged
-endif
-
-# Postlink target
-define e32postlink
-$(E32TARGET): $(LINK_TARGET) $(POSTLINKDEFFILE) $(ELF2E32) $(if $(HAVE_ORDERONLY),|,) $(EPOCROOT)/epoc32/build/TEM_LIB
-	$(call startrule,postlink) \
-	$(ELF2E32) \
-	  --sid=0x$(if $(SID),$(SID),$(if $(UID3),$(UID3),0)) \
-	  --version=$(VERSION) \
-	  --capability=$(FINAL_CAPABILITIES) \
-	  --linkas=$(call dblquote,$(LINKASVERSIONED)) \
-	  --fpu=$(if $(ARMFPU),$(ARMFPU),$(POSTLINKER_FPU_DEFAULT)) \
-	  --targettype=$(POSTLINKTARGETTYPE) \
-	  --output=$$(call dblquote,$$@) \
-	  --elfinput=$(call dblquote,$(LINK_TARGET)) \
-	  $(if $(UID1),--uid1=0x$(UID1),) \
-	  $(if $(UID2),--uid2=0x$(UID2),) \
-	  $(if $(UID3),--uid3=0x$(UID3),) \
-	  $(if $(VENDORID),--vid=0x$(VENDORID),) \
-	  $(if $(EXPTARGET),--customdlltarget,) \
-	  $(if $(ARMLIBS),--excludeunwantedexports,) \
-	  $(if $(EPOCALLOWDLLDATA),--dlldata,) \
-	  $(if $(EPOCPROCESSPRIORITY),--priority=$(EPOCPROCESSPRIORITY),) \
-	  $(if $(EPOCSTACKSIZE),--stack=0x$(EPOCSTACKSIZE),) \
-	  $(if $(EPOCHEAPSIZEMIN),--heap=0x$(EPOCHEAPSIZEMIN)$(CHAR_COMMA)0x$(EPOCHEAPSIZEMAX),) \
-	  $(if $(EPOCFIXEDPROCESS),--fixedaddress,) \
-	  $(if $(EPOCDATALINKADDRESS),--datalinkaddress=$(EPOCDATALINKADDRESS),) \
-	  $(if $(NAMEDSYMLKUP),--namedlookup,) \
-	  $(if $(SMPSAFE),--smpsafe,) \
-	  $(if $(POSTLINKDEFFILE),--definput=$(POSTLINKDEFFILE),) \
-	  $(if $(EXPORTUNFROZEN),--unfrozen,) \
-	  $(if $(AUTOEXPORTS),--sysdef=$(call dblquote,$(AUTOEXPORTS)),) \
-	  $(if $(CANIGNORENONCALLABLE), \
-	    $(if $(IMPORTLIBRARYREQUIRED),,--ignorenoncallable),) \
-	  $(if $(CANHAVEEXPORTS), --defoutput=$(call dblquote,$(GENERATED_DEFFILE)) --dso=$(GENERATED_DSO)) \
-	  $(if $(filter $(VARIANTTYPE),$(DEBUGGABLE)),--debuggable,) \
-	  $(if $(POSTLINKER_SUPPORTS_WDP), \
-	    --codepaging=$(PAGEDCODE_OPTION) --datapaging=$(PAGEDDATA_OPTION), \
-	    $(POSTLINKER_PAGEDOPTION)) \
-	  $(if $(NOCOMPRESSTARGET),--uncompressed, \
-	    $(if $(INFLATECOMPRESSTARGET),--compressionmethod=inflate, \
-	      $(if $(BYTEPAIRCOMPRESSTARGET),--compressionmethod=bytepair, \
-	        --compressionmethod=$(POSTLINKER_COMPRESSION_DEFAULT)))) \
-	  --libpath="$(call concat,$(PATHSEP)$(CHAR_SEMIC),$(strip $(RUNTIME_LIBS_PATH) $(STATIC_LIBS_PATH)))" \
-	  $(if $(SAVESPACE),$(if $(EXPORTUNFROZEN),,&& { $(GNURM) -rf $(INTERMEDIATEPATH); true; })) \
-	$(call endrule,postlink)
-endef
-$(eval $(e32postlink))
-
-CLEANTARGETS:=$(CLEANTARGETS) $(E32TARGET)
-CLEANTARGETS:=$(CLEANTARGETS) $(GENERATED_DEFFILE)
-CLEANTARGETS:=$(CLEANTARGETS) $(GENERATED_DSO)
--- a/sbsv2/raptor/lib/flm/emulator.xml	Fri Jan 15 16:09:36 2010 +0000
+++ b/sbsv2/raptor/lib/flm/emulator.xml	Sun Jan 17 23:00:39 2010 +0000
@@ -151,6 +151,23 @@
 		<param name='MAKEDEF'/>
 	</interface>
 
+	<interface name="Emulator.pdll" extends="Emulator.win32" flm="win32pdll.flm">
+		<param name='CHECKLIB' default=''/>
+		<param name='LD'/>
+		<param name='LFLAGS_INFGEN'/>
+		<param name='OPT.CHECKLIB.STDCPP'/>
+		<param name='OPT.CHECKLIB.SYMCPP'/>
+		<param name='OPT.CHECKLIB.WIN32'/>
+		<param name='OPT.HEAPCOMMIT'/>
+		<param name='OPT.HEAPRESERVE'/>
+		<param name='OPT.IMAGEBASE'/>
+		<param name='OPT.IMPLIB'/>
+		<param name='OPT.LIBFILE'/>
+		<param name='OPT.LIBPATH'/>
+		<param name='OPT.NOIMPLIB'/>
+		<param name='MAKEDEF'/>
+	</interface>
+
 	<interface name="Emulator.stddll" extends="Emulator.dll" flm="win32stddll.flm">
 		<param name='LFLAGS_SYMGEN'/>
 		<param name='SYMLOOKUPUTIL'/>
--- a/sbsv2/raptor/lib/flm/standard.xml	Fri Jan 15 16:09:36 2010 +0000
+++ b/sbsv2/raptor/lib/flm/standard.xml	Sun Jan 17 23:00:39 2010 +0000
@@ -212,6 +212,10 @@
 		<param name='TARGETTYPE' default="dll"/>
 		<param name='UID1' default="10000079"/>
 	</interface>
+	<interface name="Symbian.pdll" extends="Symbian.e32abiv2" flm="e32abiv2pdll.flm">
+		<param name='TARGETTYPE' default="pdll"/>
+		<param name='UID1' default="10000079"/>
+	</interface>
 	<interface name="Symbian.stddll" extends="Symbian.e32abiv2" flm="e32abiv2stddll.flm">
 		<param name='TARGETTYPE' default="stddll"/>
 		<param name='UID1' default="10000079"/>
--- a/sbsv2/raptor/lib/flm/tracecompiler.mk	Fri Jan 15 16:09:36 2010 +0000
+++ b/sbsv2/raptor/lib/flm/tracecompiler.mk	Sun Jan 17 23:00:39 2010 +0000
@@ -63,9 +63,9 @@
 	( echo -en "$(TRACE_PRJNAME)\n$(PROJECT_META)\n"; \
 	  $(GNUCAT) $(TRACE_SOURCE_LIST); \
 	  echo -en "*ENDOFSOURCEFILES*\n" ) | \
-	$(JAVA_COMMAND) $(TRACE_COMPILER_START) $(UID_TC) && \
-	$(GNUMD5SUM) $(TRACE_SOURCE_LIST) > $(TRACE_MARKER) ; \
-	$(GNUCAT) $(TRACE_SOURCE_LIST) \
+	$(JAVA_COMMAND) $(TRACE_COMPILER_START) $(UID_TC) &&  \
+	$(GNUMD5SUM) $(TRACE_SOURCE_LIST) > $(TRACE_MARKER) && \
+	{ $(GNUCAT) $(TRACE_SOURCE_LIST) ; true ; } \
 	$(call endrule,tracecompile)
 endef
 
--- a/sbsv2/raptor/lib/flm/win32dll.flm	Fri Jan 15 16:09:36 2010 +0000
+++ b/sbsv2/raptor/lib/flm/win32dll.flm	Sun Jan 17 23:00:39 2010 +0000
@@ -30,5 +30,5 @@
 include $(FLMHOME)/win32.flm
 
 else
-$(error $win32dll.flm called with wrong TARGETTYPE (should be 'dll' but is '$(TARGETTYPE)'))
+$(error win32dll.flm called with wrong TARGETTYPE (should be 'dll' but is '$(TARGETTYPE)'))
 endif
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sbsv2/raptor/lib/flm/win32pdll.flm	Sun Jan 17 23:00:39 2010 +0000
@@ -0,0 +1,34 @@
+# Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of the License "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# WINSCW PDLL Function Like Makefile (FLM)
+# Build an emulator PDLL
+# 
+#
+
+ifeq ($(TARGETTYPE),pdll)
+
+BASE_TYPE:=dll
+CW_STATIC_RUNTIME:=1
+FIRST_STATLIB:=
+FIXED_EXPORT:=
+SUPPORTS_IMPORT_LIBRARY:=1
+SYSTEM_TARGET:=0
+UID2_DEFAULT:=
+
+# Use the general win32 FLM 
+include $(FLMHOME)/win32.flm
+
+else
+$(error win32pdll.flm called with wrong TARGETTYPE (should be 'pdll' but is '$(TARGETTYPE)'))
+endif
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sbsv2/raptor/python/plugins/filter_check.py	Sun Jan 17 23:00:39 2010 +0000
@@ -0,0 +1,29 @@
+#
+# Copyright (c) 2008-2009 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of the License "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description: 
+# Filter class for doing --what and --check operations
+#
+
+import os
+import sys
+import re
+import filter_interface
+import filter_what
+
+class FilterCheck(filter_what.FilterWhat):
+
+	def __init__(self):
+		super(FilterCheck, self).__init__()
+		self.check = True
+
--- a/sbsv2/raptor/python/plugins/filter_terminal.py	Fri Jan 15 16:09:36 2010 +0000
+++ b/sbsv2/raptor/python/plugins/filter_terminal.py	Sun Jan 17 23:00:39 2010 +0000
@@ -113,6 +113,7 @@
 		"asmcompile" : "asmcompile" ,
 		"compile" : "compile" ,
 		"postlink" : "target",
+		"linkandpostlink" : "target",
 		"resourcecompile" : "resource",
 		"genstringtable" : "strtable",
 		"tem" : "tem",
--- a/sbsv2/raptor/python/plugins/filter_what.py	Fri Jan 15 16:09:36 2010 +0000
+++ b/sbsv2/raptor/python/plugins/filter_what.py	Sun Jan 17 23:00:39 2010 +0000
@@ -22,6 +22,10 @@
 
 class FilterWhat(filter_interface.Filter):
 
+	def __init__(self):
+		super(FilterWhat, self).__init__()
+		self.path_prefix_to_strip = None
+		self.path_prefix_to_add_on = None
 	
 	def print_file(self, line, start, end):
 		"Ensure DOS slashes on Windows"
@@ -33,6 +37,12 @@
 			filename = line[(start + 1):end].replace("/","\\")
 		else:
 			filename = line[(start + 1):end]
+
+		if self.path_prefix_to_strip:
+			if filename.startswith(self.path_prefix_to_strip):
+				filename = filename[len(self.path_prefix_to_strip):]
+			if self.path_prefix_to_add_on != None:
+				filename = self.path_prefix_to_add_on + filename
 			
 		if self.check:
 			if not os.path.isfile(filename):
@@ -42,6 +52,12 @@
 			self.outfile.write(filename+"\n")
 
 		self.prints += 1
+
+	def start_bldinf(self, bldinf):
+		pass
+
+	def end_bldinf(self):
+		pass
 		
 
 	def open(self, build_parameters):
@@ -79,6 +95,10 @@
 		
 		"Regex for zip exports"
 		self.zip_export_regex = re.compile("^<member>.*")
+
+		"Regex for determining bld.inf name"
+		self.whatlog_regex = re.compile("^<whatlog *bldinf='(?P<bldinf>[^']*)'.*")
+		self.current_bldinf = ''
 		
 		self.prints = 0
 		self.ok = True		
@@ -105,6 +125,7 @@
 				self.repetitions[line] = 0
 				
 			if self.repetitions[line] == 0:
+				
 				if self.regex.match(line) and (self.what or self.check):
 					"Print the whole line"
 					self.print_file(line, (-1), len(line))
@@ -129,6 +150,20 @@
 					end = line.rfind("<")
 					
 					self.print_file(line, start, end)
+
+				else:
+					"work out what the 'current' bldinf file is"
+					m = self.whatlog_regex.match(line)
+					if m:
+						bi = m.groupdict()['bldinf']
+						if self.current_bldinf != bi:
+							if self.current_bldinf != '':
+								self.end_bldinf()
+							self.current_bldinf = bi
+							if bi != '':
+								self.start_bldinf(bi)
+							
+					
 						
 			self.repetitions[line] += 1
 				
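
The start_bldinf/end_bldinf hooks and the path prefix attributes added to
FilterWhat above make it a small extension point for per-component filters.
A minimal sketch of a plugin using the hooks (the class name is hypothetical;
FilterWhatComp below is the real first user):

import filter_what

class FilterWhatPerComponent(filter_what.FilterWhat):
    # hypothetical example plugin, not part of this changeset
    def start_bldinf(self, bldinf):
        self.outfile.write("=== %s\n" % bldinf)

    def end_bldinf(self):
        self.outfile.write("=== end of component\n")
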
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sbsv2/raptor/python/plugins/filter_whatcomp.py	Sun Jan 17 23:00:39 2010 +0000
@@ -0,0 +1,56 @@
+#
+# Copyright (c) 2008-2009 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of the License "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description: 
+# Filter class for doing --what and --check operations
+#
+
+import os
+import sys
+import re
+import filter_interface
+import filter_what
+
+class FilterWhatComp(filter_what.FilterWhat):
+
+	def __init__(self):
+		super(FilterWhatComp, self).__init__()
+
+	def write(self, text):
+		"process some log text"
+		ok = True
+		
+		for line in text.splitlines():
+			ok = filter_what.FilterWhat.write(self, line)
+			if not ok:
+				break
+				
+		self.ok = ok
+		return self.ok
+	
+	def start_bldinf(self,bldinf):
+		if "win" in self.buildparameters.platform:
+			dir = os.path.dirname(bldinf.replace("/","\\"))
+		else:
+			dir = os.path.dirname(bldinf)
+
+		self.outfile.write("-- abld -w \nChdir %s \n" % dir)
+		
+	def end_bldinf(self):
+		self.outfile.write("++ Finished\n")
+
+	def open(self, build_parameters):
+		t = filter_what.FilterWhat.open(self, build_parameters)
+		self.path_prefix_to_strip = os.path.abspath(build_parameters.epocroot)
+		self.path_prefix_to_add_on = build_parameters.incoming_epocroot
+		return t
--- a/sbsv2/raptor/python/raptor.py	Fri Jan 15 16:09:36 2010 +0000
+++ b/sbsv2/raptor/python/raptor.py	Sun Jan 17 23:00:39 2010 +0000
@@ -11,7 +11,7 @@
 #
 # Contributors:
 #
-# Description: 
+# Description:
 # raptor module
 # This module represents the running Raptor program. Raptor is started
 # either by calling the Main() function, which creates an instance of
@@ -58,12 +58,15 @@
 # defaults can use EPOCROOT
 
 if "EPOCROOT" in os.environ:
-        epocroot = os.environ["EPOCROOT"].replace("\\","/")
+	incoming_epocroot = os.environ["EPOCROOT"]
+	epocroot = incoming_epocroot.replace("\\","/")
 else:
 	if 'linux' in hostplatform:
 		epocroot=os.environ['HOME'] + os.sep + "epocroot"
 		os.environ["EPOCROOT"] = epocroot
+		incoming_epocroot = epocroot
 	else:
+		incoming_epocroot = "\\"
 		epocroot = "/"
 		os.environ["EPOCROOT"] = os.sep
 
@@ -119,7 +122,7 @@
 		self.type = type
 		self.specs = []
 		self.deps = []
-		self.children = set() 
+		self.children = set()
 		self.unfurled = False
 		self.parent = parent
 
@@ -163,15 +166,15 @@
 
 		for c in self.children:
 			c.unfurl_all(build)
-		
+
 
 	def realise_exports(self, build):
-		"""Do the things that are needed such that we can fully unfurl all 
+		"""Do the things that are needed such that we can fully unfurl all
 		   sibling nodes.  i.e. this step is here to "take care" of the dependencies
-		   between siblings.  
+		   between siblings.
 		"""
 		pass
-	
+
 	def realise_makefile(self, build, specs):
 		makefilename_base = build.topMakefile
 		if self.name is not None:
@@ -190,16 +193,16 @@
 				key = str(makefile.path))
 
 		return makefileset
-		
+
 
 
 	def realise(self, build):
-		"""Give the spec trees to the make engine and actually 
-		"build" the product represented by this model node"""	
+		"""Give the spec trees to the make engine and actually
+		"build" the product represented by this model node"""
 		# Must ensure that all children are unfurled at this point
 		self.unfurl_all(build)
 
-		sp = self.specs	
+		sp = self.specs
 
 		build.AssertBuildOK()
 
@@ -210,8 +213,8 @@
 		result = build.Make(m)
 		build.InfoEndTime(object_type = "layer", task = "build",
 				key = (str(m.directory) + "/" + str(m.filenamebase)))
-		
-		
+
+
 		return result
 
 
@@ -229,7 +232,7 @@
 
 	def makefile(self, makefilename_base, engine, named = False):
 		"""Makefiles for individual mmps not feasible at the moment"""
-		pass # Cannot, currently, "unfurl an mmp" directly but do want 
+		pass # Cannot, currently, "unfurl an mmp" directly but do want
 		     # to be able to simulate the overall recursive unfurling of a build.
 
 class Component(ModelNode):
@@ -252,9 +255,9 @@
 
 
 class Layer(ModelNode):
-	""" 	Some components that should be built togther 
-		e.g. a Layer in the system definition. 
-	""" 
+	""" 	Some components that should be built togther
+		e.g. a Layer in the system definition.
+	"""
 	def __init__(self, name, componentlist=[]):
 		super(Layer,self).__init__(name)
 		self.name = name
@@ -263,11 +266,11 @@
 			self.children.add(Component(c))
 
 	def unfurl(self, build):
-		"""Discover the children of this layer. This involves parsing the component MetaData (bld.infs, mmps). 
+		"""Discover the children of this layer. This involves parsing the component MetaData (bld.infs, mmps).
 		Takes a raptor object as a parameter (build), together with a list of Configurations.
 
 		We currently have parsers that work on collections of components/bld.infs and that cannot
-		parse at a "finer" level.  So one can't 'unfurl' an mmp at the moment.  
+		parse at a "finer" level.  So one can't 'unfurl' an mmp at the moment.
 
 		Returns True if the object was successfully unfurled.
 		"""
@@ -300,7 +303,7 @@
 	def meta_realise(self, build):
 		"""Generate specs that can be used to "take care of" finding out more
 		about this metaunit - i.e. one doesn't want to parse it immediately
-		but to create a makefile that will parse it. 
+		but to create a makefile that will parse it.
 		In this case it allows bld.infs to be parsed in parallel by make."""
 
 		# insert the start time into the Makefile name?
@@ -310,44 +313,44 @@
 
 		# Pass certain CLI flags through to the makefile-generating sbs calls
 		cli_options = ""
-			
+
 		if build.debugOutput == True:
 			cli_options += " -d"
-				
+
 		if build.ignoreOsDetection == True:
 			cli_options += " -i"
-			
+
 		if build.keepGoing == True:
 			cli_options += " -k"
-			
+
 		if build.quiet == True:
 			cli_options += " -q"
-			
+
 		if build.timing == True:
 			cli_options += " --timing"
 
-		
+
 		nc = len(self.children)
 		number_blocks = build.jobs
 		block_size = (nc / number_blocks) + 1
 		component_blocks = []
 		spec_nodes = []
-		
+
 		b = 0
 		childlist = list(self.children)
 		while b < nc:
 			component_blocks.append(childlist[b:b+block_size])
 			b += block_size
-			
+
 		while len(component_blocks[-1]) <= 0:
 			component_blocks.pop()
 			number_blocks -= 1
-	
+
 		build.Info("Parallel Parsing: bld.infs split into %d blocks\n", number_blocks)
-		# Cause the binding makefiles to have the toplevel makefile's 
-		# name.  The bindee's have __pp appended.	
+		# Cause the binding makefiles to have the toplevel makefile's
+		# name.  The bindee's have __pp appended.
 		tm = build.topMakefile.Absolute()
-		binding_makefiles = raptor_makefile.MakefileSet(str(tm.Dir()), build.maker.selectors, makefiles=None, filenamebase=str(tm.File()))		
+		binding_makefiles = raptor_makefile.MakefileSet(str(tm.Dir()), build.maker.selectors, makefiles=None, filenamebase=str(tm.File()))
 		build.topMakefile = generic_path.Path(str(build.topMakefile) + "_pp")
 
 		loop_number = 0
@@ -357,16 +360,16 @@
 
 			componentList = " ".join([str(c.bldinf_filename) for c in block])
 
-			
+
 			configList = " ".join([c.name for c in self.configs if c.name != "build" ])
-			
+
 			makefile_path = str(build.topMakefile) + "_" + str(loop_number)
 			try:
 				os.unlink(makefile_path) # until we have dependencies working properly
 			except Exception,e:
 				# print "couldn't unlink %s: %s" %(componentMakefileName, str(e))
 				pass
-			
+
 			# add some basic data in a component-wide variant
 			var = raptor_data.Variant()
 			var.AddOperation(raptor_data.Set("COMPONENT_PATHS", componentList))
@@ -385,13 +388,13 @@
 			if build.noBuild:
 				var.AddOperation(raptor_data.Set("NO_BUILD", "1"))
 			specNode.AddVariant(var)
-	
+
 			try:
 				interface = build.cache.FindNamedInterface("build.makefiles")
 				specNode.SetInterface(interface)
 			except KeyError:
 				build.Error("Can't find flm interface 'build.makefiles' ")
-				
+
 			spec_nodes.append(specNode)
 			binding_makefiles.addInclude(str(makefile_path)+"_all")
 
@@ -430,7 +433,7 @@
 
 
 	M_BUILD = 1
-	M_VERSION = 2	
+	M_VERSION = 2
 
 	def __init__(self, home = None):
 
@@ -554,7 +557,7 @@
 			self.Warn("ignoring target %s because --what or --check is specified.\n", target)
 		else:
 			self.targets.append(target)
-			
+
 	def AddSourceTarget(self, filename):
 		# source targets are sanitised and then added as if they were a "normal" makefile target
 		# in addition they have a default, empty, top-level target assigned in order that they can
@@ -615,7 +618,7 @@
 	def SetNoDependInclude(self, TrueOrFalse):
 		self.noDependInclude = TrueOrFalse
 		return True
-		
+
 	def SetKeepGoing(self, TrueOrFalse):
 		self.keepGoing = TrueOrFalse
 		return True
@@ -669,7 +672,7 @@
 			return False
 
 		return True
-	
+
 	def SetTiming(self, TrueOrFalse):
 		self.timing = TrueOrFalse
 		return True
@@ -717,9 +720,9 @@
 		self.Info("Set-up %s", str(self.raptorXML))
 		self.Info("Command-line-arguments %s", " ".join(self.args))
 		self.Info("Current working directory %s", os.getcwd())
-		
+
 		# the inherited environment
-		for e, value in os.environ.items():
+		for e, value in sorted( os.environ.items() ):
 			self.Info("Environment %s=%s", e, value.replace("]]>", "]]&gt;"))
 
 		# and some general debug stuff
@@ -822,7 +825,7 @@
 				return self.home.Append(aGenericPath)
 			else:
 				return aGenericPath
-		
+
 		# make generic paths absolute (if required)
 		self.configPath = map(mkAbsolute, self.configPath)
 		self.cache.Load(self.configPath)
@@ -859,12 +862,12 @@
 		return x
 
 	def GetBuildUnitsToBuild(self, configNames):
-		"""Return a list of the configuration objects that correspond to the 
+		"""Return a list of the configuration objects that correspond to the
 		   list of configuration names in the configNames parameter.
 
 		raptor.GetBuildUnitsToBuild(["armv5", "winscw"])
 		>>> [ config1, config2, ... , configN ]
-		""" 
+		"""
 
 		if len(configNames) == 0:
 			# use default config
@@ -878,9 +881,9 @@
 
 		for c in set(configNames):
 			self.Debug("BuildUnit: %s", c)
-			try:		
+			try:
 				x = self.GetConfig(c)
-				gb = x.GenerateBuildUnits(self.cache) 
+				gb = x.GenerateBuildUnits(self.cache)
 				buildUnitsToBuild.update( gb )
 			except Exception, e:
 				self.FatalError(str(e))
@@ -894,7 +897,7 @@
 		return buildUnitsToBuild
 
 	def CheckToolset(self, evaluator, configname):
-		"""Check the toolset for a particular config, allow other objects access 
+		"""Check the toolset for a particular config, allow other objects access
 		to the toolset for this build (e.g. the raptor_make class)."""
 		if self.toolset is None:
 			if self.toolcheck == 'on':
@@ -968,7 +971,7 @@
 
 
 	def FindComponentIn(self, aDir = None):
-		# look for a bld.inf 
+		# look for a bld.inf
 
 		if aDir is None:
 			dir = generic_path.CurrentDir()
@@ -1109,14 +1112,14 @@
 		for a,v in dictionary.items():
 			atts += " " + a + "='" + v + "'"
 		return atts
-	
+
 	def Info(self, format, *extras, **attributes):
 		"""Send an information message to the configured channel
 				(XML control characters will be escaped)
 		"""
 		self.out.write("<info" + self.attributeString(attributes) + ">" +
 		               escape(format % extras) + "</info>\n")
-		
+
 	def InfoDiscovery(self, object_type, count):
 		if self.timing:
 			try:
@@ -1124,7 +1127,7 @@
 						count = count))
 			except Exception, exception:
 				Error(exception.Text, function = "InfoDiscoveryTime")
-		
+
 	def InfoStartTime(self, object_type, task, key):
 		if self.timing:
 			try:
@@ -1132,7 +1135,7 @@
 						task = task, key = key))
 			except Exception, exception:
 				Error(exception.Text, function = "InfoStartTime")
-		
+
 	def InfoEndTime(self, object_type, task, key):
 		if self.timing:
 			try:
@@ -1154,7 +1157,7 @@
 		"""Send a warning message to the configured channel
 				(XML control characters will be escaped)
 		"""
-		self.out.write("<warning" + self.attributeString(attributes) + ">" + 
+		self.out.write("<warning" + self.attributeString(attributes) + ">" +
 		               escape(format % extras) + "</warning>\n")
 
 	def FatalError(self, format, *extras, **attributes):
@@ -1165,7 +1168,7 @@
 		   further errors are probably triggered by the first.
 		"""
 		if not self.fatalErrorState:
-			self.out.write("<error" + self.attributeString(attributes) + ">" + 
+			self.out.write("<error" + self.attributeString(attributes) + ">" +
 			               (format % extras) + "</error>\n")
 			self.errorCode = 1
 			self.fatalErrorState = True
@@ -1174,7 +1177,7 @@
 		"""Send an error message to the configured channel
 				(XML control characters will be escaped)
 		"""
-		self.out.write("<error" + self.attributeString(attributes) + ">" + 
+		self.out.write("<error" + self.attributeString(attributes) + ">" +
 		               escape(format % extras) + "</error>\n")
 		self.errorCode = 1
 
@@ -1212,7 +1215,7 @@
 		if self.systemDefinitionFile != None:
 			systemModel = raptor_xml.SystemModel(self, self.systemDefinitionFile, self.systemDefinitionBase)
 			layers = self.GatherSysModelLayers(systemModel, self.systemDefinitionRequestedLayers)
-			
+
 		# Now get components specified on a commandline - build them after any
 		# layers in the system definition.
 		if len(self.commandlineComponents) > 0:
@@ -1246,7 +1249,7 @@
 			self.Introduction()
 			# establish an object cache
 			self.AssertBuildOK()
-			
+
 			self.LoadCache()
 
 			# find out what configurations to build
@@ -1299,7 +1302,7 @@
 				for l in layers:
 					# create specs for a specific group of components
 					l.realise(self)
-					
+
 		except BuildCannotProgressException,b:
 			if str(b) != "":
 				self.Info(str(b))
@@ -1326,7 +1329,7 @@
 		build.ProcessConfig()
 		build.CommandLine(argv)
 
-		return build 
+		return build
 
 
 
@@ -1334,6 +1337,8 @@
 class BuildStats(object):
 
 	def __init__(self, raptor_instance):
+		self.incoming_epocroot = incoming_epocroot
+		self.epocroot = epocroot
 		self.logFileName = raptor_instance.logFileName
 		self.quiet = raptor_instance.quiet
 		self.doCheck = raptor_instance.doCheck
--- a/sbsv2/raptor/python/raptor_data.py	Fri Jan 15 16:09:36 2010 +0000
+++ b/sbsv2/raptor/python/raptor_data.py	Sun Jan 17 23:00:39 2010 +0000
@@ -722,15 +722,23 @@
 	def Apply(self, oldValue):
 		try:
 			value = os.environ[self.name]
-
-			# if this value is a "path" or a "tool" then we need to make sure
-			# it is a proper absolute path in our preferred format.
-			if value and (self.type == "path" or self.type == "tool"):
-				try:
-					path = generic_path.Path(value)
-					value = str(path.Absolute())
-				except ValueError,e:
-					raise BadToolValue("the environment variable %s is incorrect: %s" % (self.name, str(e)))
+			
+			if value:
+				# if this value is a "path" or a "tool" then we need to make sure
+				# it is a proper absolute path in our preferred format.
+				if self.type == "path" or self.type == "tool":
+					try:
+						path = generic_path.Path(value)
+						value = str(path.Absolute())
+					except ValueError,e:
+						raise BadToolValue("the environment variable %s is incorrect: %s" % (self.name, str(e)))				
+				# if this value ends in an un-escaped backslash, then it will be treated as a line continuation character
+				# in makefile parsing - un-escaped backslashes at the end of values are therefore escaped
+				elif value.endswith('\\'):
+					# an odd number of backslashes means there's one to escape
+					count = len(value) - len(value.rstrip('\\'))
+					if count % 2:
+						value += '\\'	
 		except KeyError:
 			if self.default != None:
 				value = self.default
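
The trailing-backslash handling added above only appends a backslash when an
odd number of them ends the value, so an already even (escaped) run is left
alone. The same counting rule as a standalone sketch, independent of the
surrounding class:

def escape_trailing_backslash(value):
    # an odd number of trailing backslashes would act as a line continuation
    # in a generated makefile, so add one more to make the run even
    count = len(value) - len(value.rstrip('\\'))
    if count % 2:
        value += '\\'
    return value

print escape_trailing_backslash('..\\myepocroot\\')    # one trailing backslash -> two
print escape_trailing_backslash('..\\myepocroot\\\\')  # two trailing backslashes -> unchanged
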
--- a/sbsv2/raptor/python/raptor_make.py	Fri Jan 15 16:09:36 2010 +0000
+++ b/sbsv2/raptor/python/raptor_make.py	Sun Jan 17 23:00:39 2010 +0000
@@ -30,6 +30,8 @@
 from raptor_makefile import *
 import traceback
 import sys
+from xml.sax.saxutils import escape
+
 
 # raptor_make module classes
 
@@ -403,7 +405,7 @@
 			command = self.buildCommand
 
 			if self.makefileOption:
-				command += " " + self.makefileOption + " " + '"' + str(makefile) + '"'
+				command += " " + self.makefileOption + " " + ' "' + str(makefile) + '" '
 
 			if self.raptor.keepGoing and self.keepGoingOption:
 				command += " " + self.keepGoingOption
@@ -416,7 +418,13 @@
 			command += " " + self.defaultMakeOptions
 			# Can supply options on the commandline to override default settings.
 			if len(self.raptor.makeOptions) > 0:
-				command += " " + " ".join(self.raptor.makeOptions)
+				for o in self.raptor.makeOptions:
+					if o.find(";") != -1:
+						command += "  " + "'" + o + "'"
+					elif o.find("\\") != -1:
+						command += "  " + o.replace("\\","\\\\")
+					else:
+						command += "  " + o
 
 			# Switch off dependency file including?
 			if self.raptor.noDependInclude:
@@ -449,6 +457,11 @@
 			if addTargets:
 				command += " " + " ".join(addTargets)
 
+			# Send stderr to a file so that it can't mess up the log (e.g.
+			# clock skew messages from some build engines).
+			stderrfilename = makefile+'.stderr'
+			command += " 2>'%s' " % stderrfilename
+
 			# Substitute the makefile name for any occurrence of #MAKEFILE#
 			command = command.replace("#MAKEFILE#", str(makefile))
 
@@ -469,16 +482,20 @@
 					makeenv['TALON_SHELL']=self.talonshell
 					makeenv['TALON_BUILDID']=str(self.buildID)
 					makeenv['TALON_TIMEOUT']=str(self.talontimeout)
+
 				if self.raptor.filesystem == "unix":
-					p = subprocess.Popen(command, bufsize=65535,
-									     stdout=subprocess.PIPE,
-									     stderr=subprocess.STDOUT,
-									     close_fds=True, env=makeenv, shell=True)
+					p = subprocess.Popen([command], bufsize=65535,
+						stdout=subprocess.PIPE,
+						stderr=subprocess.STDOUT,
+						close_fds=True, env=makeenv, shell=True)
 				else:
-					p = subprocess.Popen(command, bufsize=65535,
-									     stdout=subprocess.PIPE,
-									     stderr=subprocess.STDOUT,
-									     universal_newlines=True, env=makeenv)
+					p = subprocess.Popen(args = 
+						[raptor_data.ToolSet.shell, '-c', command],
+						bufsize=65535,
+						stdout=subprocess.PIPE,
+						stderr=subprocess.STDOUT,
+						shell = False,
+						universal_newlines=True, env=makeenv)
 				stream = p.stdout
 
 
@@ -487,6 +504,14 @@
 					line = stream.readline()
 					self.raptor.out.write(line)
 
+				try:
+					e = open(stderrfilename,"r")
+					for line in e:
+						self.raptor.out.write(escape(line))
+					e.close()
+				except Exception,e:
+					self.raptor.Error("Couldn't complete stderr output for '%s' - %s", command, str(e))
+
 				# should be done now
 				returncode = p.wait()
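
The option loop above replaces the previous single join: options containing ';' are single-quoted and lone backslashes are doubled before the command reaches the shell and make, and stderr is redirected to <makefile>.stderr and echoed back into the log once make has finished. A standalone sketch of the escaping rule (the function name is illustrative only):

def quote_make_option(option):
	# mirror the escaping above: protect ';' from the shell and stop make
	# from swallowing single backslashes
	if option.find(";") != -1:
		return "'" + option + "'"
	elif option.find("\\") != -1:
		return option.replace("\\", "\\\\")
	return option

assert quote_make_option("-j8") == "-j8"
assert quote_make_option("--debug=b;v") == "'--debug=b;v'"
assert quote_make_option("TOOLDIR=C:\\tools") == "TOOLDIR=C:\\\\tools"
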
 
--- a/sbsv2/raptor/python/raptor_meta.py	Fri Jan 15 16:09:36 2010 +0000
+++ b/sbsv2/raptor/python/raptor_meta.py	Sun Jan 17 23:00:39 2010 +0000
@@ -1,3310 +1,3310 @@
-#
-# Copyright (c) 2007-2009 Nokia Corporation and/or its subsidiary(-ies).
-# All rights reserved.
-# This component and the accompanying materials are made available
-# under the terms of the License "Eclipse Public License v1.0"
-# which accompanies this distribution, and is available
-# at the URL "http://www.eclipse.org/legal/epl-v10.html".
-#
-# Initial Contributors:
-# Nokia Corporation - initial contribution.
-#
-# Contributors:
-#
-# Description: 
-# This module includes classes that process bld.inf and .mmp files to
-# generate Raptor build specifications
-#
-
-import copy
-import re
-import os.path
-import shutil
-import stat
-import hashlib
-import base64
-
-import raptor
-import raptor_data
-import raptor_utilities
-import raptor_xml
-import generic_path
-import subprocess
-import zipfile
-from mmpparser import *
-
-import time
-
-
-PiggyBackedBuildPlatforms = {'ARMV5':['GCCXML']}
-
-PlatformDefaultDefFileDir = {'WINSCW':'bwins',
-				  'ARMV5' :'eabi',
-				  'ARMV5SMP' :'eabi',
-				  'GCCXML':'eabi',
-				  'ARMV6':'eabi',
-				  'ARMV7' : 'eabi',
-				  'ARMV7SMP' : 'eabi'}
-
-def getVariantCfgDetail(aEPOCROOT, aVariantCfgFile):
-	"""Obtain pertinent build related detail from the Symbian variant.cfg file.
-
-	This variant.cfg file, usually located relative to $(EPOCROOT), contains:
-	(1) The $(EPOCROOT) relative location of the primary .hrh file used to configure the specific OS variant build
-	(2) A flag determining whether ARMV5 represents an ABIV1 or ABIV2 build (currently unused by Raptor)."""
-
-	variantCfgDetails = {}
-	variantCfgFile = None
-
-	try:
-		variantCfgFile = open(str(aVariantCfgFile))
-	except IOError, (number, message):
-		raise MetaDataError("Could not read variant configuration file "+str(aVariantCfgFile)+" ("+message+")")
-
-	for line in variantCfgFile.readlines():
-		if re.search('^(\s$|\s*#)', line):
-			continue
-		# Note that this detection of the .hrh file matches the command line build i.e. ".hrh" somewhere
-		# in the specified line
-		elif re.search('\.hrh', line, re.I):
-			variantHrh = line.strip()
-			if variantHrh.startswith('\\') or variantHrh.startswith('/'):
-				variantHrh = variantHrh[1:]
-			variantHrh = aEPOCROOT.Append(variantHrh)
-			variantCfgDetails['VARIANT_HRH'] = variantHrh
-		else:
-			lineContent = line.split()
-
-			if len(lineContent) == 1:
-				variantCfgDetails[lineContent.pop(0)] = 1
-			else:
-				variantCfgDetails[lineContent.pop(0)] = lineContent
-
-	variantCfgFile.close()
-
-	if not variantCfgDetails.has_key('VARIANT_HRH'):
-		raise MetaDataError("No variant file specified in "+str(aVariantCfgFile))
-	if not variantHrh.isFile():
-		raise MetaDataError("Variant file "+str(variantHrh)+" does not exist")
-
-	return variantCfgDetails
-
-def getOsVerFromKifXml(aPathToKifXml):
-	"""Obtain the OS version from the kif.xml file located at $EPOCROOT/epoc32/data/kif.xml.
-
-	If successful, the function returns a string such as "v95" to indicate 9.5; None is
-	returned if for any reason the function cannot determine the OS version."""
-
-	releaseTagName = "ki:release"
-	osVersion = None
-
-	import xml.dom.minidom
-
-	try:
-		# Parsed document object
-		kifDom = xml.dom.minidom.parse(str(aPathToKifXml))
-
-		# elements - the elements whose names are releaseTagName
-		elements = kifDom.getElementsByTagName(releaseTagName)
-
-		# There should be exactly one of the elements whose name is releaseTagName
-		# If more than one, osVersion is left as None, since the version should be
-		# unique to the kif.xml file
-		if len(elements) == 1:
-			osVersionTemp = elements[0].getAttribute("version")
-			osVersion = "v" + osVersionTemp.replace(".", "")
-
-		kifDom.unlink() # Clean up
-
-	except:
-		# There's no documentation on which exceptions are raised by these functions.
-		# We catch everything and assume any exception means there was a failure to
-		# determine OS version. None is returned, and the code will fall back
-		# to looking at the buildinfo.txt file.
-		pass
-
-	return osVersion
-
-def getOsVerFromBuildInfoTxt(aPathToBuildInfoTxt):
-	"""Obtain the OS version from the buildinfo.txt file located at $EPOCROOT/epoc32/data/buildinfo.txt.
-
-	If successful, the function returns a string such as "v95" to indicate 9.5; None is
-	returned if for any reason the function cannot determine the OS version.
-
-	The file $EPOCROOT/epoc32/data/buildinfo.txt is presumed to exist. The client code should
-	handle existence/non-existence."""
-
-	pathToBuildInfoTxt = str(aPathToBuildInfoTxt) # String form version of path to buildinfo.txt
-
-	# Open the file for reading; throw an exception if it could not be read - note that
-	# it should exist at this point.
-	try:
-		buildInfoTxt = open(pathToBuildInfoTxt)
-	except IOError, (number, message):
-		raise MetaDataError("Could not read buildinfo.txt file at " + pathToBuildInfoTxt + ": (" + message + ")")
-
-	# Example buildinfo.txt contents:
-	#
-	# DeviceFamily               100
-	# DeviceFamilyRev            0x900
-	# ManufacturerSoftwareBuild  M08765_Symbian_OS_v9.5
-	#
-	# Regexp to match the line containing the OS version
-	# Need to match things like M08765_Symbian_OS_v9.5 and M08765_Symbian_OS_vFuture
-	# So for the version, match everything except whitespace after v. Whitespace
-	# signifies the end of the regexp.
-	osVersionMatcher = re.compile('.*_Symbian_OS_v([^\s]*)', re.I)
-	osVersion = None
-
-	# Search for a regexp match over all the lines in the file
-	# Note: if two or more lines match the search pattern then
-	# the latest match will overwrite the osVersion string.
-	for line in buildInfoTxt:
-		matchResult = osVersionMatcher.match(line)
-		if matchResult:
-			result = matchResult.groups()
-			osVersion = "v" +  str(reduce(lambda x, y: x + y, result))
-			osVersion = osVersion.replace(".", "")
-
-	buildInfoTxt.close() # Clean-up
-
-	return osVersion
-
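
A quick standalone check of the regular expression above against a line in the documented buildinfo.txt format (the build string itself is made up):

import re

osVersionMatcher = re.compile('.*_Symbian_OS_v([^\s]*)', re.I)
matchResult = osVersionMatcher.match("ManufacturerSoftwareBuild  M08765_Symbian_OS_v9.5")

assert matchResult.groups() == ("9.5",)
assert "v" + matchResult.groups()[0].replace(".", "") == "v95"
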
-def getBuildableBldInfBuildPlatforms(aBldInfBuildPlatforms,
-									aDefaultOSBuildPlatforms,
-									aBaseDefaultOSBuildPlatforms,
-									aBaseUserDefaultOSBuildPlatforms):
-	"""Obtain a set of build platform names supported by a bld.inf file
-
-	Build platform deduction is based on both the contents of the PRJ_PLATFORMS section of
-	a bld.inf file together with a hard-coded set of default build platforms supported by
-	the build system itself."""
-
-	expandedBldInfBuildPlatforms = []
-	removePlatforms = set()
-
-	for bldInfBuildPlatform in aBldInfBuildPlatforms:
-		if bldInfBuildPlatform.upper() == "DEFAULT":
-			expandedBldInfBuildPlatforms.extend(aDefaultOSBuildPlatforms.split())
-		elif bldInfBuildPlatform.upper() == "BASEDEFAULT":
-			expandedBldInfBuildPlatforms.extend(aBaseDefaultOSBuildPlatforms.split())
-		elif bldInfBuildPlatform.upper() == "BASEUSERDEFAULT":
-			expandedBldInfBuildPlatforms.extend(aBaseUserDefaultOSBuildPlatforms.split())
-		elif bldInfBuildPlatform.startswith("-"):
-			removePlatforms.add(bldInfBuildPlatform.lstrip("-").upper())
-		else:
-			expandedBldInfBuildPlatforms.append(bldInfBuildPlatform.upper())
-
-	if len(expandedBldInfBuildPlatforms) == 0:
-		expandedBldInfBuildPlatforms.extend(aDefaultOSBuildPlatforms.split())
-
-	# make a set of platforms that can be built
-	buildableBldInfBuildPlatforms = set(expandedBldInfBuildPlatforms)
-
-	# Add platforms that are buildable by virtue of the presence of another
-	for piggyBackedPlatform in PiggyBackedBuildPlatforms:
-		if piggyBackedPlatform in buildableBldInfBuildPlatforms:
-			buildableBldInfBuildPlatforms.update(PiggyBackedBuildPlatforms.get(piggyBackedPlatform))
-
-	# Remove platforms that were negated
-	buildableBldInfBuildPlatforms -= removePlatforms
-
-	return buildableBldInfBuildPlatforms
-
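
A hypothetical call, assuming raptor_meta is importable (for example with sbsv2/raptor/python on PYTHONPATH); the default platform strings are made up for illustration. DEFAULT expands to the default platforms, '-WINSCW' then removes WINSCW, and GCCXML is piggy-backed onto ARMV5:

import raptor_meta

platforms = raptor_meta.getBuildableBldInfBuildPlatforms(
	['DEFAULT', '-WINSCW'],      # PRJ_PLATFORMS content
	'ARMV5 WINSCW',              # default OS build platforms
	'ARMV5 WINSCW GCCXML',       # "base" defaults
	'ARMV5 WINSCW')              # "base user" defaults

assert platforms == set(['ARMV5', 'GCCXML'])
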
-
-def getPreProcessorCommentDetail (aPreProcessorComment):
-	"""Takes a preprocessor comment and returns an array containing the filename and linenumber detail."""
-
-	commentDetail = []
-	commentMatch = re.search('# (?P<LINENUMBER>\d+) "(?P<FILENAME>.*)"', aPreProcessorComment)
-
-	if commentMatch:
-		filename = commentMatch.group('FILENAME')
-		filename = os.path.abspath(filename)
-		filename = re.sub(r'\\\\', r'\\', filename)
-		filename = re.sub(r'//', r'/', filename)
-		filename = generic_path.Path(filename)
-		linenumber = int (commentMatch.group('LINENUMBER'))
-
-		commentDetail.append(filename)
-		commentDetail.append(linenumber)
-
-	return commentDetail
-
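
Hypothetical usage, assuming raptor_meta is importable: the preprocessor emits markers of the form '# <line> "<file>"', which this helper unpacks into a generic_path.Path and an integer line number.

import raptor_meta

detail = raptor_meta.getPreProcessorCommentDetail('# 42 "/src/app/group/bld.inf"')
print(str(detail[0]))   # the absolute, normalised form of /src/app/group/bld.inf
print(detail[1])        # 42
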
-
-def getSpecName(aFileRoot, fullPath=False):
-	"""Returns a build spec name: this is the file root (full path
-	or simple file name) made safe for use as a file name."""
-
-	if fullPath:
-		specName = str(aFileRoot).replace("/","_")
-		specName = specName.replace(":","")
-	else:
-		specName = aFileRoot.File()
-
-	return specName.lower()
-
-
-# Classes
-
-class MetaDataError(Exception):
-	"""Fatal error wrapper, to be thrown directly back to whatever is calling."""
-
-	def __init__(self, aText):
-		self.Text = aText
-	def __str__(self):
-		return repr(self.Text)
-
-
-class PreProcessedLine(str):
-	"""Custom string class that accepts filename and line number information from
-	a preprocessed context."""
-
-	def __new__(cls, value, *args, **keywargs):
-		return str.__new__(cls, value)
-
-	def __init__(self, value, aFilename, aLineNumber):
-		self.filename = aFilename
-		self.lineNumber = aLineNumber
-
-	def getFilename (self):
-		return self.filename
-
-	def getLineNumber (self):
-		return self.lineNumber
-
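
A minimal illustration of PreProcessedLine, assuming raptor_meta is importable: it behaves like a normal string but also carries the originating file and line number.

import raptor_meta

line = raptor_meta.PreProcessedLine("PRJ_MMPFILES", "/src/app/group/bld.inf", 7)
assert line == "PRJ_MMPFILES"
assert line.getFilename() == "/src/app/group/bld.inf"
assert line.getLineNumber() == 7
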
-class PreProcessor(raptor_utilities.ExternalTool):
-	"""Preprocessor wrapper suitable for Symbian metadata file processing."""
-
-	def __init__(self, aPreProcessor,
-				 aStaticOptions,
-				 aIncludeOption,
-				 aMacroOption,
-				 aPreIncludeOption,
-				 aRaptor):
-		raptor_utilities.ExternalTool.__init__(self, aPreProcessor)
-		self.__StaticOptions = aStaticOptions
-		self.__IncludeOption = aIncludeOption
-		self.__MacroOption = aMacroOption
-		self.__PreIncludeOption = aPreIncludeOption
-
-		self.filename = ""
-		self.__Macros = []
-		self.__IncludePaths = []
-		self.__PreIncludeFile = ""
-		self.raptor = aRaptor
-
-	def call(self, aArgs, sourcefilename):
-		""" Override call so that we can do our own error handling."""
-		tool = self._ExternalTool__Tool
-		commandline = tool + " " + aArgs + " " + str(sourcefilename)
-		try:
-			# the actual call differs between Windows and Unix
-			if raptor_utilities.getOSFileSystem() == "unix":
-				p = subprocess.Popen(commandline, \
-									 shell=True, bufsize=65535, \
-									 stdin=subprocess.PIPE, \
-									 stdout=subprocess.PIPE, \
-									 stderr=subprocess.PIPE, \
-									 close_fds=True)
-			else:
-				p = subprocess.Popen(commandline, \
-									 bufsize=65535, \
-									 stdin=subprocess.PIPE, \
-									 stdout=subprocess.PIPE, \
-									 stderr=subprocess.PIPE, \
-									 universal_newlines=True)
-
-			# run the command and wait for all the output
-			(self._ExternalTool__Output, errors) = p.communicate()
-
-			if self.raptor.debugOutput:
-				self.raptor.Debug("Preprocessing Start %s", str(sourcefilename))
-				self.raptor.Debug("Output:\n%s", self._ExternalTool__Output)
-				self.raptor.Debug("Errors:\n%s", errors)
-				self.raptor.Debug("Preprocessing End %s", str(sourcefilename))
-
-			incRE = re.compile("In file included from")
-			fromRE = re.compile(r"\s+from")
-			warningRE = re.compile("warning:|pasting.+token|from.+:")
-			remarkRE = re.compile("no newline at end of file|does not give a valid preprocessing token")
-
-			actualErr = False
-			if errors != "":
-				for error in errors.splitlines():
-					if incRE.search(error) or fromRE.search(error):
-						continue
-					if not remarkRE.search(error):
-						if warningRE.search(error):
-							self.raptor.Warn("%s: %s", tool, error)
-						else:
-							self.raptor.Error("%s: %s", tool, error)
-							actualErr = True
-			if actualErr:
-				raise MetaDataError("Errors in %s" % str(sourcefilename))
-
-		except Exception,e:
-			raise MetaDataError("Preprocessor exception: '%s' : in command : '%s'" % (str(e), commandline))
-
-		return 0	# all OK
-
-	def setMacros(self, aMacros):
-		self.__Macros = aMacros
-
-	def addMacro(self, aMacro):
-		self.__Macros.append(aMacro)
-
-	def addMacros(self, aMacros):
-		self.__Macros.extend(aMacros)
-
-	def getMacros(self):
-		return self.__Macros
-
-
-	def addIncludePath(self, aIncludePath):
-		p = str(aIncludePath)
-		if p == "":
-			self.raptor.Warn("attempt to set an empty preprocessor include path for %s" % str(self.filename))
-		else:
-			self.__IncludePaths.append(p)
-
-	def addIncludePaths(self, aIncludePaths):
-		for path in aIncludePaths:
-			self.addIncludePath(path)
-
-	def setIncludePaths(self, aIncludePaths):
-		self.__IncludePaths = []
-		self.addIncludePaths(aIncludePaths)
-
-	def setPreIncludeFile(self, aPreIncludeFile):
-		self.__PreIncludeFile = aPreIncludeFile
-
-	def preprocess(self):
-		preProcessorCall = self.__constructPreProcessorCall()
-		returnValue = self.call(preProcessorCall, self.filename)
-
-		return self.getOutput()
-
-	def __constructPreProcessorCall(self):
-
-		call = self.__StaticOptions
-
-		if self.__PreIncludeFile:
-			call += " " + self.__PreIncludeOption
-			call += " " + str(self.__PreIncludeFile)
-
-		for macro in self.__Macros:
-			call += " " + self.__MacroOption + macro
-
-		for includePath in self.__IncludePaths:
-			call += " " + self.__IncludeOption
-			call += " " + str(includePath)
-
-		return call
-
-
-class MetaDataFile(object):
-	"""A generic representation of a Symbian metadata file
-
-	Symbian metadata files are subject to preprocessing, primarily with macros based
-	on the selected build platform.  This class provides a generic means of wrapping
-	up the preprocessing of such files."""
-
-	def __init__(self, aFilename, gnucpp, depfiles, aRootLocation=None, log=None):
-		"""
-		@param aFilename	An MMP, bld.inf or other preprocessable build spec file
-		@param gnucpp 		location of GNU CPP
-		@param depfiles     	list to add dependency file tuples to
-		@param aRootLocation    where the file is located
-		@param log 		A class with Debug(<string>), Info(<string>) and Error(<string>) methods
-		"""
-		self.filename = aFilename
-		self.__RootLocation = aRootLocation
-		# Dictionary with key of build platform and a text string of processed output as values
-		self.__PreProcessedContent = {}
-		self.log = log
-		self.depfiles = depfiles
-
-		self.__gnucpp = gnucpp
-		if gnucpp is None:
-			raise ValueError('gnucpp must be set')
-
-	def depspath(self, platform):
-	   """ Where does dependency information go relative to platform's SBS_BUILD_DIR?
-	       Subclasses should redefine this
-	   """
-	   return str(platform['SBS_BUILD_DIR']) + "/" + str(self.__RootLocation) + "." + platform['key_md5'] + ".d"
-
-	def getContent(self, aBuildPlatform):
-
-		key = aBuildPlatform['key']
-
-		config_macros = []
-
-		adepfilename = self.depspath(aBuildPlatform)
-		generateDepsOptions = ""
-		if adepfilename:
-
-			if raptor_utilities.getOSPlatform().startswith("win"):
-				metatarget = "$(PARSETARGET)"
-			else:
-				metatarget = "'$(PARSETARGET)'"
-			generateDepsOptions = "-MD -MF%s -MT%s" % (adepfilename, metatarget)
-			self.depfiles.append((adepfilename, metatarget))
-			try:
-				os.makedirs(os.path.dirname(adepfilename))
-			except Exception, e:
-				self.log.Debug("Couldn't make bldinf outputpath for dependency generation")
-
-		config_macros = (aBuildPlatform['PLATMACROS']).split()
-
-		if not key in self.__PreProcessedContent:
-
-			preProcessor = PreProcessor(self.__gnucpp, '-undef -nostdinc ' + generateDepsOptions + ' ',
-										'-I', '-D', '-include', self.log)
-			preProcessor.filename = self.filename
-
-			# always have the current directory on the include path
-			preProcessor.addIncludePath('.')
-
-			# the SYSTEMINCLUDE directories defined in the build config
-			# should be on the include path. This is added mainly to support
-			# Feature Variation as SYSTEMINCLUDE is usually empty at this point.
-			systemIncludes = aBuildPlatform['SYSTEMINCLUDE']
-			if systemIncludes:
-				preProcessor.addIncludePaths(systemIncludes.split())
-
-			preInclude = aBuildPlatform['VARIANT_HRH']
-
-			# for non-Feature Variant builds, the directory containing the HRH should
-			# be on the include path
-			if not aBuildPlatform['ISFEATUREVARIANT']:
-				preProcessor.addIncludePath(preInclude.Dir())
-
-			# and EPOCROOT/epoc32/include
-			preProcessor.addIncludePath(aBuildPlatform['EPOCROOT'].Append('epoc32/include'))
-
-			# and the directory containing the bld.inf file
-			if self.__RootLocation is not None and str(self.__RootLocation) != "":
-				preProcessor.addIncludePath(self.__RootLocation)
-
-			# and the directory containing the file we are processing
-			preProcessor.addIncludePath(self.filename.Dir())
-
-			# there is always a pre-include file
-			preProcessor.setPreIncludeFile(preInclude)
-
-			macros = ["SBSV2"]
-
-			if config_macros:
-				macros.extend(config_macros)
-
-			if macros:
-				for macro in macros:
-					preProcessor.addMacro(macro + "=_____" +macro)
-
-			# extra "raw" macros that do not need protecting
-			preProcessor.addMacro("__GNUC__=3")
-
-			preProcessorOutput = preProcessor.preprocess()
-
-			# Resurrect preprocessing replacements
-			pattern = r'([\\|/]| |) ?_____(('+macros[0]+')'
-			for macro in macros[1:]:
-				pattern += r'|('+macro+r')'
-
-			pattern += r'\s*)'
-			# Work on all Macros in one substitution.
-			text = re.sub(pattern, r"\1\2", preProcessorOutput)
-			text = re.sub(r"\n[\t ]*", r"\n", text)
-
-			self.__PreProcessedContent[key] = text
-
-		return self.__PreProcessedContent[key]
-
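
A quick standalone illustration of the macro "resurrection" above: macros are passed to the preprocessor as MACRO=_____MACRO so they survive expansion, and the _____ guards are then stripped from the output in a single substitution (the macro names here are illustrative):

import re

macros = ["SBSV2", "ARMV5"]
pattern = r'([\\|/]| |) ?_____((' + macros[0] + ')'
for macro in macros[1:]:
	pattern += r'|(' + macro + r')'
pattern += r'\s*)'

text = "MMPFILE /_____ARMV5/urel _____SBSV2 build"
assert re.sub(pattern, r"\1\2", text) == "MMPFILE /ARMV5/urel SBSV2 build"
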
-class MMPFile(MetaDataFile):
-	"""A generic representation of a Symbian metadata file
-
-	Symbian metadata files are subject to preprocessing, primarily with macros based
-	on the selected build platform.  This class provides a generic means of wrapping
-	up the preprocessing of such files."""
-
-	def __init__(self, aFilename, gnucpp, bldinf, depfiles, log=None):
-		"""
-		@param aFilename	An MMP, bld.inf or other preprocessable build spec file
-		@param gnucpp 		location of GNU CPP
-		@param bldinf		the bld.inf file this mmp was specified in
-		@param depfiles         list to fill with mmp dependency files
-		@param log 		A class with Debug(<string>), Info(<string>) and Error(<string>) methods
-		"""
-		super(MMPFile, self).__init__(aFilename, gnucpp, depfiles, str(bldinf.filename.Dir()),  log)
-		self.__bldinf = bldinf
-		self.depfiles = depfiles
-
-		self.__gnucpp = gnucpp
-		if gnucpp is None:
-			raise ValueError('gnucpp must be set')
-
-	def depspath(self, platform):
-	   """ Where does dependency information go relative to platform's SBS_BUILD_DIR?
-	       Subclasses should redefine this
-	   """
-	   return self.__bldinf.outputpath(platform) + "/" + self.filename.File() + '.' + platform['key_md5'] + ".d"
-
-class Export(object):
-	"""Single processed PRJ_EXPORTS or PRJ_TESTEXPORTS entry from a bld.inf file"""
-
-	def getPossiblyQuotedStrings(cls,spec):
-		""" 	Split a string based on whitespace
-			but keep double quoted substrings together.
-		"""
-		inquotes=False
-		intokengap=False
-		sourcedest=[]
-		word = 0
-		for c in spec:
-			if c == '"':
-				if inquotes:
-					inquotes = False
-					word += 1
-					intokengap = True
-				else:
-					inquotes = True
-					intokengap = False
-				pass
-			elif c == ' ' or c == '\t':
-				if inquotes:
-					if len(sourcedest) == word:
-						sourcedest.append(c)
-					else:
-						sourcedest[word] += c
-				else:
-					if intokengap:
-						# gobble unquoted spaces
-						pass
-					else:
-						word += 1
-						intokengap=True
-				pass
-			else:
-				intokengap = False
-				if len(sourcedest) == word:
-					sourcedest.append(c)
-				else:
-					sourcedest[word] += c
-
-		return sourcedest
-
-	getPossiblyQuotedStrings = classmethod(getPossiblyQuotedStrings)
-
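
Hypothetical usage, assuming raptor_meta is importable: whitespace splits an export specification into words, but double-quoted substrings stay together so that file names containing spaces survive.

import raptor_meta

parts = raptor_meta.Export.getPossiblyQuotedStrings('"my file.txt" "sub dir\\my file.txt"')
assert parts == ['my file.txt', 'sub dir\\my file.txt']

parts = raptor_meta.Export.getPossiblyQuotedStrings('source.txt   dest.txt')
assert parts == ['source.txt', 'dest.txt']
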
-
-	def __init__(self, aBldInfFile, aExportsLine, aType):
-		"""
-		Rules from the OS library for convenience:
-
-		For PRJ_TESTEXPORTS
-		source_file_1 [destination_file]
-		source_file_n [destination_file]
-		If the source file is listed with a relative path, the path will
-	 	  be considered relative to the directory containing the bld.inf file.
-		If a destination file is not specified, the source file will be copied
-		  to the directory containing the bld.inf file.
-		If a relative path is specified with the destination file, the path
-		  will be considered relative to the directory containing the bld.inf file.
-
-		For PRJ_EXPORTS
-		source_file_1 [destination_file]
-		source_file_n [destination_file]
-		:zip zip_file [destination_path]
-
-		Note that:
-		If a source file is listed with a relative path, the path will be
-		considered relative to the directory containing the bld.inf file.
-
-		If a destination file is not specified, the source file will be copied
-		to epoc32\include\.
-
-		If a destination file is specified with a relative path, the path will
-		be considered relative to the directory epoc32\include\.
-
-		If a destination begins with a drive letter, then the file is copied to
-		epoc32\data\<drive_letter>\<path>. For example,
-
-			mydata.dat e:\appdata\mydata.dat
-			copies mydata.dat to epoc32\data\e\appdata\mydata.dat.
-			You can use any drive letter between A and Z.
-
-		A line can start with the preface :zip. This instructs the build tools
-		to unzip the specified zip file to the specified destination path. If a
-		destination path is not specified, the source file will be unzipped in
-		the root directory.
-
-
-		"""
-
-		# Work out what action is required - unzip or copy?
-		action = "copy"
-		typematch = re.match(r'^\s*(?P<type>:zip\s+)?(?P<spec>[^\s].*[^\s])\s*$',aExportsLine, re.I)
-
-		spec = typematch.group('spec')
-		if spec == None:
-			raise ValueError('must specify at least a source file for an export')
-
-		if typematch.group('type') is not None:
-			action = "unzip"
-
-		# Split the spec into source and destination but take care
-		# to allow filenames with quoted strings.
-		exportEntries = Export.getPossiblyQuotedStrings(spec)
-
-		# Get the source path as specified by the bld.inf
-		source_spec = exportEntries.pop(0).replace(' ','%20')
-
-		# Resolve the source file
-		sourcepath = generic_path.Path(raptor_utilities.resolveSymbianPath(str(aBldInfFile), source_spec))
-
-		# Find it if the case of the filename is wrong:
-		# Carry on even if we don't find it
-		foundfile = sourcepath.FindCaseless()
-		if foundfile != None:
-			source = str(foundfile).replace(' ','%20')
-		else:
-			source = str(sourcepath).replace(' ','%20')
-
-
-		# Get the destination path as specified by the bld.inf
-		if len(exportEntries) > 0:
-			dest_spec = exportEntries.pop(0).replace(' ','%20')
-		else:
-			dest_spec = None
-		# Destination list - list of destinations. For the WINSCW resource building stage,
-		# files are exported to the emulated drives and there are several locations, for example,
-		# PRJ_[TEST]EXPORTS
-		# 1234ABCD.SPD		z:/private/10009876/policy/1234ABCD.spd
-		# needs to end up copied in
-		# epoc32/data/z/private/10009876/policy/1234ABCD.spd *and* in
-		# epoc32/release/winscw/udeb/z/private/10009876/policy/1234ABCD.spd *and* in
-		# epoc32/release/winscw/urel/z/private/10009876/policy/1234ABCD.spd
-		dest_list = []
-
-		# Resolve the destination if one is specified
-		if dest_spec:
-			# check for troublesome characters
-			if ':' in dest_spec and not re.search('^[a-z]:', dest_spec, re.I):
-				raise ValueError("invalid filename " + dest_spec)
-
-			dest_spec = dest_spec.replace(' ','%20')
-			aSubType=""
-			if action == "unzip":
-				aSubType=":zip"
-				dest_spec = dest_spec.rstrip("\\/")
-
-			# Get the export destination(s) - note this can be a list of strings or just a string.
-			dest_list = raptor_utilities.resolveSymbianPath(str(aBldInfFile), dest_spec, aType, aSubType)
-
-			def process_dest(aDest):
-				if dest_spec.endswith('/') or  dest_spec.endswith('\\'):
-					m = generic_path.Path(source)
-					aDest += '/'+m.File()
-				return aDest
-
-			if isinstance(dest_list, list):
-				# Process each file in the list
-				dest_list = map(process_dest, dest_list)
-			else:
-				# Process the single destination
-				dest_list = process_dest(dest_list)
-
-		else:
-			# No destination was specified so we assume an appropriate one
-
-			dest_filename=generic_path.Path(source).File()
-
-			if aType == "PRJ_EXPORTS":
-				if action == "copy":
-					destination = '$(EPOCROOT)/epoc32/include/'+dest_filename
-				elif action == "unzip":
-					destination = '$(EPOCROOT)'
-			elif aType == "PRJ_TESTEXPORTS":
-				d = aBldInfFile.Dir()
-				if action == "copy":
-					destination = str(d.Append(dest_filename))
-				elif action == "unzip":
-					destination = "$(EPOCROOT)"
-			else:
-				raise ValueError("Export type should be 'PRJ_EXPORTS' or 'PRJ_TESTEXPORTS'. It was: "+str(aType))
-
-
-		self.__Source = source
-		if len(dest_list) > 0: # If the list has length > 0, this means there are several export destinations.
-			self.__Destination = dest_list
-		else: # Otherwise the list has length zero, so there is only a single export destination.
-			self.__Destination = destination
-		self.__Action = action
-
-	def getSource(self):
-		return self.__Source
-
-	def getDestination(self):
-		return self.__Destination # Note that this could be either a list, or a string, depending on the export destination
-
-	def getAction(self):
-		return self.__Action
-
-class ExtensionmakefileEntry(object):
-	def __init__(self, aGnuLine, aBldInfFile, tmp):
-
-		self.__BldInfFile = aBldInfFile
-		bldInfLocation = self.__BldInfFile.Dir()
-		biloc = str(bldInfLocation)
-		extInfLocation = tmp.filename.Dir()
-		eiloc = str(extInfLocation)
-
-		if eiloc is None or eiloc == "":
-			eiloc="." # Someone building with a relative raptor path
-		if biloc is None or biloc == "":
-			biloc="." # Someone building with a relative raptor path
-
-		self.__StandardVariables = {}
-		# Relative step-down to the root - let's try ignoring this for now, as it
-		# should amount to the same thing in a world where absolute paths are king
-		self.__StandardVariables['TO_ROOT'] = ""
-		# Top-level bld.inf location
-		self.__StandardVariables['TO_BLDINF'] = biloc
-		self.__StandardVariables['EXTENSION_ROOT'] = eiloc
-
-		# Get the directory and filename from the full path containing the extension makefile
-		self.__FullPath = generic_path.Join(eiloc,aGnuLine)
-		self.__FullPath = self.__FullPath.GetLocalString()
-		self.__Filename = os.path.split(self.__FullPath)[1]
-		self.__Directory = os.path.split(self.__FullPath)[0]
-
-	def getMakefileName(self):
-		return self.__Filename
-
-	def getMakeDirectory(self):
-		return self.__Directory
-
-	def getStandardVariables(self):
-		return self.__StandardVariables
-
-class Extension(object):
-	"""Single processed PRJ_EXTENSIONS or PRJ_TESTEXTENSIONS START EXTENSIONS...END block
-	from a bld.inf file"""
-
-	def __init__(self, aBldInfFile, aStartLine, aOptionLines, aBuildPlatform, aRaptor):
-		self.__BldInfFile = aBldInfFile
-		self.__Options = {}
-		self.interface = ""
-		self.__Raptor = aRaptor
-
-		makefile = ""
-		makefileMatch = re.search(r'^\s*START EXTENSION\s+(?P<MAKEFILE>\S+)\s*(?P<NAMETAG>\S*)$', aStartLine, re.I)
-
-		self.__RawMakefile = ""
-
-		if (makefileMatch):
-			self.__RawMakefile = makefileMatch.group('MAKEFILE')
-			self.nametag = makefileMatch.group('NAMETAG').lower()
-
-			# Ensure all \'s are translated into /'s if required
-			self.interface = self.__RawMakefile
-			self.interface = self.interface.replace("\\", "/").replace("/", ".")
-
-		# To support standalone testing, '$(' prefixed TEMs are assumed to start with
-		# a makefile variable and hence be fully located in FLM operation
-		if self.__RawMakefile.startswith("$("):
-			self.__Makefile = self.__RawMakefile + ".mk"
-		else:
-			self.__Makefile = '$(MAKEFILE_TEMPLATES)/' + self.__RawMakefile + ".mk"
-
-		for optionLine in aOptionLines:
-			optionMatch = re.search(r'^\s*(OPTION\s+)?(?P<VARIABLE>\S+)\s+(?P<VALUE>\S+.*)$',optionLine, re.I)
-			if optionMatch:
-				self.__Options[optionMatch.group('VARIABLE').upper()] = optionMatch.group('VALUE')
-
-		bldInfLocation = self.__BldInfFile.Dir()
-
-		biloc = str(bldInfLocation)
-		if biloc is None or biloc == "":
-			biloc="." # Someone building with a relative raptor path
-
-		extInfLocation = aStartLine.filename.Dir()
-
-		eiloc = str(extInfLocation)
-		if eiloc is None or eiloc == "":
-			eiloc="." # Someone building with a relative raptor path
-
-		self.__StandardVariables = {}
-		# Relative step-down to the root - let's try ignoring this for now, as it
-		# should amount to the same thing in a world where absolute paths are king
-		self.__StandardVariables['TO_ROOT'] = ""
-		# Top-level bld.inf location
-		self.__StandardVariables['TO_BLDINF'] = biloc
-		# Location of bld.inf file containing the current EXTENSION block
-		self.__StandardVariables['EXTENSION_ROOT'] = eiloc
-
-		# If the interface exists, this is not a Template Extension Makefile, so don't look for a .meta file for it;
-		# the .meta handling below only applies to TEMs
-		try:
-			self.__Raptor.cache.FindNamedInterface(str(self.interface), aBuildPlatform['CACHEID'])
-		except KeyError: # This means that this Raptor doesn't have the interface self.interface, so we are in a TEM
-			# Read the extension meta file and get default options from it.  A TEM meta file is compulsory if a TEM is used
-			metaFilename = "%s/epoc32/tools/makefile_templates/%s.meta" % (aBuildPlatform['EPOCROOT'], self.__RawMakefile)
-			metaFile = None
-			try:
-				metaFile = open(metaFilename, "r")
-			except IOError, e:
-				self.__warn("Extension: %s - cannot open Meta file: %s" % (self.__RawMakefile, metaFilename))
-
-			if metaFile:
-				for line in metaFile.readlines():
-					defaultOptionMatch = re.search(r'^OPTION\s+(?P<VARIABLE>\S+)\s+(?P<VALUE>\S+.*)$',line, re.I)
-					if defaultOptionMatch and defaultOptionMatch.group('VARIABLE').upper() not in self.__Options.keys():
-						self.__Options[defaultOptionMatch.group('VARIABLE').upper()] = defaultOptionMatch.group('VALUE')
-
-				metaFile.close()
-
-	def __warn(self, format, *extras):
-		if (self.__Raptor):
-			self.__Raptor.Warn(format, *extras)
-
-	def getIdentifier(self):
-		return re.sub (r'\\|\/|\$|\(|\)', '_', self.__RawMakefile)
-
-	def getMakefile(self):
-		return self.__Makefile
-
-	def getOptions(self):
-		return self.__Options
-
-	def getStandardVariables(self):
-		return self.__StandardVariables
-
-class MMPFileEntry(object):
-	def __init__(self, aFilename, aTestOption, aARMOption):
-		self.filename = aFilename
-		self.testoption = aTestOption
-		if aARMOption:
-			self.armoption = True
-		else:
-			self.armoption = False
-
-
-class BldInfFile(MetaDataFile):
-	"""Representation of a Symbian bld.inf file"""
-
-	def __init__(self, aFilename, gnucpp, depfiles, log=None):
-		MetaDataFile.__init__(self, aFilename, gnucpp, depfiles, None, log)
-		self.__Raptor = log
-		self.testManual = 0
-		self.testAuto = 0
-	# Generic
-
-	def getBuildPlatforms(self, aBuildPlatform):
-		platformList = []
-
-		for platformLine in self.__getSection(aBuildPlatform, 'PRJ_PLATFORMS'):
-			for platformEntry in platformLine.split():
-				platformList.append(platformEntry)
-
-		return platformList
-
-	# Build Platform Specific
-	def getMMPList(self, aBuildPlatform, aType="PRJ_MMPFILES"):
-		mmpFileList=[]
-		gnuList = []
-		makefileList = []
-		extFound = False
-		m = None
-
-		hashValue = {'mmpFileList': [] , 'gnuList': [], 'makefileList' : []}
-
-		for mmpFileEntry in self.__getSection(aBuildPlatform, aType):
-
-			actualBldInfRoot = mmpFileEntry.getFilename()
-			n = re.match('\s*(?P<makefiletype>(GNUMAKEFILE|N?MAKEFILE))\s+(?P<extmakefile>[^ ]+)\s*(support|manual)?\s*(?P<invalid>\S+.*)?\s*$',mmpFileEntry,re.I)
-			if n:
-
-				if (n.groupdict()['invalid']):
-					self.log.Error("%s (%d) : invalid extension makefile qualifier \"%s\"", mmpFileEntry.filename, mmpFileEntry.getLineNumber(), n.groupdict()['invalid'])
-				if raptor_utilities.getOSFileSystem() == "unix":
-					self.log.Warn("NMAKEFILE/GNUMAKEFILE/MAKEFILE keywords not supported on Linux")
-				else:
-					extmakefilearg = n.groupdict()['extmakefile']
-					bldInfDir = actualBldInfRoot.Dir()
-					extmakefilename = bldInfDir.Append(extmakefilearg)
-					extmakefile = ExtensionmakefileEntry(extmakefilearg, self.filename, mmpFileEntry)
-
-					if (n.groupdict()['makefiletype']).upper() == "GNUMAKEFILE":
-						gnuList.append(extmakefile)
-					else:
-						makefileList.append(extmakefile)
-			else:
-				# Currently there is only one possible option - build as arm.
-				# For TESTMMPFILES, the supported options are support, tidy, ignore, manual and build as arm
-				if aType.upper()=="PRJ_TESTMMPFILES":
-					if re.match('\s*(?P<name>[^ ]+)\s*(?P<baa>build_as_arm)?\s*(?P<support>support)?\s*(?P<ignore>ignore)?\s*(?P<tidy>tidy)?\s*(?P<manual>manual)?\s*(?P<invalid>\S+.*)?\s*$', mmpFileEntry, re.I):
-						m = re.match('\s*(?P<name>[^ ]+)\s*(?P<baa>build_as_arm)?\s*(?P<support>support)?\s*(?P<ignore>ignore)?\s*(?P<tidy>tidy)?\s*(?P<manual>manual)?\s*(?P<invalid>\S+.*)?\s*$', mmpFileEntry, re.I)
-				else:
-					if re.match('\s*(?P<name>[^ ]+)\s*(?P<baa>build_as_arm)?\s*(?P<invalid>\S+.*)?\s*$', mmpFileEntry, re.I):
-						m = re.match('\s*(?P<name>[^ ]+)\s*(?P<baa>build_as_arm)?\s*(?P<invalid>\S+.*)?\s*$', mmpFileEntry, re.I)
-
-			if m:
-				if (m.groupdict()['invalid']):
-					self.log.Error("%s (%d) : invalid .mmp file qualifier \"%s\"", mmpFileEntry.filename, mmpFileEntry.getLineNumber(), m.groupdict()['invalid'])
-
-				mmpFileName = m.groupdict()['name']
-				testmmpoption = "auto" # Setup tests to be automatic by default
-				tokens = m.groupdict()
-				for key,item in tokens.iteritems():
-					if key=="manual" and item=="manual":
-						testmmpoption = "manual"
-					elif key=="support" and item=="support":
-						testmmpoption = "support"
-					elif key=="ignore" and item=="ignore":
-						testmmpoption = "ignore"
-
-				buildasarm = False
-				if  m.groupdict()['baa']:
-					if m.groupdict()['baa'].lower() == 'build_as_arm':
-						buildasarm = True
-
-				if not mmpFileName.lower().endswith('.mmp'):
-					mmpFileName += '.mmp'
-				bldInfDir = actualBldInfRoot.Dir()
-				try:
-					mmpFileName = bldInfDir.Append(mmpFileName)
-					mmpfe = MMPFileEntry(mmpFileName, testmmpoption, buildasarm)
-					mmpFileList.append(mmpfe)
-				except ValueError, e:
-					self.log.Error("invalid .mmp file name: %s" % str(e))
-
-				m = None
-
-
-		hashValue['mmpFileList'] = mmpFileList
-		hashValue['gnuList'] = gnuList
-		hashValue['makefileList'] = makefileList
-
-		return hashValue
-
-	# Return a list of gnumakefiles used in the bld.inf
-	def getExtensionmakefileList(self, aBuildPlatform, aType="PRJ_MMPFILES",aString = ""):
-		extMakefileList=[]
-		m = None
-		for extmakeFileEntry in self.__getSection(aBuildPlatform, aType):
-
-			actualBldInfRoot = extmakeFileEntry.filename
-			if aType.upper()=="PRJ_TESTMMPFILES":
-				m = re.match('\s*GNUMAKEFILE\s+(?P<extmakefile>[^ ]+)\s*(?P<support>support)?\s*(?P<ignore>ignore)?\s*(?P<tidy>tidy)?\s*(?P<manual>manual)?\s*(?P<invalid>\S+.*)?\s*$',extmakeFileEntry,re.I)
-			else:
-				if aString == "gnumakefile":
-					m = re.match('\s*GNUMAKEFILE\s+(?P<extmakefile>[^ ]+)\s*(?P<invalid>\S+.*)?\s*$',extmakeFileEntry,re.I)
-				elif aString == "nmakefile":
-					m = re.match('\s*NMAKEFILE\s+(?P<extmakefile>[^ ]+)\s*(?P<invalid>\S+.*)?\s*$',extmakeFileEntry,re.I)
-				elif aString == "makefile":
-					m = re.match('\s*MAKEFILE\s+(?P<extmakefile>[^ ]+)\s*(?P<invalid>\S+.*)?\s*$',extmakeFileEntry,re.I)
-			if m:
-				if (m.groupdict()['invalid']):
-					self.log.Error("%s (%d) : invalid extension makefile qualifier \"%s\"", extmakeFileEntry.filename, extmakeFileEntry.getLineNumber(), m.groupdict()['invalid'])
-
-				extmakefilearg = m.groupdict()['extmakefile']
-				bldInfDir = actualBldInfRoot.Dir()
-				extmakefilename = bldInfDir.Append(extmakefilearg)
-				extmakefile = ExtensionmakefileEntry(extmakefilearg, self.filename, extmakeFileEntry)
-				extMakefileList.append(extmakefile)
-				m = None
-
-		return extMakefileList
-
-	def getTestExtensionmakefileList(self,aBuildPlatform,aString=""):
-		return self.getExtensionmakefileList(aBuildPlatform,"PRJ_TESTMMPFILES",aString)
-
-	def getTestMMPList(self, aBuildPlatform):
-		return self.getMMPList(aBuildPlatform, "PRJ_TESTMMPFILES")
-
-	def getRomTestType(self, aBuildPlatform):
-		testMMPList = self.getTestMMPList(aBuildPlatform)
-		for testMMPFileEntry in testMMPList['mmpFileList']:
-			if aBuildPlatform["TESTCODE"]:
-				# Calculate test type (manual or auto)
-				if testMMPFileEntry.testoption == "manual":
-					self.testManual += 1
-				if not (testMMPFileEntry.testoption == "support" or testMMPFileEntry.testoption == "manual" or testMMPFileEntry.testoption == "ignore"):
-					self.testAuto += 1
-		if self.testManual and self.testAuto:
-			return 'BOTH'
-		elif self.testAuto:
-			return 'AUTO'
-		elif self.testManual:
-			return 'MANUAL'
-		else:
-			return 'NONE'
-
-	def getExports(self, aBuildPlatform, aType="PRJ_EXPORTS"):
-		exportList = []
-
-		for exportLine in self.__getSection(aBuildPlatform, aType):
-
-			if not re.match(r'\S+', exportLine):
-				continue
-
-			try:
-				exportList.append(Export(exportLine.getFilename(), exportLine, aType))
-			except ValueError,e:
-				self.log.Error(str(e))
-
-		return exportList
-
-	def getTestExports(self, aBuildPlatform):
-		return self.getExports(aBuildPlatform, "PRJ_TESTEXPORTS")
-
-	def getExtensions(self, aBuildPlatform, aType="PRJ_EXTENSIONS"):
-		extensionObjects = []
-		start = ""
-		options = []
-
-		for extensionLine in self.__getSection(aBuildPlatform, aType):
-			if (re.search(r'^\s*START ',extensionLine, re.I)):
-				start = extensionLine
-			elif re.search(r'^\s*END\s*$',extensionLine, re.I):
-				extensionObjects.append(Extension(self.filename, start, options, aBuildPlatform, self.__Raptor))
-				start = ""
-				options = []
-			elif re.search(r'^\s*$',extensionLine, re.I):
-				continue
-			elif start:
-				options.append(extensionLine)
-
-		return extensionObjects
-
-	def getTestExtensions(self, aBuildPlatform):
-		return self.getExtensions(aBuildPlatform, "PRJ_TESTEXTENSIONS")
-
-	def __getSection(self, aBuildPlatform, aSection):
-
-		activeSection = False
-		sectionContent = []
-		lineContent = re.split(r'\n', self.getContent(aBuildPlatform));
-
-		currentBldInfFile = self.filename
-		currentLineNumber = 0
-
-		for line in lineContent:
-			if line.startswith("#"):
-				commentDetail = getPreProcessorCommentDetail(line)
-				currentBldInfFile = commentDetail[0]
-				currentLineNumber = commentDetail[1]-1
-				continue
-
-			currentLineNumber += 1
-
-			if not re.match(r'.*\S+', line):
-				continue
-			elif re.match(r'\s*' + aSection + r'\s*$', line, re.I):
-				activeSection = True
-			elif re.match(r'\s*PRJ_\w+\s*$', line, re.I):
-				activeSection = False
-			elif activeSection:
-				sectionContent.append(PreProcessedLine(line, currentBldInfFile, currentLineNumber))
-
-		return sectionContent
-
-	@staticmethod
-	def outputPathFragment(bldinfpath):
-		"""Return a relative path that uniquely identifies this bldinf file
-		   whilst being short so that it can be appended to epoc32/build.
-		   The build product of a particular bld.inf may be placed here.
-		   This affects its TEMs and its MMPs"""
-
-		absroot_str = os.path.abspath(str(bldinfpath)).lower().replace("\\","/")
-
-		uniqueid = hashlib.md5()
-		uniqueid.update(absroot_str)
-
-		specnamecomponents = (re.sub("^[A-Za-z]:", "", absroot_str)).split('/') # split, removing any drive identifier (if present)
-
-		pathlist=[]
-		while len(specnamecomponents) > 0:
-			top = specnamecomponents.pop()
-			if top.endswith('.inf'):
-				continue
-			elif top == 'group':
-				continue
-			else:
-				pathlist = [top]
-				break
-
-		pathlist.append("c_"+uniqueid.hexdigest()[:16])
-		return "/".join(pathlist)
-
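
Hypothetical usage, assuming raptor_meta is importable; the path is made up. The trailing .inf file name and any 'group' directory are skipped, and a 16-character md5 fragment keeps the result unique per bld.inf:

import raptor_meta

fragment = raptor_meta.BldInfFile.outputPathFragment("/src/mycomponent/group/bld.inf")
print(fragment)   # something like "mycomponent/c_0123456789abcdef"
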
-	def outputpath(self, platform):
-		""" The full path where product from this bldinf is created."""
-		return str(platform['SBS_BUILD_DIR']) + "/" + BldInfFile.outputPathFragment(self.filename)
-
-	def depspath(self, platform):
-	   """ Where does dependency information go relative to platform's SBS_BUILD_DIR?
-	       Subclasses should redefine this
-	   """
-	   return self.outputpath(platform) + "/bldinf." + platform['key_md5'] + ".d"
-
-
-
-class MMPRaptorBackend(MMPBackend):
-	"""A parser "backend" for the MMP language
-
-	This is used to map recognised MMP syntax onto a buildspec """
-
-	# Support priorities, with case-fixed mappings for use
-	epoc32priorities = {
-		'low':'Low',
-		'background':'Background',
-		'foreground':'Foreground',
-		'high':'High',
-		'windowserver':'WindowServer',
-		'fileserver':'FileServer',
-		'realtimeserver':'RealTimeServer',
-		'supervisor':'SuperVisor'
-		}
-
-	# Known capability flags with associated bitwise operations
-	supportedCapabilities = {
-		'tcb':(1<<0),
-		'commdd':(1<<1),
-		'powermgmt':(1<<2),
-		'multimediadd':(1<<3),
-		'readdevicedata':(1<<4),
-		'writedevicedata':(1<<5),
-		'drm':(1<<6),
-		'trustedui':(1<<7),
-		'protserv':(1<<8),
-		'diskadmin':(1<<9),
-		'networkcontrol':(1<<10),
-		'allfiles':(1<<11),
-		'swevent':(1<<12),
-		'networkservices':(1<<13),
-		'localservices':(1<<14),
-		'readuserdata':(1<<15),
-		'writeuserdata':(1<<16),
-		'location':(1<<17),
-		'surroundingsdd':(1<<18),
-		'userenvironment':(1<<19),
-	# Old capability names have zero value
-		'root':0,
-		'mediadd':0,
-		'readsystemdata':0,
-		'writesystemdata':0,
-		'sounddd':0,
-		'uidd':0,
-		'killanyprocess':0,
-		'devman':0,
-		'phonenetwork':0,
-		'localnetwork':0
-	  	}
-
-	library_re = re.compile(r"^(?P<name>[^{]+?)(?P<version>{(?P<major>[0-9]+)\.(?P<minor>[0-9]+)})?(\.(lib|dso))?$",re.I)
-
-
-	def __init__(self, aRaptor, aMmpfilename, aBldInfFilename):
-		super(MMPRaptorBackend,self).__init__()
-		self.platformblock = None
-		self.__Raptor = aRaptor
-		self.__debug("-----+++++ %s " % aMmpfilename)
-		self.BuildVariant = raptor_data.Variant(name = "mmp")
-		self.ApplyVariants = []
-		self.ResourceVariants = []
-		self.BitmapVariants = []
-		self.StringTableVariants = []
-		self.__bldInfFilename = aBldInfFilename
-		self.__targettype = "UNKNOWN"
-		self.__currentMmpFile = aMmpfilename
-		self.__defFileRoot = self.__currentMmpFile
-		self.__currentLineNumber = 0
-		self.__sourcepath = raptor_utilities.resolveSymbianPath(self.__currentMmpFile, "")
-		self.__userinclude = ""
-		self.__systeminclude = ""
-		self.__bitmapSourcepath = self.__sourcepath
-		self.__current_resource = ""
-		self.__resourceFiles = []
-		self.__pageConflict = []
-		self.__debuggable = ""
-		self.__compressionKeyword = ""
-		self.sources = []
-		self.capabilities = []
-
-		self.__TARGET = ""
-		self.__TARGETEXT = ""
-		self.deffile = ""
-		self.__LINKAS = ""
-		self.nostrictdef = False
-		self.featureVariant = False
-
-		self.__currentResourceVariant = None
-		self.__currentStringTableVariant = None
-		self.__explicitversion = False
-		self.__versionhex = ""
-
-		# "ALL" capability calculated based on the total capabilities currently supported
-		allCapabilities = 0
-		for supportedCapability in MMPRaptorBackend.supportedCapabilities.keys():
-			allCapabilities = allCapabilities | MMPRaptorBackend.supportedCapabilities[supportedCapability]
-		MMPRaptorBackend.supportedCapabilities['all'] = allCapabilities
-
-	# Permit unit-testing output without a Raptor context
-	def __debug(self, format, *extras):
-		if (self.__Raptor):
-			self.__Raptor.Debug(format, *extras)
-
-	def __warn(self, format, *extras):
-		if (self.__Raptor):
-			self.__Raptor.Warn(format, *extras)
-
-	def doPreProcessorComment(self,s,loc,toks):
-		commentDetail = getPreProcessorCommentDetail(toks[0])
-		self.__currentMmpFile = commentDetail[0].GetLocalString()
-		self.__currentLineNumber = commentDetail[1]
-		self.__debug("Current file %s, line number %s\n"  % (self.__currentMmpFile,str(self.__currentLineNumber)))
-		return "OK"
-
-	def doBlankLine(self,s,loc,toks):
-		self.__currentLineNumber += 1
-
-	def doStartPlatform(self,s,loc,toks):
-		self.__currentLineNumber += 1
-		self.__debug( "Start Platform block "+toks[0])
-		self.platformblock = toks[0]
-		return "OK"
-
-	def doEndPlatform(self,s,loc,toks):
-		self.__currentLineNumber += 1
-		self.__debug( "Finalise platform " + self.platformblock)
-		return "OK"
-
-	def doSetSwitch(self,s,loc,toks):
-		self.__currentLineNumber += 1
-		prefix=""
-		varname = toks[0].upper()
-
-		# A bright spark made the optionname the same as
-		# the env variable. One will override the other if we pass this
-		# on to make.  Add a prefix to prevent the clash.
-		if varname=='ARMINC':
-			prefix="SET_"
-			self.__debug( "Set switch "+toks[0]+" ON")
-			self.BuildVariant.AddOperation(raptor_data.Set(prefix+varname, "1"))
-
-		elif varname=='NOSTRICTDEF':
-			self.nostrictdef = True
-			self.__debug( "Set switch "+toks[0]+" ON")
-			self.BuildVariant.AddOperation(raptor_data.Set(prefix+varname, "1"))
-
-		elif varname == 'PAGED':
-			self.BuildVariant.AddOperation(raptor_data.Set(varname, "1"))
-			self.__debug( "Set switch PAGE ON")
-			self.BuildVariant.AddOperation(raptor_data.Set("PAGEDCODE_OPTION", "paged"))
-			self.__debug( "Set switch PAGEDCODE ON")
-			self.BuildVariant.AddOperation(raptor_data.Set("PAGEDDATA_OPTION", "paged"))
-			self.__debug( "Set data PAGEDDATA ON")
-			self.__pageConflict.append("PAGEDCODE")
-			self.__pageConflict.append("PAGEDDATA")
-
-		elif varname == 'UNPAGED':
-			self.BuildVariant.AddOperation(raptor_data.Set("PAGED", "0"))
-			self.__debug( "Set switch PAGED OFF")
-			self.BuildVariant.AddOperation(raptor_data.Set("PAGEDCODE_OPTION", "unpaged"))
-			self.__debug( "Set switch PAGEDCODE OFF")
-			self.BuildVariant.AddOperation(raptor_data.Set("PAGEDDATA_OPTION", "unpaged"))
-			self.__debug( "Set data PAGEDDATA OFF")
-			self.__pageConflict.append("UNPAGEDCODE")
-			self.__pageConflict.append("UNPAGEDDATA")
-
-		elif varname == 'PAGEDCODE':
-			self.BuildVariant.AddOperation(raptor_data.Set("PAGEDCODE_OPTION", "paged"))
-			self.__debug( "Set switch " + varname + " ON")
-			self.__pageConflict.append(varname)
-
-		elif varname == 'PAGEDDATA':
-			self.BuildVariant.AddOperation(raptor_data.Set("PAGEDDATA_OPTION", "paged"))
-			self.__debug( "Set switch " + varname + " ON")
-			self.__pageConflict.append(varname)
-
-		elif varname == 'UNPAGEDCODE':
-			self.BuildVariant.AddOperation(raptor_data.Set("PAGEDCODE_OPTION", "unpaged"))
-			self.__debug( "Set switch " + varname + " ON")
-			self.__pageConflict.append(varname)
-		elif varname == 'UNPAGEDDATA':
-			self.BuildVariant.AddOperation(raptor_data.Set("PAGEDDATA_OPTION", "unpaged"))
-			self.__debug( "Set switch " + varname + " ON")
-			self.__pageConflict.append(varname)
-
-		elif varname == 'NOLINKTIMECODEGENERATION':
-			self.BuildVariant.AddOperation(raptor_data.Set("LTCG",""))
-			self.__debug( "Set switch " + varname + " OFF")
-		elif varname == 'NOMULTIFILECOMPILATION':
-			self.BuildVariant.AddOperation(raptor_data.Set("MULTIFILE_ENABLED",""))
-			self.__debug( "Set switch " + varname + " OFF")
-
-		elif varname == 'DEBUGGABLE':
-			if self.__debuggable != "udeb":
-				self.__debuggable = "udeb urel"
-			else:
-				self.__Raptor.Warn("DEBUGGABLE keyword ignored as DEBUGGABLE_UDEBONLY is already specified")
-		elif varname == 'DEBUGGABLE_UDEBONLY':
-			if self.__debuggable != "":
-				self.__Raptor.Warn("DEBUGGABLE keyword has no effect as DEBUGGABLE or DEBUGGABLE_UDEBONLY is already set")
-			self.__debuggable = "udeb"
-		elif varname == 'FEATUREVARIANT':
-			self.BuildVariant.AddOperation(raptor_data.Set(varname,"1"))
-			self.featureVariant = True
-		elif varname in ['COMPRESSTARGET', 'NOCOMPRESSTARGET', 'INFLATECOMPRESSTARGET', 'BYTEPAIRCOMPRESSTARGET']:
-			if self.__compressionKeyword:
-				self.__Raptor.Warn("%s keyword in %s overrides earlier use of %s" % (varname, self.__currentMmpFile, self.__compressionKeyword))
-				self.BuildVariant.AddOperation(raptor_data.Set(self.__compressionKeyword,""))
-				self.__debug( "Set switch " + varname + " OFF")
-			self.BuildVariant.AddOperation(raptor_data.Set(varname,"1"))
-			self.__debug( "Set switch " + varname + " ON")
-			self.__compressionKeyword = varname
-		else:
-			self.__debug( "Set switch "+toks[0]+" ON")
-			self.BuildVariant.AddOperation(raptor_data.Set(prefix+varname, "1"))
-
-		return "OK"
-
-	def doAssignment(self,s,loc,toks):
-		self.__currentLineNumber += 1
-		varname = toks[0].upper()
-		if varname=='TARGET':
-			(self.__TARGET, self.__TARGETEXT) = os.path.splitext(toks[1])
-			self.__TARGETEXT = self.__TARGETEXT.lstrip('.')
-
-			self.BuildVariant.AddOperation(raptor_data.Set("REQUESTEDTARGETEXT", self.__TARGETEXT.lower()))
-
-			lowercase_TARGET = self.__TARGET.lower()
-			self.__debug("Set "+toks[0]+" to " + lowercase_TARGET)
-			self.__debug("Set REQUESTEDTARGETEXT to " + self.__TARGETEXT.lower())
-
-			self.BuildVariant.AddOperation(raptor_data.Set("TARGET", self.__TARGET))
-			self.BuildVariant.AddOperation(raptor_data.Set("TARGET_lower", lowercase_TARGET))
-			if  lowercase_TARGET !=  self.__TARGET:
-				self.__debug("TARGET is not lowercase: '%s' - might cause BC problems." % self.__TARGET)
-		elif varname=='TARGETTYPE':
-			self.__debug("Set "+toks[0]+" to " + str(toks[1]))
-			self.__targettype=toks[1]
-			if  self.__targettype.lower() == "none":
-				self.BuildVariant.AddOperation(raptor_data.Set("TARGET", ""))
-				self.BuildVariant.AddOperation(raptor_data.Set("TARGET_lower",""))
-				self.BuildVariant.AddOperation(raptor_data.Set("REQUESTEDTARGETEXT", ""))
-			self.BuildVariant.AddOperation(raptor_data.Set(varname,toks[1].lower()))
-
-		elif varname=='TARGETPATH':
-			value = toks[1].lower().replace('\\','/')
-			self.__debug("Set "+varname+" to " + value)
-			self.BuildVariant.AddOperation(raptor_data.Set(varname, value))
-
-		elif varname=='OPTION' or varname=='LINKEROPTION':
-			self.__debug("Set "+toks[1]+varname+" to " + str(toks[2]))
-			self.BuildVariant.AddOperation(raptor_data.Append(varname+"_"+toks[1].upper()," ".join(toks[2])))
-
-			# Warn about OPTION ARMASM
-			if "armasm" in toks[1].lower():
-				self.__Raptor.Warn(varname+" ARMASM has no effect (use OPTION ARMCC).")
-
-		elif varname=='OPTION_REPLACE':
-			# Warn about OPTION_REPLACE ARMASM
-			if "armasm" in toks[1].lower():
-				self.__Raptor.Warn("OPTION_REPLACE ARMASM has no effect (use OPTION_REPLACE ARMCC).")
-			else:
-				args = " ".join(toks[2])
-
-				searchReplacePairs = self.resolveOptionReplace(args)
-
-				for searchReplacePair in searchReplacePairs:
-					self.__debug("Append %s to OPTION_REPLACE_%s", searchReplacePair, toks[1].upper())
-					self.BuildVariant.AddOperation(raptor_data.Append(varname+"_"+toks[1].upper(),searchReplacePair))
-
-		elif varname=='SYSTEMINCLUDE' or varname=='USERINCLUDE':
-			for path in toks[1]:
-				resolved = raptor_utilities.resolveSymbianPath(self.__currentMmpFile, path)
-				self.BuildVariant.AddOperation(raptor_data.Append(varname,resolved))
-
-				if varname=='SYSTEMINCLUDE':
-					self.__systeminclude += ' ' + resolved
-					self.__debug("  %s = %s",varname, self.__systeminclude)
-				else:
-					self.__userinclude += ' ' + resolved
-					self.__debug("  %s = %s",varname, self.__userinclude)
-
-				self.__debug("Appending %s to %s",resolved, varname)
-
-			self.__systeminclude = self.__systeminclude.strip()
-			self.__systeminclude = self.__systeminclude.rstrip('\/')
-			self.__userinclude = self.__userinclude.strip()
-			self.__userinclude = self.__userinclude.rstrip('\/')
-
-		elif varname=='EXPORTLIBRARY':
-			# Remove extension from the EXPORTLIBRARY name
-			libName = toks[1].rsplit(".", 1)[0]
-			self.__debug("Set "+varname+" to " + libName)
-			self.BuildVariant.AddOperation(raptor_data.Set(varname,"".join(libName)))
-
-		elif varname=='CAPABILITY':
-			for cap in toks[1]:
-				self.__debug("Setting  "+toks[0]+": " + cap)
-				self.capabilities.append(cap)
-		elif varname=='DEFFILE':
-			self.__defFileRoot = self.__currentMmpFile
-			self.deffile = toks[1]
-		elif varname=='LINKAS':
-			self.__debug("Set "+toks[0]+"  OPTION to " + str(toks[1]))
-			self.__LINKAS = toks[1]
-			self.BuildVariant.AddOperation(raptor_data.Set(varname, toks[1]))
-		elif varname=='SECUREID' or varname=='VENDORID':
-			hexoutput = MMPRaptorBackend.canonicalUID(toks[1])
-			self.__debug("Set "+toks[0]+"  OPTION to " + hexoutput)
-			self.BuildVariant.AddOperation(raptor_data.Set(varname, hexoutput))
-		elif varname=='VERSION':
-			if toks[-1] == "EXPLICIT":
-				self.__explicitversion = True
-				self.BuildVariant.AddOperation(raptor_data.Set("EXPLICITVERSION", "1"))
-
-			vm = re.match(r'^(\d+)(\.(\d+))?$', toks[1])
-			if vm is not None:
-				version = vm.groups()
-				# the major version number
-				major = int(version[0],10)
-
-				# add in the minor number
-				minor = 0
-				if version[1] is not None:
-					minor = int(version[2],10)
-				else:
-					self.__Raptor.Warn("VERSION (%s) missing '.minor' in %s, using '.0'" % (toks[1],self.__currentMmpFile))
-
-				self.__versionhex = "%04x%04x" % (major, minor)
-				self.BuildVariant.AddOperation(raptor_data.Set(varname, "%d.%d" %(major, minor)))
-				self.BuildVariant.AddOperation(raptor_data.Set(varname+"HEX", self.__versionhex))
-				self.__debug("Set "+toks[0]+"  OPTION to " + toks[1])
-				self.__debug("Set "+toks[0]+"HEX OPTION to " + "%04x%04x" % (major,minor))
-
-			else:
-				self.__Raptor.Warn("Invalid version supplied to VERSION (%s), using default value" % toks[1])
-
-		elif varname=='EPOCHEAPSIZE':
-			# Standardise on sending hex numbers to the FLMS.
-
-			if toks[1].lower().startswith('0x'):
-				min = long(toks[1],16)
-			else:
-				min = long(toks[1],10)
-
-			if toks[2].lower().startswith('0x'):
-				max = long(toks[2],16)
-			else:
-				max = long(toks[2],10)
-
-			self.BuildVariant.AddOperation(raptor_data.Set(varname+"MIN", "%x" % min))
-			self.__debug("Set "+varname+"MIN  OPTION to '%x' (hex)" % min )
-			self.BuildVariant.AddOperation(raptor_data.Set(varname+"MAX", "%x" % max))
-			self.__debug("Set "+varname+"MAX  OPTION to '%x' (hex)" % max )
-
-			# Some toolchains require decimal versions of the min/max values, converted to KB and
-			# rounded up to the next 1KB boundary
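-			# e.g. a minimum of 0x2000 (8192 bytes) gives 8 KB, while 0x2001
-			# (8193 bytes) rounds up to 9 KB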
-			min_dec_kb = (int(min) + 1023) / 1024
-			max_dec_kb = (int(max) + 1023) / 1024
-			self.BuildVariant.AddOperation(raptor_data.Set(varname+"MIN_DEC_KB", "%d" % min_dec_kb))
-			self.__debug("Set "+varname+"MIN  OPTION KB to '%d' (dec)" % min_dec_kb )
-			self.BuildVariant.AddOperation(raptor_data.Set(varname+"MAX_DEC_KB", "%d" % max_dec_kb))
-			self.__debug("Set "+varname+"MAX  OPTION KB to '%d' (dec)" % max_dec_kb )
-
-		elif varname=='EPOCSTACKSIZE':
-			if toks[1].lower().startswith('0x'):
-				stack = long(toks[1],16)
-			else:
-				stack = long(toks[1],10)
-			self.BuildVariant.AddOperation(raptor_data.Set(varname, "%x" % stack))
-			self.__debug("Set "+varname+"  OPTION to '%x' (hex)" % stack  )
-		elif varname=='EPOCPROCESSPRIORITY':
-			# low, background, foreground, high, windowserver, fileserver, realtimeserver or supervisor
-			# These are case insensitive in metadata entries, but must be mapped to a static case pattern for use
-			prio = toks[1].lower()
-
-			# NOTE: Original validation here didn't actually work.  This has been corrected to provide an error, but probably needs re-examination.
-			if not MMPRaptorBackend.epoc32priorities.has_key(prio):
-				self.__Raptor.Error("Priority setting '%s' is not a valid priority - should be one of %s.", prio, MMPRaptorBackend.epoc32priorities.keys())
-			else:
-				self.__debug("Set "+toks[0]+" to " +  MMPRaptorBackend.epoc32priorities[prio])
-				self.BuildVariant.AddOperation(raptor_data.Set(varname,MMPRaptorBackend.epoc32priorities[prio]))
-		elif varname=='ROMTARGET' or varname=='RAMTARGET':
-			if len(toks) == 1:
-				self.__debug("Set "+toks[0]+" to <none>" )
-				self.BuildVariant.AddOperation(raptor_data.Set(varname,"<none>"))
-			else:
-				toks1 = str(toks[1]).replace("\\","/")
-				if toks1.find(",") != -1:
-					toks1 = re.sub("[,'\[\]]", "", toks1).replace("//","/")
-				self.__debug("Set "+toks[0]+" to " + toks1)
-				self.BuildVariant.AddOperation(raptor_data.Set(varname,toks1))
-		elif varname=='APPLY':
-			self.ApplyVariants.append(toks[1])
-		else:
-			self.__debug("Set "+toks[0]+" to " + str(toks[1]))
-			self.BuildVariant.AddOperation(raptor_data.Set(varname,"".join(toks[1])))
-
-			if varname=='LINKAS':
-				self.__LINKAS = toks[1]
-
-		return "OK"
-
-	def doAppend(self,s,loc,toks):
-		"""MMP command
-		"""
-		self.__currentLineNumber += 1
-		name=toks[0].upper()
-		if len(toks) == 1:
-			# the list can be empty, e.g. "MACRO _FRED_" where _FRED_ is defined in the HRH,
-			# which causes us to see just "MACRO" in the input - it is valid to ignore this
-			self.__debug("Empty append list for " + name)
-			return "OK"
-		self.__debug("Append to "+name+" the values: " +str(toks[1]))
-
-		if name=='MACRO':
-			name='MMPDEFS'
-		elif name=='LANG':
-			# don't break the environment variable
-			name='LANGUAGES'
-
-		for item in toks[1]:
-			if name=='MMPDEFS':
-				# Unquote any macros since the FLM does it anyhow
-				if item.startswith('"') and item.endswith('"') \
-				or item.startswith("'") and item.endswith("'"):
-					item = item.strip("'\"")
-			if name=='LIBRARY' or name=='DEBUGLIBRARY':
-				im = MMPRaptorBackend.library_re.match(item)
-				if not im:
-					self.__error("LIBRARY: %s seems to have an invalid name.\nExpected xxxx.lib or xxxx.dso\n where xxxx might be\n\tname or \n\tname(n,m) where n is a major version number and m is a minor version number\n" %item)
-					continue	# im is None, so there is nothing to extract from this item
-				d = im.groupdict()
-
-				item = d['name']
-				if d['version'] is not None:
-					item += "{%04x%04x}" % (int(d['major']), int(d['minor']))
-				item += ".dso"
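-				# Illustrative example (the exact accepted forms are defined by
-				# library_re): an entry like euser(2,1).lib would become
-				# euser{00020001}.dso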
-			elif name=='STATICLIBRARY':
-				# the FLM will decide on the ending appropriate to the platform
-				item = re.sub(r"^(.*)\.[Ll][Ii][Bb]$",r"\1", item)
-			elif name=="LANGUAGES":
-				item = item.lower()
-			elif (name=="WIN32_LIBRARY" and (item.startswith(".") or re.search(r'[\\|/]',item))) \
-				or (name=="WIN32_RESOURCE"):
-				# Relatively pathed win32 libraries, and all win32 resources, are resolved in relation
-				# to the wrapper bld.inf file in which their .mmp file is specified.  This equates to
-				# the current working directory in ABLD operation.
-				item = raptor_utilities.resolveSymbianPath(self.__bldInfFilename, item)
-				
-			self.BuildVariant.AddOperation(raptor_data.Append(name,item," "))
-			
-			# maintain a debug library list, the same as LIBRARY but with DEBUGLIBRARY values
-			# appended as they are encountered
-			if name=='LIBRARY' or name=='DEBUGLIBRARY':
-				self.BuildVariant.AddOperation(raptor_data.Append("LIBRARY_DEBUG",item," "))			
-
-		return "OK"
-
-	def canonicalUID(number):
-		""" convert a UID string into an 8 digit hexadecimal string without leading 0x """
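-		# e.g. "0x10009A45" -> "10009a45" and "255" -> "000000ff"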
-		if number.lower().startswith("0x"):
-			n = int(number,16)
-		else:
-			n = int(number,10)
-
-		return "%08x" % n
-
-	canonicalUID = staticmethod(canonicalUID)
-
-	def doUIDAssignment(self,s,loc,toks):
-		"""A single UID command results in a number of spec variables"""
-		self.__currentLineNumber += 1
-
-		hexoutput = MMPRaptorBackend.canonicalUID(toks[1][0])
-		self.__debug( "Set UID2 to %s" % hexoutput )
-		self.BuildVariant.AddOperation(raptor_data.Set("UID2", hexoutput))
-
-		if len(toks[1]) > 1:
-			hexoutput = MMPRaptorBackend.canonicalUID(toks[1][1])
-			self.__debug( "Set UID3 to %s" % hexoutput)
-			self.BuildVariant.AddOperation(raptor_data.Set("UID3", hexoutput))
-
-		self.__debug( "done set UID")
-		return "OK"
-
-	def doSourcePathAssignment(self,s,loc,toks):
-		self.__currentLineNumber += 1
-		self.__sourcepath = raptor_utilities.resolveSymbianPath(self.__currentMmpFile, toks[1])
-		self.__debug( "Remembering self.sourcepath state:  "+str(toks[0])+" is now " + self.__sourcepath)
-		self.__debug("self.__currentMmpFile: " + self.__currentMmpFile)
-		return "OK"
-
-
-	def doSourceAssignment(self,s,loc,toks):
-		self.__currentLineNumber += 1
-		self.__debug( "Setting "+toks[0]+" to " + str(toks[1]))
-		for file in toks[1]:
-			# file is always relative to sourcepath but some MMP files
-			# have items that begin with a slash...
-			file = file.lstrip("/")
-			source = generic_path.Join(self.__sourcepath, file)
-
-			# If the source path still contains an unexpanded $(EPOCROOT) then don't look up
-			# the caseless version, since we don't know at this time what $(EPOCROOT) will evaluate to.
-			if source.GetLocalString().startswith('$(EPOCROOT)'):
-				self.sources.append(str(source))	
-				self.__debug("Append SOURCE " + str(source))
-
-			else:
-				foundsource = source.FindCaseless()
-				if foundsource == None:
-					# Hope that the file will be generated later
-					self.__debug("Sourcefile not found: %s" % source)
-					foundsource = source
-
-				self.sources.append(str(foundsource))	
-				self.__debug("Append SOURCE " + str(foundsource))
-
-
-		self.__debug("		sourcepath: " + self.__sourcepath)
-		return "OK"
-
-	# Resource
-
-	def doOldResourceAssignment(self,s,loc,toks):
-		# Technically deprecated, but still used, so...
-		self.__currentLineNumber += 1
-		self.__debug("Processing old-style "+toks[0]+" "+str(toks[1]))
-
-		sysRes = (toks[0].lower() == "systemresource")
-
-		for rss in toks[1]:
-			variant = raptor_data.Variant()
-
-			source = generic_path.Join(self.__sourcepath, rss)
-			variant.AddOperation(raptor_data.Set("SOURCE", str(source)))
-			self.__resourceFiles.append(str(source))
-
-			target = source.File().rsplit(".", 1)[0]	# remove the extension
-			variant.AddOperation(raptor_data.Set("TARGET", target))
-			variant.AddOperation(raptor_data.Set("TARGET_lower", target.lower()))
-
-			header = target.lower() + ".rsg"			# filename policy
-			variant.AddOperation(raptor_data.Set("HEADER", header))
-
-			if sysRes:
-				dsrtp = self.getDefaultSystemResourceTargetPath()
-				variant.AddOperation(raptor_data.Set("TARGETPATH", dsrtp))
-
-			self.ResourceVariants.append(variant)
-
-		return "OK"
-
-	def getDefaultSystemResourceTargetPath(self):
-		# the default systemresource TARGETPATH value should come from the
-		# configuration rather than being hard-coded here. Then again, this
-		# should really be deprecated away into oblivion...
-		return "system/data"
-
-
-	def getDefaultResourceTargetPath(self, targettype):
-		# the different default TARGETPATH values should come from the
-		# configuration rather than being hard-coded here.
-		if targettype == "plugin":
-			return "resource/plugins"
-		if targettype == "pdl":
-			return "resource/printers"
-		return ""
-
-	def resolveOptionReplace(self, content):
-		"""
-		Constructs search/replace pairs based on .mmp OPTION_REPLACE entries for use on tool command lines
-		within FLMS.
-
-		Depending on what's supplied to OPTION_REPLACE <TOOL>, the core part of the <TOOL> command line
-		in the relevant FLM will have search and replace actions performed on it post-expansion (but pre-
-		any OPTION <TOOL> additions).
-
-		In terms of logic, we try to follow what ABLD does, as the current behaviour is undocumented.
-		What happens is a little inconsistent, and best described by some generic examples:
-
-			OPTION_REPLACE TOOL existing_option replacement_value
-
-				Replace all instances of "existing_option <existing value>" with "existing_option replacement_value"
-
-			OPTION_REPLACE TOOL existing_option replacement_option
-
-				Replace all instances of "existing_option" with "replacement_option".
-
-			If "existing_option" is present in isolation then a removal is performed.
-
-		Any values encountered that don't follow an option are ignored.
-		Options are identified as being prefixed with either '-' or '--'.
-
-		The front-end processes each OPTION_REPLACE entry and then appends one or more search/replace pairs
-		to an OPTION_REPLACE_<TOOL> variable in the following format:
-
-		     search<->replace
-		"""
-		# Note that, for compatibility reasons, the following is mostly a port to Python of the corresponding
-		# ABLD Perl, and hence maintains ABLD's idiosyncrasies in what it achieves
-
-		searchReplacePairs = []
-		matches = re.findall("-{1,2}\S+\s*(?!-)\S*",content)
-
-		if matches:
-			# reverse so we can process as a stack whilst retaining original order
-			matches.reverse()
-
-			while (len(matches)):
-				match = matches.pop()
-
-				standaloneMatch = re.match('^(?P<option>\S+)\s+(?P<value>\S+)$', match)
-
-				if (standaloneMatch):
-					# Option listed standalone with a replacement value
-					# Example:
-					# 	OPTION_REPLACE ARMCC --cpu 6
-					# Intention:
-					# 	Replace instances of  "--cpu <something>" with "--cpu 6"
-
-					# Substitute any existing "option <existing_value>" instances with a single word
-					# "@@<existing_value>" for later replacement
-					searchReplacePairs.append('%s <->@@' % standaloneMatch.group('option'))
-
-					# Replace "@@<existing_value>" entries from above with "option <new_value>" entries
-					# A pattern substitution is used to cover pre-existing values
-					searchReplacePairs.append('@@%%<->%s %s' % (standaloneMatch.group('option'), standaloneMatch.group('value')))
-				else:
-					# Options specified in search/replace pairs with optional values
-					# Example:
-					#	OPTION_REPLACE ARMCC --O2 --O3
-					# Intention:
-					#	Replace instances of "--O2" with "--O3"
-
-					# At this point we will be looking at just the search option - there may or may not
-					# be a replacement to consider
-					search = match
-					replace = ""
-					if len(matches):
-						replace = matches.pop()
-
-					searchReplacePairs.append('%s<->%s' % (search, replace))
-
-			# Replace spaces to maintain word-based grouping in downstream makefile lists
-			for i in range(0,len(searchReplacePairs)):
-				searchReplacePairs[i] = searchReplacePairs[i].replace(' ','%20')
-
-		return searchReplacePairs
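-
-	# Worked example, tracing resolveOptionReplace() above:
-	#   OPTION_REPLACE ARMCC --cpu 6      (content "--cpu 6")
-	#     matches the standalone case and yields the pairs
-	#       ["--cpu%20<->@@", "@@%<->--cpu%206"]
-	#   OPTION_REPLACE ARMCC --O2 --O3    (content "--O2 --O3")
-	#     yields the single pair ["--O2<->--O3"]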
-
-	def doStartResource(self,s,loc,toks):
-		self.__currentLineNumber += 1
-		self.__debug("Start RESOURCE "+toks[1])
-
-		self.__current_resource = generic_path.Path(self.__sourcepath, toks[1])
-		self.__current_resource = str(self.__current_resource)
-
-		self.__debug("sourcepath: " + self.__sourcepath)
-		self.__debug("self.__current_resource source: " + toks[1])
-		self.__debug("adjusted self.__current_resource source=" + self.__current_resource)
-
-		self.__currentResourceVariant = raptor_data.Variant()
-		self.__currentResourceVariant.AddOperation(raptor_data.Set("SOURCE", self.__current_resource))
-		self.__resourceFiles.append(self.__current_resource)
-
-		# The target name is the basename of the resource without the extension
-		# e.g. "/fred/129ab34f.rss" would have a target name of "129ab34f"
-		target = self.__current_resource.rsplit("/",1)[-1]
-		target = target.rsplit(".",1)[0]
-		self.__currentResourceVariant.AddOperation(raptor_data.Set("TARGET", target))
-		self.__currentResourceVariant.AddOperation(raptor_data.Set("TARGET_lower", target.lower()))
-		self.__headerspecified = False
-		self.__headeronlyspecified = False
-		self.__current_resource_header = target.lower() + ".rsg"
-
-		return "OK"
-
-	def doResourceAssignment(self,s,loc,toks):
-		""" Assign variables for resource files """
-		self.__currentLineNumber += 1
-		varname = toks[0].upper() # the mmp keyword
-		varvalue = "".join(toks[1])
-
-		# Get rid of any .rsc extension because the build system
-		# needs to have it stripped off to calculate other names
-		# for other purposes, and we aren't going to make it
-		# optional anyhow.
-		if varname == "TARGET":
-			target_withext = varvalue.replace("\\","/").rsplit("/",1)[-1]
-			target = target_withext.rsplit(".",1)[0]
-			self.__current_resource_header = target.lower() + ".rsg"
-			self.__currentResourceVariant.AddOperation(raptor_data.Set("TARGET_lower", target.lower()))
-			self.__debug("Set resource "+varname+" to " + target)
-			self.__currentResourceVariant.AddOperation(raptor_data.Set(varname,target))
-		elif varname == "TARGETPATH":
-			varvalue=varvalue.replace('\\','/')
-			self.__debug("Set resource "+varname+" to " + varvalue)
-			self.__currentResourceVariant.AddOperation(raptor_data.Set(varname,varvalue))
-		else:
-			self.__debug("Set resource "+varname+" to " + varvalue)
-			self.__currentResourceVariant.AddOperation(raptor_data.Set(varname,varvalue))
-		return "OK"
-
-	def doResourceAppend(self,s,loc,toks):
-		self.__currentLineNumber += 1
-		self.__debug("Append resource to "+toks[0]+" the values: " +str(toks[1]))
-		varname = toks[0].upper()
-
-		# we cannot use LANG as it interferes with the environment
-		if varname == "LANG":
-			varname = "LANGUAGES"
-
-		for item in toks[1]:
-			if varname == "LANGUAGES":
-				item = item.lower()
-			self.__currentResourceVariant.AddOperation(raptor_data.Append(varname,item))
-		return "OK"
-
-	def doResourceSetSwitch(self,s,loc,toks):
-		self.__currentLineNumber += 1
-		name = toks[0].upper()
-
-		if name == "HEADER":
-			self.__headerspecified = True
-
-		elif name == "HEADERONLY":
-			self.__headeronlyspecified = True
-
-		else:
-			value = "1"
-			self.__debug( "Set resource switch " + name + " " + value)
-			self.__currentResourceVariant.AddOperation(raptor_data.Set(name, value))
-
-		return "OK"
-
-	def doEndResource(self,s,loc,toks):
-		self.__currentLineNumber += 1
-
-		# The header name can change depending on whether a TARGET was defined, so it must be appended at the end
-		if self.__headerspecified:
-			self.__debug("Set resource switch HEADER " + self.__current_resource_header)
-			self.__currentResourceVariant.AddOperation(raptor_data.Set("HEADER", self.__current_resource_header))
-
-		if self.__headeronlyspecified:
-			self.__debug("Set resource switch HEADERONLY " + self.__current_resource_header)
-			self.__currentResourceVariant.AddOperation(raptor_data.Set("HEADER", self.__current_resource_header))
-			self.__currentResourceVariant.AddOperation(raptor_data.Set("HEADERONLY", "True"))
-
-		self.__debug("End RESOURCE")
-		self.ResourceVariants.append(self.__currentResourceVariant)
-		self.__currentResourceVariant = None
-		self.__current_resource = ""
-		return "OK"
-
-	# Bitmap
-
-	def doStartBitmap(self,s,loc,toks):
-		self.__currentLineNumber += 1
-		self.__debug("Start BITMAP "+toks[1])
-
-		self.__currentBitmapVariant = raptor_data.Variant(name = toks[1].replace('.','_'))
-		# Use BMTARGET and BMTARGET_lower because that prevents
-		# confusion with the TARGET and TARGET_lower of our parent MMP
-		# when setting the OUTPUTPATH.  This in turn allows us to
-		# not get tripped up by multiple mbms being generated with
-		# the same name to the same directory.
-		self.__currentBitmapVariant.AddOperation(raptor_data.Set("BMTARGET", toks[1]))
-		self.__currentBitmapVariant.AddOperation(raptor_data.Set("BMTARGET_lower", toks[1].lower()))
-		self.__currentBitmapVariant.AddOperation(raptor_data.Set("SOURCE", ""))
-		return "OK"
-
-	def doBitmapAssignment(self,s,loc,toks):
-		self.__currentLineNumber += 1
-		self.__debug("Set bitmap "+toks[0]+" to " + str(toks[1]))
-		name = toks[0].upper()
-		value = "".join(toks[1])
-		if name == "TARGETPATH":
-			value = value.replace('\\','/')
-
-		self.__currentBitmapVariant.AddOperation(raptor_data.Set(name,value))
-		return "OK"
-
-	def doBitmapSourcePathAssignment(self,s,loc,toks):
-		self.__currentLineNumber += 1
-		self.__debug("Previous bitmap sourcepath: " + self.__bitmapSourcepath)
-		self.__bitmapSourcepath = raptor_utilities.resolveSymbianPath(self.__currentMmpFile, toks[1])
-		self.__debug("New bitmap sourcepath: " + self.__bitmapSourcepath)
-		return "OK"
-
-	def doBitmapSourceAssignment(self,s,loc,toks):
-		self.__currentLineNumber += 1
-		self.__debug( "Setting "+toks[0]+" to " + str(toks[1]))
-		# The first "source" is the colour depth for all the others.
-		# The depth format is b[,m] where b is the bitmap depth and m is
-		# the mask depth.
-		# Valid values for b are: 1 2 4 8 c4 c8 c12 c16 c24 c32 c32a (?)
-		# Valid values for m are: 1 8 (any number?)
-		#
-		# If m is specified then the bitmaps are in pairs: b0 m0 b1 m1...
-		# If m is not specified then there are no masks, just bitmaps: b0 b1...
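-		# Worked example, tracing the loop below: a source list of
-		#   c8,1 picture.bmp picture_mask.bmp
-		# gives bitmapdepth "c8" and maskdepth "1", and appends to SOURCE
-		#   DEPTH=c8 FILE=<sourcepath>/picture.bmp DEPTH=1 FILE=<sourcepath>/picture_mask.bmp
-		# (<sourcepath> stands for the current bitmap sourcepath)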
-		colordepth = toks[1][0].lower()
-		if "," in colordepth:
-			(bitmapdepth, maskdepth) = colordepth.split(",")
-		else:
-			bitmapdepth = colordepth
-			maskdepth = 0
-
-		sources=""
-		mask = False
-		for file in toks[1][1:]:
-			path = generic_path.Join(self.__bitmapSourcepath, file)
-			if sources:
-				sources += " "
-			if mask:
-				sources += "DEPTH=" + maskdepth + " FILE=" + str(path)
-			else:
-				sources += "DEPTH=" + bitmapdepth + " FILE=" + str(path)
-			if maskdepth:
-				mask = not mask
-		self.__debug("sources: " + sources)
-		self.__currentBitmapVariant.AddOperation(raptor_data.Append("SOURCE", sources))
-		return "OK"
-
-	def doBitmapSetSwitch(self,s,loc,toks):
-		self.__currentLineNumber += 1
-		self.__debug( "Set bitmap switch "+toks[0]+" ON")
-		self.__currentBitmapVariant.AddOperation(raptor_data.Set(toks[0].upper(), "1"))
-		return "OK"
-
-	def doEndBitmap(self,s,loc,toks):
-		self.__currentLineNumber += 1
-		self.__bitmapSourcepath = self.__sourcepath
-		self.BitmapVariants.append(self.__currentBitmapVariant)
-		self.__currentBitmapVariant = None
-		self.__debug("End BITMAP")
-		return "OK"
-
-	# Stringtable
-
-	def doStartStringTable(self,s,loc,toks):
-		self.__currentLineNumber += 1
-		self.__debug( "Start STRINGTABLE "+toks[1])
-
-		specstringtable = generic_path.Join(self.__sourcepath, toks[1])
-		uniqname = specstringtable.File().replace('.','_') # corrected, filename only
-		source = str(specstringtable.FindCaseless())
-
-		self.__debug("sourcepath: " + self.__sourcepath)
-		self.__debug("stringtable: " + toks[1])
-		self.__debug("adjusted stringtable source=" + source)
-
-		self.__currentStringTableVariant = raptor_data.Variant(name = uniqname)
-		self.__currentStringTableVariant.AddOperation(raptor_data.Set("SOURCE", source))
-		self.__currentStringTableVariant.AddOperation(raptor_data.Set("EXPORTPATH", ""))
-		self.__stringtableExported = False
-
-		# The target name by default is the name of the stringtable without the extension
-		# e.g. the stringtable "/fred/http.st" would have a default target name of "http"
-		stringtable_withext = specstringtable.File()
-		self.__stringtable = stringtable_withext.rsplit(".",1)[0].lower()
-		self.__currentStringTableVariant.AddOperation(raptor_data.Set("TARGET", self.__stringtable))
-
-		self.__stringtableHeaderonlyspecified = False
-
-		return "OK"
-
-	def doStringTableAssignment(self,s,loc,toks):
-		""" Assign variables for stringtables """
-		self.__currentLineNumber += 1
-		varname = toks[0].upper() # the mmp keyword
-		varvalue = "".join(toks[1])
-
-		# An EXPORTPATH value is resolved relative to the location of the
-		# .mmp file so that the exported header can be found by the rest
-		# of the build.
-		if varname == "EXPORTPATH":
-			finalvalue = raptor_utilities.resolveSymbianPath(self.__currentMmpFile, varvalue)
-			self.__stringtableExported = True
-		else:
-			finalvalue = varvalue
-
-		self.__debug("Set stringtable "+varname+" to " + finalvalue)
-		self.__currentStringTableVariant.AddOperation(raptor_data.Set(varname,finalvalue))
-		return "OK"
-
-	def doStringTableSetSwitch(self,s,loc,toks):
-		self.__currentLineNumber += 1
-		if toks[0].upper()== "HEADERONLY":
-			self.__stringtableHeaderonlyspecified = True
-			self.__debug( "Set stringtable switch "+toks[0]+" ON")
-			self.__currentStringTableVariant.AddOperation(raptor_data.Set(toks[0].upper(), "1"))
-		return "OK"
-
-	def doEndStringTable(self,s,loc,toks):
-		self.__currentLineNumber += 1
-
-		if not self.__stringtableExported:
-			# There was no EXPORTPATH specified for this stringtable
-			# so for our other code to be able to reference it we
-			# must add the path of the generated location to the userinclude path
-
-			ipath = "$(OUTPUTPATH)"
-			self.BuildVariant.AddOperation(raptor_data.Append("USERINCLUDE",ipath))
-			self.__userinclude += ' ' + ipath
-			self.__debug("  USERINCLUDE = %s", self.__userinclude)
-			self.__userinclude = self.__userinclude.strip()
-
-		self.StringTableVariants.append(self.__currentStringTableVariant)
-		self.__currentStringTableVariant = None
-		self.__debug("End STRINGTABLE")
-		if not self.__stringtableHeaderonlyspecified:
-			# Have to assume that this is where the cpp file will be.  This has to be maintained
-			# in sync with the FLM's idea of where this file should be.  We need a better way.
-			# Interfaces also need outputs that allow other interfaces to refer to their outputs
-			# without having to "know" where they will be.
-			self.sources.append('$(OUTPUTPATH)/' + self.__stringtable + '.cpp')
-		return "OK"
-
-
-	def doUnknownStatement(self,s,loc,toks):
-		self.__warn("%s (%d) : Unrecognised Keyword %s", self.__currentMmpFile, self.__currentLineNumber, str(toks))
-		self.__currentLineNumber += 1
-		return "OK"
-
-
-	def doUnknownBlock(self,s,loc,toks):
-		self.__warn("%s (%d) : Unrecognised Block %s", self.__currentMmpFile, self.__currentLineNumber, str(toks))
-		self.__currentLineNumber += 1
-		return "OK"
-
-	def doDeprecated(self,s,loc,toks):
-		self.__debug( "Deprecated command " + str(toks))
-		self.__warn("%s (%d) : %s is deprecated .mmp file syntax", self.__currentMmpFile, self.__currentLineNumber, str(toks))
-		self.__currentLineNumber += 1
-		return "OK"
-
-	def doNothing(self):
-		self.__currentLineNumber += 1
-		return "OK"
-
-	def finalise(self, aBuildPlatform):
-		"""Post-processing of data that is only applicable in the context of a fully
-		processed .mmp file."""
-		resolvedDefFile = ""
-
-		if self.__TARGET:
-			defaultRootName = self.__TARGET
-			if self.__TARGETEXT!="":
-				defaultRootName += "." + self.__TARGETEXT
-
-			# NOTE: Changing default .def file name based on the LINKAS argument is actually
-			# a defect, but this follows the behaviour of the current build system.
-			if (self.__LINKAS):
-				defaultRootName = self.__LINKAS
-
-			resolvedDefFile = self.resolveDefFile(defaultRootName, aBuildPlatform)
-			self.__debug("Resolved def file:  %s" % resolvedDefFile )
-			# We need to store this resolved deffile location for the FREEZE target
-			self.BuildVariant.AddOperation(raptor_data.Set("RESOLVED_DEFFILE", resolvedDefFile))
-
-		# If a deffile is specified, an FLM will put in a dependency.
-		# If a deffile is not specified then raptor_meta will guess a name but:
-		#	1) If the guess is wrong then the FLM will complain "no rule to make ..."
-		#	2) In some cases, e.g. plugin, 1) is not desirable as the presence of a def file
-		#		is not a necessity.  In these cases the FLM needs to know if DEFFILE
-		#		is a guess or not so it can decide if a dependency should be added.
-
-		# We check that the def file exists and that it is non-zero (incredible
-		# that this should be needed).
-
-		deffile_keyword="1"
-		if self.deffile == "":
-			# If the user didn't specify a deffile name then
-			# we must be guessing.
-			# Let's check whether our guess actually corresponds to a
-			# real file.  If it does then that confirms the guess.
-			# If there's no file then we still need to pass the name to make
-			# so that it can complain about the missing DEF file
-			# for this particular target type and fail to build this target.
-
-			deffile_keyword=""
-			try:
-				findpath = generic_path.Path(resolvedDefFile)
-				foundfile = findpath.FindCaseless()
-
-				if foundfile == None:
-					raise IOError("file not found")
-
-				self.__debug("Found DEFFILE  " + foundfile.GetLocalString())
-				rfstat = os.stat(foundfile.GetLocalString())
-
-				mode = rfstat[stat.ST_MODE]
-				if mode != None and stat.S_ISREG(mode) and rfstat[stat.ST_SIZE] > 0:
-					resolvedDefFile = str(foundfile)
-				else:
-					resolvedDefFile=""
-			except Exception,e:
-				self.__debug("While Searching for an IMPLIED  DEFFILE: %s: %s" % (str(e),str(findpath)) )
-				resolvedDefFile=""
-		else:
-			if not resolvedDefFile == "":
-				try:
-					findpath = generic_path.Path(resolvedDefFile)
-					resolvedDefFile = str(findpath.FindCaseless())
-					if resolvedDefFile=="None":
-						raise IOError("file not found")
-				except Exception,e:
-					self.__warn("While Searching for a SPECIFIED DEFFILE: %s: %s" % (str(e),str(findpath)) )
-					resolvedDefFile=""
-			else:
-				self.__warn("DEFFILE KEYWORD used (%s) but def file not resolved" % (self.deffile) )
-
-
-		self.BuildVariant.AddOperation(raptor_data.Set("DEFFILE", resolvedDefFile))
-		self.__debug("Set DEFFILE to " + resolvedDefFile)
-		self.BuildVariant.AddOperation(raptor_data.Set("DEFFILEKEYWORD", deffile_keyword))
-		self.__debug("Set DEFFILEKEYWORD to '%s'",deffile_keyword)
-
-		# if this target type has a default TARGETPATH other than "" for
-		# resources then we need to add that default to all resources which
-		# do not explicitly set the TARGETPATH themselves.
-		tp = self.getDefaultResourceTargetPath(self.getTargetType())
-		if tp:
-			for i,var in enumerate(self.ResourceVariants):
-				# does this resource specify its own TARGETPATH?
-				needTP = True
-				for op in var.ops:
-					if isinstance(op, raptor_data.Set) \
-					and op.name == "TARGETPATH":
-						needTP = False
-						break
-				if needTP:
-					self.ResourceVariants[i].AddOperation(raptor_data.Set("TARGETPATH", tp))
-
-		# some core build configurations need to know about the resource builds, and
-		# some resource building configurations need knowledge of the core build
-		for resourceFile in self.__resourceFiles:
-			self.BuildVariant.AddOperation(raptor_data.Append("RESOURCEFILES", resourceFile))
-
-		for i,var in enumerate(self.ResourceVariants):
-			self.ResourceVariants[i].AddOperation(raptor_data.Set("MAIN_TARGET_lower", self.__TARGET.lower()))
-			self.ResourceVariants[i].AddOperation(raptor_data.Set("MAIN_REQUESTEDTARGETEXT", self.__TARGETEXT.lower()))
-
-		# Create Capability variable in one SET operation (more efficient than multiple appends)
-		self.BuildVariant.AddOperation(raptor_data.Set("CAPABILITY"," ".join(self.capabilities)))
-
-		# Resolve combined capabilities as hex flags, for configurations that require them
-		capabilityFlag1 = 0
-		capabilityFlag2 = 0			# Always 0
-
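-		# A leading '-' on a capability means "remove that capability": XOR-ing
-		# with 0xffffffff before and after the OR below turns the bit-set into
-		# a bit-clear for inverted entries and is a no-op for normal entries.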
-		for capability in [c.lower() for c in self.capabilities]:
-			invert = 0
-
-			if capability.startswith('-'):
-				invert = 0xffffffff
-				capability = capability.lstrip('-')
-
-			if MMPRaptorBackend.supportedCapabilities.has_key(capability):
-				capabilityFlag1 = capabilityFlag1 ^ invert
-				capabilityFlag1 = capabilityFlag1 | MMPRaptorBackend.supportedCapabilities[capability]
-				capabilityFlag1 = capabilityFlag1 ^ invert
-
-		capabilityFlag1 = "%08xu" % capabilityFlag1
-		capabilityFlag2 = "%08xu" % capabilityFlag2
-
-		self.BuildVariant.AddOperation(raptor_data.Set("CAPABILITYFLAG1", capabilityFlag1))
-		self.__debug ("Set CAPABILITYFLAG1 to " + capabilityFlag1)
-		self.BuildVariant.AddOperation(raptor_data.Set("CAPABILITYFLAG2", capabilityFlag2))
-		self.__debug ("Set CAPABILITYFLAG2 to " + capabilityFlag2)
-
-		# For non-Feature Variant builds, the location of the product include hrh file is
-		# appended to the SYSTEMINCLUDE list
-		if not aBuildPlatform['ISFEATUREVARIANT']:
-			productIncludePath = str(aBuildPlatform['VARIANT_HRH'].Dir())
-			self.BuildVariant.AddOperation(raptor_data.Append("SYSTEMINCLUDE",productIncludePath))
-			self.__debug("Appending product include location %s to SYSTEMINCLUDE",productIncludePath)
-
-		# Specifying both a PAGED* and its opposite UNPAGED* keyword in a .mmp file
-		# will generate a warning and the last keyword specified will take effect.
-		self.__pageConflict.reverse()
-		if "PAGEDCODE" in self.__pageConflict and "UNPAGEDCODE" in self.__pageConflict:
-			for x in self.__pageConflict:
-				if x == "PAGEDCODE" or x == "UNPAGEDCODE":
-					self.__Raptor.Warn("Both PAGEDCODE and UNPAGEDCODE are specified. The last one %s will take effect" % x)
-					break
-		if "PAGEDDATA" in self.__pageConflict and "UNPAGEDDATA" in self.__pageConflict:
-			for x in self.__pageConflict:
-				if x == "PAGEDDATA" or x == "UNPAGEDDATA":
-					self.__Raptor.Warn("Both PAGEDDATA and UNPAGEDDATA are specified. The last one %s will take effect" % x)
-					break
-
-		# Set Debuggable
-		self.BuildVariant.AddOperation(raptor_data.Set("DEBUGGABLE", self.__debuggable))
-
-		if self.__explicitversion:
-			self.BuildVariant.AddOperation(raptor_data.Append("UNIQUETARGETPATH","$(TARGET_lower)_$(VERSIONHEX)_$(REQUESTEDTARGETEXT)",'/'))
-		else:
-			self.BuildVariant.AddOperation(raptor_data.Append("UNIQUETARGETPATH","$(TARGET_lower)_$(REQUESTEDTARGETEXT)",'/'))
-
-		# Put the list of sourcefiles in with one Set operation - saves memory
-		# and performance over using multiple Append operations.
-		self.BuildVariant.AddOperation(raptor_data.Set("SOURCE",
-						   " ".join(self.sources)))
-
-	def getTargetType(self):
-		"""Target type in lower case - the standard format"""
-		return self.__targettype.lower()
-
-	def resolveDefFile(self, aTARGET, aBuildPlatform):
-		"""Returns a fully resolved DEFFILE entry depending on .mmp file location and TARGET, DEFFILE and NOSTRICTDEF
-		entries in the .mmp file itself (where appropriate).
-		Is able to deal with target names that have multiple '.' characters e.g. messageintercept.esockdebug.dll
-		"""
-
-		resolvedDefFile = ""
-		platform = aBuildPlatform['PLATFORM']
-
-		# Not having a default .def file directory is a pretty strong indicator that
-		# .def files aren't supported for the particular platform
-		if PlatformDefaultDefFileDir.has_key(platform):
-			(targetname,targetext) = os.path.splitext(aTARGET)
-			(defname,defext) = os.path.splitext(self.deffile)
-			if defext=="":
-				defext = ".def"
-
-			# NOTE: WORKAROUND
-			if len(targetext) > 4:
-				targetname += defext
-
-			if not self.deffile:
-				resolvedDefFile = targetname
-			else:
-				if re.search('[\\|\/]$', self.deffile):
-					# If DEFFILE is *solely* a path, signified by ending in a slash, then TARGET is the
-					# basis for the default .def filename but with the specified path as prefix
-					resolvedDefFile = self.deffile + targetname
-
-				else:
-					resolvedDefFile = defname
-
-				resolvedDefFile = resolvedDefFile.replace('~', PlatformDefaultDefFileDir[platform])
-
-			if resolvedDefFile:
-				if not self.nostrictdef:
-					resolvedDefFile += 'u'
-
-				if self.__explicitversion:
-					resolvedDefFile += '{' + self.__versionhex + '}'
-
-				resolvedDefFile += defext
-
-
-				# If a DEFFILE statement doesn't specify a path in any shape or form, prepend the default .def file
-				# location based on the platform being built
-				if not re.search('[\\\/]+', self.deffile):
-					resolvedDefFile = '../'+PlatformDefaultDefFileDir[platform]+'/'+resolvedDefFile
-
-				resolvedDefFile = raptor_utilities.resolveSymbianPath(self.__defFileRoot, resolvedDefFile, 'DEFFILE', "", str(aBuildPlatform['EPOCROOT']))
-
-		return resolvedDefFile
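-
-	# Illustrative trace of resolveDefFile() above, with hypothetical values: a TARGET
-	# of "foo.dll" with no DEFFILE statement and no NOSTRICTDEF resolves, just before
-	# the final resolveSymbianPath() call, to "../<platform default def dir>/foou.def".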
-
-
-def CheckedGet(self, key, default = None):
-	"""Extract a value from an Evaluator and raise an exception if the result is None.
-
-	An optional default can be set to replace a None value.
-
-	This function belongs in the Evaluator class logically. But
-	Evaluator doesn't know how to raise a Metadata error. Since
-	being able to raise a metadata error is the whole point of
-	the method, it makes sense to adapt the Evaluator class from
-	raptor_meta for the use of everything inside raptor_meta.
-
-	... so it will be added to the Evaluator class.
-	"""
-
-	value = self.Get(key)
-	if value == None:
-		if default == None:
-			raise MetaDataError("configuration " + self.buildUnit.name +
-							    " has no variable " + key)
-		else:
-			return default
-	return value
-
-raptor_data.Evaluator.CheckedGet = CheckedGet 
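-# After this assignment any Evaluator can be queried with, for example,
-# evaluator.CheckedGet("EPOCROOT"), as MetaReader does below.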
-
-
-class MetaReader(object):
-	"""Entry point class for Symbian metadata processing.
-
-	Provides a means of integrating "traditional" Symbian metadata processing
-	with the new Raptor build system."""
-
-	filesplit_re = re.compile(r"^(?P<name>.*)\.(?P<ext>[^\.]*)$")
-
-	def __init__(self, aRaptor, configsToBuild):
-		self.__Raptor = aRaptor
-		self.BuildPlatforms = []
-		self.ExportPlatforms = []
-
-		# Get the version of CPP that we are using
-		metadata = self.__Raptor.cache.FindNamedVariant("meta")
-		evaluator = self.__Raptor.GetEvaluator(None, raptor_data.BuildUnit(metadata.name, [metadata]) )
-		self.__gnucpp = evaluator.CheckedGet("GNUCPP")
-		self.__defaultplatforms = evaluator.CheckedGet("DEFAULT_PLATFORMS")
-		self.__basedefaultplatforms = evaluator.CheckedGet("BASE_DEFAULT_PLATFORMS")
-		self.__baseuserdefaultplatforms = evaluator.CheckedGet("BASE_USER_DEFAULT_PLATFORMS")
-
-		# Only read each variant.cfg once
-		variantCfgs = {}
-
-		# Group the list of configurations into "build platforms".
-		# A build platform is a set of configurations which share
-		# the same metadata. In other words, a set of configurations
-		# for which the bld.inf and MMP files pre-process to exactly
-		# the same text.
-		platforms = {}
-
-		# Exports are not "platform dependent" but they are configuration
-		# dependent because different configs can have different EPOCROOT
-		# and VARIANT_HRH values. Each "build platform" has one associated
-		# "export platform" but several "build platforms" can be associated
-		# with the same "export platform".
-		exports = {}
-
-		self.__Raptor.Debug("MetaReader: configsToBuild:  %s", [b.name for b in configsToBuild])
-		for buildConfig in configsToBuild:
-			# get everything we need to know about the configuration
-			evaluator = self.__Raptor.GetEvaluator(None, buildConfig)
-
-			detail = {}
-			detail['PLATFORM'] = evaluator.CheckedGet("TRADITIONAL_PLATFORM")
-			epocroot = evaluator.CheckedGet("EPOCROOT")
-			detail['EPOCROOT'] = generic_path.Path(epocroot)
-
-			sbs_build_dir = evaluator.CheckedGet("SBS_BUILD_DIR")
-			detail['SBS_BUILD_DIR'] = generic_path.Path(sbs_build_dir)
-			flm_export_dir = evaluator.CheckedGet("FLM_EXPORT_DIR")
-			detail['FLM_EXPORT_DIR'] = generic_path.Path(flm_export_dir)
-			detail['CACHEID'] = flm_export_dir
-			if raptor_utilities.getOSPlatform().startswith("win"):
-				detail['PLATMACROS'] = evaluator.CheckedGet("PLATMACROS.WINDOWS")
-			else:
-				detail['PLATMACROS'] = evaluator.CheckedGet("PLATMACROS.LINUX")
-
-			# Apply OS variant provided we are not ignoring this
-			if not self.__Raptor.ignoreOsDetection:
-				self.__Raptor.Debug("Automatic OS detection enabled.")
-				self.ApplyOSVariant(buildConfig, epocroot)
-			else: # We are ignoring OS versions, so no detection is required and no variant will be applied
-				self.__Raptor.Debug("Automatic OS detection disabled.")
-
-			# is this a feature variant config or an ordinary variant
-			fv = evaluator.Get("FEATUREVARIANTNAME")
-			if fv:
-				variantHdr = evaluator.CheckedGet("VARIANT_HRH")
-				variantHRH = generic_path.Path(variantHdr)
-				detail['ISFEATUREVARIANT'] = True
-			else:
-				variantCfg = evaluator.CheckedGet("VARIANT_CFG")
-				variantCfg = generic_path.Path(variantCfg)
-				if not variantCfg in variantCfgs:
-					# get VARIANT_HRH from the variant.cfg file
-					varCfg = getVariantCfgDetail(detail['EPOCROOT'], variantCfg)
-					variantCfgs[variantCfg] = varCfg['VARIANT_HRH']
-					# we expect to always build ABIv2
-					if not 'ENABLE_ABIV2_MODE' in varCfg:
-						self.__Raptor.Warn("missing flag ENABLE_ABIV2_MODE in %s file. ABIV1 builds are not supported.",
-										   str(variantCfg))
-				variantHRH = variantCfgs[variantCfg]
-				detail['ISFEATUREVARIANT'] = False
-
-			detail['VARIANT_HRH'] = variantHRH
-			self.__Raptor.Info("'%s' uses variant hrh file '%s'", buildConfig.name, variantHRH)
-			detail['SYSTEMINCLUDE'] = evaluator.CheckedGet("SYSTEMINCLUDE")
-
-
-			# find all the interface names we need
-			ifaceTypes = evaluator.CheckedGet("INTERFACE_TYPES")
-			interfaces = ifaceTypes.split()
-
-			for iface in interfaces:
-				detail[iface] = evaluator.CheckedGet("INTERFACE." + iface)
-
-			# not test code unless positively specified
-			detail['TESTCODE'] = evaluator.CheckedGet("TESTCODE", "")
-
-			# make a key that identifies this platform uniquely
-			# - used to tell us whether we have done the pre-processing
-			# we need already using another platform with compatible values.
-
-			key = str(detail['VARIANT_HRH']) \
-			 	+ str(detail['EPOCROOT']) \
-		    	+ detail['SYSTEMINCLUDE'] \
-		    	+ detail['PLATFORM']
-
-			# Keep a short version of the key for use in filenames.
-			uniq = hashlib.md5()
-			uniq.update(key)
-
-			detail['key'] = key
-			detail['key_md5'] = "p_" + uniq.hexdigest()
-			del uniq
-
-			# compare this configuration to the ones we have already seen
-
-			# Is this an unseen export platform?
-			# concatenate all the values we care about in a fixed order
-			# and use that as a signature for the exports.
-			items = ['EPOCROOT', 'VARIANT_HRH', 'SYSTEMINCLUDE', 'TESTCODE', 'export']
-			export = ""
-			for i in  items:
-				if i in detail:
-					export += i + str(detail[i])
-
-			if export in exports:
-				# add this configuration to an existing export platform
-				index = exports[export]
-				self.ExportPlatforms[index]['configs'].append(buildConfig)
-			else:
-				# create a new export platform with this configuration
-				exports[export] = len(self.ExportPlatforms)
-				exp = copy.copy(detail)
-				exp['PLATFORM'] = 'EXPORT'
-				exp['configs']  = [buildConfig]
-				self.ExportPlatforms.append(exp)
-
-			# Is this an unseen build platform?
-			# concatenate all the values we care about in a fixed order
-			# and use that as a signature for the platform.
-			items = ['PLATFORM', 'EPOCROOT', 'VARIANT_HRH', 'SYSTEMINCLUDE', 'TESTCODE']
-			if raptor_utilities.getOSPlatform().startswith("win"):
-				items.append('PLATMACROS.WINDOWS')
-			else:
-				items.append('PLATMACROS.LINUX')
-
-			items.extend(interfaces)
-			platform = ""
-			for i in  items:
-				if i in detail:
-					platform += i + str(detail[i])
-
-			if platform in platforms:
-				# add this configuration to an existing build platform
-				index = platforms[platform]
-				self.BuildPlatforms[index]['configs'].append(buildConfig)
-			else:
-				# create a new build platform with this configuration
-				platforms[platform] = len(self.BuildPlatforms)
-				detail['configs'] = [buildConfig]
-				self.BuildPlatforms.append(detail)
-
-		# one platform is picked as the "default" for extracting things
-		# that are supposedly platform independent (e.g. PRJ_PLATFORMS)
-		self.defaultPlatform = self.ExportPlatforms[0]
-
-
-	def ReadBldInfFiles(self, aComponentList, doexport, dobuild = True):
-		"""Take a list of bld.inf files and return a list of build specs.
-
-		The returned specification nodes will be suitable for all the build
-		configurations under consideration (using Filter nodes where required).
-		"""
-
-		# we need a Filter node per export platform
-		exportNodes = []
-		for i,ep in enumerate(self.ExportPlatforms):
-			filter = raptor_data.Filter(name = "export_" + str(i))
-
-			# what configurations is this node active for?
-			for config in ep['configs']:
-				filter.AddConfigCondition(config.name)
-
-			exportNodes.append(filter)
-
-		# we need a Filter node per build platform
-		platformNodes = []
-		for i,bp in enumerate(self.BuildPlatforms):
-			filter = raptor_data.Filter(name = "build_" + str(i))
-
-			# what configurations is this node active for?
-			for config in bp['configs']:
-				filter.AddConfigCondition(config.name)
-
-			# platform-wide data
-			platformVar = raptor_data.Variant()
-			platformVar.AddOperation(raptor_data.Set("PRODUCT_INCLUDE",
-													 str(bp['VARIANT_HRH'])))
-
-			filter.AddVariant(platformVar)
-			platformNodes.append(filter)
-
-		# check that each bld.inf exists and add a Specification node for it
-		# to the nodes of the export and build platforms that it supports.
-		for c in aComponentList:
-			if c.bldinf_filename.isFile():
-				self.__Raptor.Info("Processing %s", str(c.bldinf_filename))
-				try:
-					self.AddComponentNodes(c, exportNodes, platformNodes)
-
-				except MetaDataError, e:
-					self.__Raptor.Error(e.Text, bldinf=str(c.bldinf_filename))
-					if not self.__Raptor.keepGoing:
-						return []
-			else:
-				self.__Raptor.Error("build info file does not exist", bldinf=str(c.bldinf_filename))
-				if not self.__Raptor.keepGoing:
-					return []
-
-		# now we have the top-level structure in place...
-		#
-		# <filter exports 1>
-		#		<spec bld.inf 1 />
-		#		<spec bld.inf 2 />
-		#		<spec bld.inf N /> </filter>
-		# <filter build 1>
-		#		<spec bld.inf 1 />
-		#		<spec bld.inf 2 />
-		#		<spec bld.inf N /> </filter>
-		# <filter build 2>
-		#		<spec bld.inf 1 />
-		#		<spec bld.inf 2 />
-		#		<spec bld.inf N /> </filter>
-		# <filter build 3>
-		#		<spec bld.inf 1 />
-		#		<spec bld.inf 2 />
-		#		<spec bld.inf N /> </filter>
-		#
-		# assuming that every bld.inf builds for every platform and all
-		# exports go to the same place. clearly, it is more likely that
-		# some filters have less than N child nodes. in bigger builds there
-		# will also be more than one export platform.
-
-		# we now need to process the EXPORTS for all the bld.inf nodes
-		# before we can do anything else (because raptor itself must do
-		# some exports before the MMP files that include them can be
-		# processed).
-		if doexport:
-			for i,p in enumerate(exportNodes):
-				exportPlatform = self.ExportPlatforms[i]
-				for s in p.GetChildSpecs():
-					try:
-						self.ProcessExports(s, exportPlatform)
-
-					except MetaDataError, e:
-						self.__Raptor.Error("%s",e.Text)
-						if not self.__Raptor.keepGoing:
-							return []
-		else:
-			self.__Raptor.Info("Not Processing Exports (--noexport enabled)")
-
-		# Return at this point if the export-only option was specified for
-		# this run, i.e. no build is required.
-		if dobuild is not True:
-			self.__Raptor.Info("Processing Exports only")
-			return []
-
-		# after exports are done we can look to see if there are any
-		# new Interfaces which can be used for EXTENSIONS. Make sure
-		# that we only load each cache once as some export platforms
-		# may share a directory.
-		doneID = {}
-		for ep in self.ExportPlatforms:
-			flmDir = ep["FLM_EXPORT_DIR"]
-			cid = ep["CACHEID"]
-			if flmDir.isDir() and not cid in doneID:
-				self.__Raptor.cache.Load(flmDir, cid)
-			doneID[cid] = True
-
-		# finally we can process all the other parts of the bld.inf nodes.
-		# Keep a list of the projects we were asked to build so that we can
-		# tell at the end if there were any we didn't know about.
-		self.projectList = list(self.__Raptor.projects)
-		for i,p in enumerate(platformNodes):
-			buildPlatform = self.BuildPlatforms[i]
-			for s in p.GetChildSpecs():
-				try:
-					self.ProcessTEMs(s, buildPlatform)
-					self.ProcessMMPs(s, buildPlatform)
-
-				except MetaDataError, e:
-					self.__Raptor.Error(e.Text)
-					if not self.__Raptor.keepGoing:
-						return []
-
-		for badProj in self.projectList:
-			self.__Raptor.Warn("Can't find project '%s' in any build info file", badProj)
-
-		# everything is specified
-		return exportNodes + platformNodes
-
-	def ModuleName(self,aBldInfPath):
-		"""Calculate the name of the ROM/emulator batch files that run the tests"""
-
-		def LeftPortionOf(pth,sep):
-			""" Internal function to return the portion of pth that is to the left of sep.
-			The split is case-insensitive."""
-			length = len((pth.lower().split(sep.lower()))[0])
-			return pth[0:length]
-			
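-		# e.g. a bld.inf path of "/src/foo/bar/group/bld.inf" gives a module name of "bar"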
-		modulePath = LeftPortionOf(LeftPortionOf(os.path.dirname(aBldInfPath), "group"), "ongoing")
-		moduleName = os.path.basename(modulePath.strip("/"))
-		
-		# Ensure that ModuleName does not return a blank name if the above
-		# calculation produces one
-		if moduleName == "" or moduleName.endswith(":"):
-			moduleName = "module"
-		return moduleName
-
-
-	def AddComponentNodes(self, component, exportNodes, platformNodes):	
-		"""Add Specification nodes for a bld.inf to the appropriate platforms."""
-		bldInfFile = BldInfFile(component.bldinf_filename, self.__gnucpp, component.depfiles, self.__Raptor)
-		component.bldinf = bldInfFile 
-
-		specName = getSpecName(component.bldinf_filename, fullPath=True)
-
-		if isinstance(component.bldinf, raptor_xml.SystemModelComponent):
-			# this component came from a system_definition.xml
-			layer = component.bldinf.GetContainerName("layer")
-			componentName = component.bldinf.GetContainerName("component")
-		else:
-			# this is a plain old bld.inf file from the command-line
-			layer = ""
-			componentName = ""
-
-		# exports are independent of build platform
-		for i,ep in enumerate(self.ExportPlatforms):
-			specNode = raptor_data.Specification(name = specName)
-
-			# keep the BldInfFile object for later
-			specNode.component = component
-
-			# add some basic data in a component-wide variant
-			var = raptor_data.Variant(name='component-wide')
-			var.AddOperation(raptor_data.Set("COMPONENT_META", str(component.bldinf_filename)))
-			var.AddOperation(raptor_data.Set("COMPONENT_NAME", componentName))
-			var.AddOperation(raptor_data.Set("COMPONENT_LAYER", layer))
-			specNode.AddVariant(var)
-
-			# add this bld.inf Specification to the export platform
-			exportNodes[i].AddChild(specNode)
-			component.exportspecs.append(specNode)
-
-		# get the relevant build platforms
-		listedPlatforms = bldInfFile.getBuildPlatforms(self.defaultPlatform)
-		platforms = getBuildableBldInfBuildPlatforms(listedPlatforms,
-								self.__defaultplatforms,
-								self.__basedefaultplatforms,
-								self.__baseuserdefaultplatforms)
-
-
-		outputDir = BldInfFile.outputPathFragment(component.bldinf_filename)
-
-		# Calculate "module name"
-		modulename = self.ModuleName(str(component.bldinf_filename))
-
-		for i,bp in enumerate(self.BuildPlatforms):
-			plat = bp['PLATFORM']
-			if bp['PLATFORM'] in platforms:
-				specNode = raptor_data.Specification(name = specName)
-
-				# remember what component this spec node comes from for later
-				specNode.component = component
-
-				# add some basic data in a component-wide variant
-				var = raptor_data.Variant(name='component-wide-settings-' + plat)
-				var.AddOperation(raptor_data.Set("COMPONENT_META",str(component.bldinf_filename)))
-				var.AddOperation(raptor_data.Set("COMPONENT_NAME", componentName))
-				var.AddOperation(raptor_data.Set("COMPONENT_LAYER", layer))
-				var.AddOperation(raptor_data.Set("MODULE", modulename))
-				var.AddOperation(raptor_data.Append("OUTPUTPATHOFFSET", outputDir, '/'))
-				var.AddOperation(raptor_data.Append("OUTPUTPATH", outputDir, '/'))
-				var.AddOperation(raptor_data.Append("BLDINF_OUTPUTPATH",outputDir, '/'))
-
-				var.AddOperation(raptor_data.Set("TEST_OPTION", component.bldinf.getRomTestType(bp)))
-				specNode.AddVariant(var)
-
-				# add this bld.inf Specification to the build platform
-				platformNodes[i].AddChild(specNode)
-				# also attach it into the component
-				component.specs.append(specNode)
-
-	def ProcessExports(self, componentNode, exportPlatform):
-		"""Do the exports for a given platform and skeleton bld.inf node.
-
-		This will actually perform exports as certain types of files (.mmh)
-		are required to be in place before the rest of the bld.inf node
-		(and parts of other bld.inf nodes) can be processed.
-
-		[some MMP files #include exported .mmh files]
-		"""
-		if exportPlatform["TESTCODE"]:
-			exports = componentNode.component.bldinf.getTestExports(exportPlatform)
-		else:
-			exports = componentNode.component.bldinf.getExports(exportPlatform)
-
-		self.__Raptor.Debug("%i exports for %s",
-							len(exports), str(componentNode.component.bldinf.filename))
-		if exports:
-
-			# each export is either a 'copy' or 'unzip'
-			# maybe we should trap multiple exports to the same location here?
-			epocroot = str(exportPlatform["EPOCROOT"])
-			bldinf_filename = str(componentNode.component.bldinf.filename)
-			exportwhatlog="<whatlog bldinf='%s' mmp='' config=''>\n" % bldinf_filename
-			for export in exports:
-				expSrc = export.getSource()
-				expDstList = export.getDestination() # Might not be a list in all circumstances
-
-				# make it a list if it isn't
-				if not isinstance(expDstList, list):
-					expDstList = [expDstList]
-
-				fromFile = generic_path.Path(expSrc.replace("$(EPOCROOT)", epocroot))
-
-				# For each destination in the destination list, add an export target, perform it if required.
-				# This ensures that make knows the dependency situation but that the export is made
-				# before any other part of the metadata requires it.  It also helps with the build
-				# from clean situation where we can't use order only prerequisites.
-				for expDst in expDstList:
-					toFile = generic_path.Path(expDst.replace("$(EPOCROOT)", epocroot))
-					try:
-						if export.getAction() == "copy":
-							# export the file
-							exportwhatlog += self.CopyExport(fromFile, toFile, bldinf_filename)
-						else:
-							members = self.UnzipExport(fromFile, toFile,
-									str(exportPlatform['SBS_BUILD_DIR']),
-									bldinf_filename)
-							
-							exportwhatlog += ("<archive zipfile='" + str(fromFile) + "'>\n")
-							if members != None:
-								exportwhatlog += members
-							exportwhatlog += "</archive>\n"
-					except MetaDataError, e:
-						if self.__Raptor.keepGoing:
-							self.__Raptor.Error("%s",e.Text, bldinf=bldinf_filename)
-						else:
-							raise e
-			exportwhatlog+="</whatlog>\n"
-			self.__Raptor.PrintXML("%s",exportwhatlog)
-
-	def CopyExport(self, _source, _destination, bldInfFile):
-		"""Copy the source file to the destination file (create a directory
-		   to copy into if it does not exist). Don't copy if the destination
-		   file exists and has an equal or newer modification time."""
-		source = generic_path.Path(str(_source).replace('%20',' '))
-		destination = generic_path.Path(str(_destination).replace('%20',' '))
-		dest_str = str(destination)
-		source_str = str(source)
-
-		exportwhatlog="<export destination='" + dest_str + "' source='" + \
-				source_str + "'/>\n"
-
-		try:
-
-
-			destDir = destination.Dir()
-			if not destDir.isDir():
-				os.makedirs(str(destDir))
-				shutil.copyfile(source_str, dest_str)
-				return exportwhatlog
-
-			sourceMTime = 0
-			destMTime = 0
-			try:
-				sourceMTime = os.stat(source_str)[stat.ST_MTIME]
-				destMTime = os.stat(dest_str)[stat.ST_MTIME]
-			except OSError, e:
-				if sourceMTime == 0:
-					message = "Source of export does not exist:  " + str(source)
-					if not self.__Raptor.keepGoing:
-						raise MetaDataError(message)
-					else:
-						self.__Raptor.Error(message, bldinf=bldInfFile)
-
-			if destMTime == 0 or destMTime < sourceMTime:
-				if os.path.exists(dest_str):
-					os.chmod(dest_str,stat.S_IREAD | stat.S_IWRITE)
-				shutil.copyfile(source_str, dest_str)
-				self.__Raptor.Info("Copied %s to %s", source_str, dest_str)
-			else:
-				self.__Raptor.Info("Up-to-date: %s", dest_str)
-
-
-		except Exception,e:
-			message = "Could not export " + source_str + " to " + dest_str + " : " + str(e)
-			if not self.__Raptor.keepGoing:
-				raise MetaDataError(message)
-			else:
-				self.__Raptor.Error(message, bldinf=bldInfFile)
-
-		return exportwhatlog
-
-
-	def UnzipExport(self, _source, _destination, _sbs_build_dir, bldinf_filename):
-		"""Unzip the source zipfile into the destination directory
-		   but only if the markerfile does not already exist there
-		   or it does exist but is older than the zipfile.
-		   The markerfile lives in the build directory and its name is formed
-		   from the sanitised source and destination paths with ".unzipped" appended.
-		"""
-
-		# Re-insert spaces into the file names if they were escaped as "%20"
-		source = str(_source).replace('%20',' ')
-		destination = str(_destination).replace('%20',' ')
-		sanitisedSource = raptor_utilities.sanitise(source)
-		sanitisedDestination = raptor_utilities.sanitise(destination)
-
-		exportwhatlog = ""
-
-
-		try:
-			if not _destination.isDir():
-				os.makedirs(destination)
-
-			# Form the directory to contain the unzipped marker files, and make the directory if required.
-			markerfiledir = generic_path.Path(_sbs_build_dir)
-			if not markerfiledir.isDir():
-				os.makedirs(str(markerfiledir))
-
-			# Form the marker file name and convert to Python string
-			markerfilename = str(generic_path.Join(markerfiledir, sanitisedSource + sanitisedDestination + ".unzipped"))
-
-			# Don't unzip if the marker file is already there and is at least as up to date as the source zip
-			sourceMTime = 0
-			destMTime = 0
-			try:
-				sourceMTime = os.stat(source)[stat.ST_MTIME]
-				destMTime = os.stat(markerfilename)[stat.ST_MTIME]
-			except OSError, e:
-				if sourceMTime == 0:
-					raise MetaDataError("Source zip for export does not exist:  " + source)
-			if destMTime != 0 and destMTime >= sourceMTime:
-				# This file has already been unzipped. Print members then return
-				exportzip = zipfile.ZipFile(source, 'r')
-				files = exportzip.namelist()
-				files.sort()
-
-				for file in files:
-					if not file.endswith('/'):
-						expfilename = str(generic_path.Join(destination, file))
-						exportwhatlog += "<member>" + expfilename + "</member>\n"
-
-				self.__Raptor.PrintXML("<clean bldinf='" + bldinf_filename + "' mmp='' config=''>\n")
-				self.__Raptor.PrintXML("<zipmarker>" + markerfilename + "</zipmarker>\n")
-				self.__Raptor.PrintXML("</clean>\n")
-
-				return exportwhatlog
-
-			exportzip = zipfile.ZipFile(source, 'r')
-			files = exportzip.namelist()
-			files.sort()
-			filecount = 0
-			for file in files:
-				expfilename = str(generic_path.Join(destination, file))
-				if file.endswith('/'):
-					try:
-						os.makedirs(expfilename)
-					except OSError, e:
-						pass # errors to do with "already exists" are not interesting.
-				else:
-					try:
-						os.makedirs(os.path.split(expfilename)[0])
-					except OSError, e:
-						pass # errors to do with "already exists" are not interesting.
-
-					try:
-						if os.path.exists(expfilename):
-							os.chmod(expfilename,stat.S_IREAD | stat.S_IWRITE)
-						expfile = open(expfilename, 'wb')
-						expfile.write(exportzip.read(file))
-						expfile.close()
-						
-						# Resurrect any file execution permissions present in the archived version
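-						# (for zips built on Unix hosts the upper 16 bits of external_attr
-						#  hold the file mode bits; 0100 is the owner-execute bit)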
-						if (exportzip.getinfo(file).external_attr >> 16L) & 0100:
-							os.chmod(expfilename, stat.S_IMODE(os.stat(expfilename).st_mode) | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)						
-						
-						# Each file keeps its modified time the same as what it was before unzipping
-						accesstime = time.time()
-						datetime = exportzip.getinfo(file).date_time
-						timeTuple=(int(datetime[0]), int(datetime[1]), int(datetime[2]), int(datetime[3]), \
-									int(datetime[4]), int(datetime[5]), int(0), int(0), int(0))
-						modifiedtime = time.mktime(timeTuple)
-						os.utime(expfilename,(accesstime, modifiedtime))
-
-						filecount += 1
-						exportwhatlog+="<member>" + expfilename + "</member>\n"
-					except IOError, e:
-						message = "Could not unzip %s to %s: file %s: %s" %(source, destination, expfilename, str(e))
-						if not self.__Raptor.keepGoing:
-							raise MetaDataError(message)
-						else:
-							self.__Raptor.Error(message, bldinf=bldinf_filename)
-
-			markerfile = open(markerfilename, 'wb+')
-			markerfile.close()
-			self.__Raptor.PrintXML("<clean bldinf='" + bldinf_filename + "' mmp='' config=''>\n")
-			self.__Raptor.PrintXML("<zipmarker>" + markerfilename +	"</zipmarker>\n")
-			self.__Raptor.PrintXML("</clean>\n")
-
-		except IOError, e:
-			self.__Raptor.Warn("Problem while unzipping export %s to %s: %s",source,destination,str(e))
-
-		self.__Raptor.Info("Unzipped %d files from %s to %s", filecount, source, destination)
-		return exportwhatlog
-
-	def ProcessTEMs(self, componentNode, buildPlatform):
-		"""Add Template Extension Makefile nodes for a given platform
-		   to a skeleton bld.inf node.
-
-		This happens after exports have been handled.
-		"""
-		if buildPlatform["ISFEATUREVARIANT"]:
-			return	# feature variation does not run extensions at all
-		
-		if buildPlatform["TESTCODE"]:
-			extensions = componentNode.component.bldinf.getTestExtensions(buildPlatform)
-		else:
-			extensions = componentNode.component.bldinf.getExtensions(buildPlatform)
-
-		self.__Raptor.Debug("%i template extension makefiles for %s",
-							len(extensions), str(componentNode.component.bldinf.filename))
-
-		for i,extension in enumerate(extensions):
-			if self.__Raptor.projects:
-				if not extension.nametag in self.__Raptor.projects:
-					self.__Raptor.Debug("Skipping %s", extension.getMakefile())
-					continue
-				elif extension.nametag in self.projectList:
-					self.projectList.remove(extension.nametag)
-
-			extensionSpec = raptor_data.Specification("extension" + str(i))
-
-			interface = buildPlatform["extension"]
-			customInterface = False
-
-			# is there an FLM replacement for this extension?
-			if extension.interface:
-				try:
-					interface = self.__Raptor.cache.FindNamedInterface(extension.interface, buildPlatform["CACHEID"])
-					customInterface = True
-				except KeyError:
-					# no, there isn't an FLM
-					pass
-
-			extensionSpec.SetInterface(interface)
-
-			var = raptor_data.Variant()
-			var.AddOperation(raptor_data.Set("EPOCBLD", "$(OUTPUTPATH)"))
-			var.AddOperation(raptor_data.Set("PLATFORM", buildPlatform["PLATFORM"]))
-			var.AddOperation(raptor_data.Set("PLATFORM_PATH", buildPlatform["PLATFORM"].lower()))
-			var.AddOperation(raptor_data.Set("CFG", "$(VARIANTTYPE)"))
-			var.AddOperation(raptor_data.Set("CFG_PATH", "$(VARIANTTYPE)"))
-			var.AddOperation(raptor_data.Set("GENERATEDCPP", "$(OUTPUTPATH)"))
-			var.AddOperation(raptor_data.Set("TEMPLATE_EXTENSION_MAKEFILE", extension.getMakefile()))
-			var.AddOperation(raptor_data.Set("TEMCOUNT", str(i)))
-
-			# Extension inputs are added to the build spec.
-			# '$'s are escaped so that they are not expanded by Raptor or
-			# by Make in the call to the FLM
-			# The Extension makefiles are supposed to expand them themselves
-			# Path separators need not be parameterised anymore
-			# as bash is the standard shell
-			standardVariables = extension.getStandardVariables()
-			for standardVariable in standardVariables.keys():
-				self.__Raptor.Debug("Set %s=%s", standardVariable, standardVariables[standardVariable])
-				value = standardVariables[standardVariable].replace('$(', '$$$$(')
-				value = value.replace('$/', '/').replace('$;', ':')
-				var.AddOperation(raptor_data.Set(standardVariable, value))
-
-			# . . . as with the standard variables but the names and number
-			# of options are not known in advance so we add them to
-			# a "structure" that is self-describing
-			var.AddOperation(raptor_data.Set("O._MEMBERS", ""))
-			options = extension.getOptions()
-			for option in options:
-				self.__Raptor.Debug("Set %s=%s", option, options[option])
-				value = options[option].replace('$(EPOCROOT)', '$(EPOCROOT)/')
-				value = value.replace('$(', '$$$$(')
-				value = value.replace('$/', '/').replace('$;', ':')
-
-				if customInterface:
-					var.AddOperation(raptor_data.Set(option, value))
-				else:
-					var.AddOperation(raptor_data.Append("O._MEMBERS", option))
-					var.AddOperation(raptor_data.Set("O." + option, value))
-
-			extensionSpec.AddVariant(var)
-			componentNode.AddChild(extensionSpec)
-
-
-	def ProcessMMPs(self, componentNode, buildPlatform):
-		"""Add project nodes for a given platform to a skeleton bld.inf node.
-
-		This happens after exports have been handled.
-		"""
-		gnuList = []
-		makefileList = []
-
-
-		component = componentNode.component
-
-
-		if buildPlatform["TESTCODE"]:
-			MMPList = component.bldinf.getTestMMPList(buildPlatform)
-		else:
-			MMPList = component.bldinf.getMMPList(buildPlatform)
-
-		bldInfFile = component.bldinf.filename
-
-		for mmpFileEntry in MMPList['mmpFileList']:
-			component.AddMMP(mmpFileEntry.filename) # Tell the component another mmp is specified (for this platform)
-
-			projectname = mmpFileEntry.filename.File().lower()
-
-			if self.__Raptor.projects:
-				if not projectname in self.__Raptor.projects:
-					self.__Raptor.Debug("Skipping %s", str(mmpFileEntry.filename))
-					continue
-				elif projectname in self.projectList:
-					self.projectList.remove(projectname)
-
-			foundmmpfile = (mmpFileEntry.filename).FindCaseless()
-
-			if foundmmpfile == None:
-				self.__Raptor.Error("Can't find mmp file '%s'", str(mmpFileEntry.filename), bldinf=str(bldInfFile))
-				continue
-
-			mmpFile = MMPFile(foundmmpfile,
-								   self.__gnucpp,
-								   component.bldinf,
-								   component.depfiles,
-								   log = self.__Raptor)
-
-			mmpFilename = mmpFile.filename
-
-			self.__Raptor.Info("Processing %s for platform %s",
-							   str(mmpFilename),
-							   " + ".join([x.name for x in buildPlatform["configs"]]))
-
-			# Run the Parser
-			# The backend supplies the actions
-			content = mmpFile.getContent(buildPlatform)
-			backend = MMPRaptorBackend(self.__Raptor, str(mmpFilename), str(bldInfFile))
-			parser  = MMPParser(backend)
-			parseresult = None
-			try:
-				parseresult = parser.mmp.parseString(content)
-			except ParseException,e:
-				self.__Raptor.Debug(e) # basically ignore parse exceptions
-
-			if (not parseresult) or (parseresult[0] != 'MMP'):
-				self.__Raptor.Error("The MMP Parser didn't recognise the mmp file '%s'",
-					                str(mmpFileEntry.filename), 
-					                bldinf=str(bldInfFile))
-				self.__Raptor.Debug(content)
-				self.__Raptor.Debug("The parse result was %s", parseresult)
-			else:
-				backend.finalise(buildPlatform)
-
-			# feature variation only processes FEATUREVARIANT binaries
-			if buildPlatform["ISFEATUREVARIANT"] and not backend.featureVariant:
-				continue
-			
-			# now build the specification tree
-			mmpSpec = raptor_data.Specification(generic_path.Path(getSpecName(mmpFilename)))
-			var = backend.BuildVariant
-
-			var.AddOperation(raptor_data.Set("PROJECT_META", str(mmpFilename)))
-
-			# If it is a TESTMMPFILE section, the FLM needs to know about it
-			if buildPlatform["TESTCODE"] and (mmpFileEntry.testoption in
-					["manual", "auto"]):
-
-				var.AddOperation(raptor_data.Set("TESTPATH",
-						mmpFileEntry.testoption.lower() + ".bat"))
-
-			# The output path for objects, stringtables and bitmaps specified by
-			# this MMP.  Adding in the requested target extension prevents build
-			# "fouling" in cases where there are several mmp targets which only differ
-			# by the requested extension. e.g. elocl.01 and elocl.18
-			var.AddOperation(raptor_data.Append("OUTPUTPATH","$(UNIQUETARGETPATH)",'/'))
-
-			# If the bld.inf entry for this MMP had the BUILD_AS_ARM option then
-			# tell the FLM.
-			if mmpFileEntry.armoption:
-				var.AddOperation(raptor_data.Set("ALWAYS_BUILD_AS_ARM","1"))
-
-			# what interface builds this node?
-			try:
-				interfaceName = buildPlatform[backend.getTargetType()]
-				mmpSpec.SetInterface(interfaceName)
-			except KeyError:
-				self.__Raptor.Error("Unsupported target type '%s' in %s",
-								    backend.getTargetType(),
-								    str(mmpFileEntry.filename),
-								    bldinf=str(bldInfFile))
-				continue
-
-			# Although not part of the MMP, some MMP-based build specs additionally require knowledge of their
-			# container bld.inf exported headers
-			for export in componentNode.component.bldinf.getExports(buildPlatform):
-				destination = export.getDestination()
-				if isinstance(destination, list):
-					exportfile = str(destination[0])
-				else:
-					exportfile = str(destination)
-
-				if re.search('\.h',exportfile,re.IGNORECASE):
-					var.AddOperation(raptor_data.Append("EXPORTHEADERS", str(exportfile)))
-
-			# now we have something worth adding to the component
-			mmpSpec.AddVariant(var)
-			componentNode.AddChild(mmpSpec)
-			
-			# if there are APPLY variants then add them to the mmpSpec too
-			for applyVar in backend.ApplyVariants:
-				try:
-					mmpSpec.AddVariant(self.__Raptor.cache.FindNamedVariant(applyVar))
-				except KeyError:
-					self.__Raptor.Error("APPLY unknown variant '%s' in %s",
-								        applyVar,
-								        str(mmpFileEntry.filename),
-								        bldinf=str(bldInfFile))
-
-			# resources, stringtables and bitmaps are sub-nodes of this project
-			# (do not add these for feature variant builds)
-			
-			if not buildPlatform["ISFEATUREVARIANT"]:
-				# Buildspec for Resource files
-				for i,rvar in enumerate(backend.ResourceVariants):
-					resourceSpec = raptor_data.Specification('resource' + str(i))
-					resourceSpec.SetInterface(buildPlatform['resource'])
-					resourceSpec.AddVariant(rvar)
-					mmpSpec.AddChild(resourceSpec)
-
-				# Buildspec for String Tables
-				for i,stvar in enumerate(backend.StringTableVariants):
-					stringTableSpec = raptor_data.Specification('stringtable' + str(i))
-					stringTableSpec.SetInterface(buildPlatform['stringtable'])
-					stringTableSpec.AddVariant(stvar)
-					mmpSpec.AddChild(stringTableSpec)
-
-				# Buildspec for Bitmaps
-				for i,bvar in enumerate(backend.BitmapVariants):
-					bitmapSpec = raptor_data.Specification('bitmap' + str(i))
-					bitmapSpec.SetInterface(buildPlatform['bitmap'])
-					bitmapSpec.AddVariant(bvar)
-					mmpSpec.AddChild(bitmapSpec)
-
-		# feature variation does not run extensions at all
-		# so return without considering .*MAKEFILE sections
-		if buildPlatform["ISFEATUREVARIANT"]:
-			return
-			
-		# Build spec for gnumakefile
-		for g in MMPList['gnuList']:
-			projectname = g.getMakefileName().lower()
-
-			if self.__Raptor.projects:
-				if not projectname in self.__Raptor.projects:
-					self.__Raptor.Debug("Skipping %s", str(g.getMakefileName()))
-					continue
-				elif projectname in self.projectList:
-					self.projectList.remove(projectname)
-
-			self.__Raptor.Debug("%i gnumakefile extension makefiles for %s",
-						len(gnuList), str(componentNode.component.bldinf.filename))
-			var = raptor_data.Variant()
-			gnuSpec = raptor_data.Specification("gnumakefile " + str(g.getMakefileName()))
-			interface = buildPlatform["ext_makefile"]
-			gnuSpec.SetInterface(interface)
-			gnumakefilePath = raptor_utilities.resolveSymbianPath(str(bldInfFile), g.getMakefileName())
-			var.AddOperation(raptor_data.Set("EPOCBLD", "$(OUTPUTPATH)"))
-			var.AddOperation(raptor_data.Set("PLATFORM", buildPlatform["PLATFORM"]))
-			var.AddOperation(raptor_data.Set("EXTMAKEFILENAME", g.getMakefileName()))
-			var.AddOperation(raptor_data.Set("DIRECTORY",g.getMakeDirectory()))
-			var.AddOperation(raptor_data.Set("CFG","$(VARIANTTYPE)"))
-			standardVariables = g.getStandardVariables()
-			for standardVariable in standardVariables.keys():
-				self.__Raptor.Debug("Set %s=%s", standardVariable, standardVariables[standardVariable])
-				value = standardVariables[standardVariable].replace('$(', '$$$$(')
-				value = value.replace('$/', '/').replace('$;', ':')
-				var.AddOperation(raptor_data.Set(standardVariable, value))
-			gnuSpec.AddVariant(var)
-			componentNode.AddChild(gnuSpec)
-
-		# Build spec for makefile
-		for m in MMPList['makefileList']:
-			projectname = m.getMakefileName().lower()
-
-			if self.__Raptor.projects:
-				if not projectname in self.__Raptor.projects:
-					self.__Raptor.Debug("Skipping %s", str(m.getMakefileName()))
-					continue
-				elif projectname in self.projectList:
-					self.projectList.remove(projectname)
-
-			self.__Raptor.Debug("%i makefile extension makefiles for %s",
-						len(makefileList), str(componentNode.component.bldinf.filename))
-			var = raptor_data.Variant()
-			gnuSpec = raptor_data.Specification("makefile " + str(m.getMakefileName()))
-			interface = buildPlatform["ext_makefile"]
-			gnuSpec.SetInterface(interface)
-			gnumakefilePath = raptor_utilities.resolveSymbianPath(str(bldInfFile), m.getMakefileName())
-			var.AddOperation(raptor_data.Set("EPOCBLD", "$(OUTPUTPATH)"))
-			var.AddOperation(raptor_data.Set("PLATFORM", buildPlatform["PLATFORM"]))
-			var.AddOperation(raptor_data.Set("EXTMAKEFILENAME", m.getMakefileName()))
-			var.AddOperation(raptor_data.Set("DIRECTORY",m.getMakeDirectory()))
-			var.AddOperation(raptor_data.Set("CFG","$(VARIANTTYPE)"))
-			var.AddOperation(raptor_data.Set("USENMAKE","1"))
-			standardVariables = m.getStandardVariables()
-			for standardVariable in standardVariables.keys():
-				self.__Raptor.Debug("Set %s=%s", standardVariable, standardVariables[standardVariable])
-				value = standardVariables[standardVariable].replace('$(', '$$$$(')
-				value = value.replace('$/', '/').replace('$;', ':')
-				var.AddOperation(raptor_data.Set(standardVariable, value))
-			gnuSpec.AddVariant(var)
-			componentNode.AddChild(gnuSpec)
-
-
-	def ApplyOSVariant(self, aBuildUnit, aEpocroot):
-		# Form path to kif.xml and path to buildinfo.txt
-		kifXmlPath = generic_path.Join(aEpocroot, "epoc32", "data","kif.xml")
-		buildInfoTxtPath = generic_path.Join(aEpocroot, "epoc32", "data","buildinfo.txt")
-
-		# Start with osVersion being None. This variable is a string and does two things:
-		# 1) is a representation of the OS version
-		# 2) is potentially the name of a variant
-		osVersion = None
-		if kifXmlPath.isFile(): # kif.xml exists so try to read it
-			osVersion = getOsVerFromKifXml(str(kifXmlPath))
-			if osVersion != None:
-				self.__Raptor.Info("OS version \"%s\" determined from file \"%s\"" % (osVersion, kifXmlPath))
-
-		# OS version was not determined from the kif.xml, e.g. because it doesn't exist
-		# or there was a problem parsing it. So, we fall over to using the buildinfo.txt
-		if osVersion == None and buildInfoTxtPath.isFile():
-			osVersion = getOsVerFromBuildInfoTxt(str(buildInfoTxtPath))
-			if osVersion != None:
-				self.__Raptor.Info("OS version \"%s\" determined from file \"%s\"" % (osVersion, buildInfoTxtPath))
-
-		# If we determined a non-empty string for the OS Version, attempt to apply it
-		if osVersion and osVersion in self.__Raptor.cache.variants:
-			self.__Raptor.Info("applying the OS variant to the configuration \"%s\"." % aBuildUnit.name)
-			aBuildUnit.variants.append(self.__Raptor.cache.variants[osVersion])
-		else:
-			self.__Raptor.Info("no OS variant for the configuration \"%s\"." % aBuildUnit.name)
-
+#
+# Copyright (c) 2007-2009 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of the License "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description: 
+# This module includes classes that process bld.inf and .mmp files to
+# generate Raptor build specifications
+#
+
+import copy
+import re
+import os.path
+import shutil
+import stat
+import hashlib
+import base64
+
+import raptor
+import raptor_data
+import raptor_utilities
+import raptor_xml
+import generic_path
+import subprocess
+import zipfile
+from mmpparser import *
+
+import time
+
+
+PiggyBackedBuildPlatforms = {'ARMV5':['GCCXML']}
+
+PlatformDefaultDefFileDir = {'WINSCW':'bwins',
+				  'ARMV5' :'eabi',
+				  'ARMV5SMP' :'eabi',
+				  'GCCXML':'eabi',
+				  'ARMV6':'eabi',
+				  'ARMV7' : 'eabi',
+				  'ARMV7SMP' : 'eabi'}
+
+def getVariantCfgDetail(aEPOCROOT, aVariantCfgFile):
+	"""Obtain pertinent build related detail from the Symbian variant.cfg file.
+
+	This variant.cfg file, usually located relative to $(EPOCROOT), contains:
+	(1) The $(EPOCROOT) relative location of the primary .hrh file used to configure the specific OS variant build
+	(2) A flag determining whether ARMV5 represents an ABIV1 or ABIV2 build (currently unused by Raptor)."""
+
+	variantCfgDetails = {}
+	variantCfgFile = None
+
+	try:
+		variantCfgFile = open(str(aVariantCfgFile))
+	except IOError, (number, message):
+		raise MetaDataError("Could not read variant configuration file "+str(aVariantCfgFile)+" ("+message+")")
+
+	for line in variantCfgFile.readlines():
+		if re.search('^(\s$|\s*#)', line):
+			continue
+		# Note that this detection of the .hrh file matches the command line build i.e. ".hrh" somewhere
+		# in the specified line
+		elif re.search('\.hrh', line, re.I):
+			variantHrh = line.strip()
+			if variantHrh.startswith('\\') or variantHrh.startswith('/'):
+				variantHrh = variantHrh[1:]
+			variantHrh = aEPOCROOT.Append(variantHrh)
+			variantCfgDetails['VARIANT_HRH'] = variantHrh
+		else:
+			lineContent = line.split()
+
+			if len(lineContent) == 1:
+				variantCfgDetails[lineContent.pop(0)] = 1
+			else:
+				variantCfgDetails[lineContent.pop(0)] = lineContent
+
+	variantCfgFile.close()
+
+	if not variantCfgDetails.has_key('VARIANT_HRH'):
+		raise MetaDataError("No variant file specified in "+str(aVariantCfgFile))
+	if not variantHrh.isFile():
+		raise MetaDataError("Variant file "+str(variantHrh)+" does not exist")
+
+	return variantCfgDetails
+
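+# Editor's note - an illustrative sketch only; the variant.cfg content below is
+# hypothetical.  Given a file containing:
+#
+#	# comments and blank lines are skipped
+#	EPOC32\include\variant\Symbian_OS.hrh
+#	ENABLE_ABIV2_MODE
+#
+# getVariantCfgDetail() would return roughly
+#	{'VARIANT_HRH': <aEPOCROOT>/EPOC32/include/variant/Symbian_OS.hrh,
+#	 'ENABLE_ABIV2_MODE': 1}
+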
+def getOsVerFromKifXml(aPathToKifXml):
+	"""Obtain the OS version from the kif.xml file located at $EPOCROOT/epoc32/data/kif.xml.
+
+	If successful, the function returns a string such as "v95" to indicate 9.5; None is
+	returned if for any reason the function cannot determine the OS version."""
+
+	releaseTagName = "ki:release"
+	osVersion = None
+
+	import xml.dom.minidom
+
+	try:
+		# Parsed document object
+		kifDom = xml.dom.minidom.parse(str(aPathToKifXml))
+
+		# elements - the elements whose names are releaseTagName
+		elements = kifDom.getElementsByTagName(releaseTagName)
+
+		# There should be exactly one of the elements whose name is releaseTagName
+		# If more than one, osVersion is left as None, since the version should be
+		# unique to the kif.xml file
+		if len(elements) == 1:
+			osVersionTemp = elements[0].getAttribute("version")
+			osVersion = "v" + osVersionTemp.replace(".", "")
+
+		kifDom.unlink() # Clean up
+
+	except:
+		# There's no documentation on which exceptions are raised by these functions.
+		# We catch everything and assume any exception means there was a failure to
+		# determine OS version. None is returned, and the code will fall back
+		# to looking at the buildinfo.txt file.
+		pass
+
+	return osVersion
+
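+# Editor's note - an illustrative sketch only; the kif.xml fragment is
+# hypothetical.  A single release element such as
+#	<ki:release version="9.5"> ... </ki:release>
+# is reduced by getOsVerFromKifXml() to the string "v95".
+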
+def getOsVerFromBuildInfoTxt(aPathToBuildInfoTxt):
+	"""Obtain the OS version from the buildinfo.txt file located at $EPOCROOT/epoc32/data/buildinfo.txt.
+
+	If successful, the function returns a string such as "v95" to indicate 9.5; None is
+	returned if for any reason the function cannot determine the OS version.
+
+	The file $EPOCROOT/epoc32/data/buildinfo.txt is presumed to exist. The client code should
+	handle existence/non-existence."""
+
+	pathToBuildInfoTxt = str(aPathToBuildInfoTxt) # String form version of path to buildinfo.txt
+
+	# Open the file for reading; throw an exception if it could not be read - note that
+	# it should exist at this point.
+	try:
+		buildInfoTxt = open(pathToBuildInfoTxt)
+	except IOError, (number, message):
+		raise MetaDataError("Could not read buildinfo.txt file at" + pathToBuildInfoTxt + ": (" + message + ")")
+
+	# Example buildinfo.txt contents:
+	#
+	# DeviceFamily               100
+	# DeviceFamilyRev            0x900
+	# ManufacturerSoftwareBuild  M08765_Symbian_OS_v9.5
+	#
+	# Regexp to match the line containing the OS version
+	# Need to match things like M08765_Symbian_OS_v9.5 and M08765_Symbian_OS_vFuture
+	# So for the version, match everything except whitespace after v. Whitespace
+	# signifies the end of the regexp.
+	osVersionMatcher = re.compile('.*_Symbian_OS_v([^\s]*)', re.I)
+	osVersion = None
+
+	# Search for a regexp match over all the lines in the file
+	# Note: if two or more lines match the search pattern then
+	# the latest match will overwrite the osVersion string.
+	for line in buildInfoTxt:
+		matchResult = osVersionMatcher.match(line)
+		if matchResult:
+			result = matchResult.groups()
+			osVersion = "v" +  str(reduce(lambda x, y: x + y, result))
+			osVersion = osVersion.replace(".", "")
+
+	buildInfoTxt.close() # Clean-up
+
+	return osVersion
+
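+def _example_buildinfo_version_match():
+	# Editor's illustrative sketch, not called by the build: shows how the
+	# version matcher used above turns a ManufacturerSoftwareBuild line
+	# (sample value, not real data) into the short version string.
+	matcher = re.compile('.*_Symbian_OS_v([^\s]*)', re.I)
+	match = matcher.match("ManufacturerSoftwareBuild  M08765_Symbian_OS_v9.5")
+	return "v" + match.group(1).replace(".", "")	# "v95"
+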
+def getBuildableBldInfBuildPlatforms(aBldInfBuildPlatforms,
+									aDefaultOSBuildPlatforms,
+									aBaseDefaultOSBuildPlatforms,
+									aBaseUserDefaultOSBuildPlatforms):
+	"""Obtain a set of build platform names supported by a bld.inf file
+
+	Build platform deduction is based on both the contents of the PRJ_PLATFORMS section of
+	a bld.inf file together with a hard-coded set of default build platforms supported by
+	the build system itself."""
+
+	expandedBldInfBuildPlatforms = []
+	removePlatforms = set()
+
+	for bldInfBuildPlatform in aBldInfBuildPlatforms:
+		if bldInfBuildPlatform.upper() == "DEFAULT":
+			expandedBldInfBuildPlatforms.extend(aDefaultOSBuildPlatforms.split())
+		elif bldInfBuildPlatform.upper() == "BASEDEFAULT":
+			expandedBldInfBuildPlatforms.extend(aBaseDefaultOSBuildPlatforms.split())
+		elif bldInfBuildPlatform.upper() == "BASEUSERDEFAULT":
+			expandedBldInfBuildPlatforms.extend(aBaseUserDefaultOSBuildPlatforms.split())
+		elif bldInfBuildPlatform.startswith("-"):
+			removePlatforms.add(bldInfBuildPlatform.lstrip("-").upper())
+		else:
+			expandedBldInfBuildPlatforms.append(bldInfBuildPlatform.upper())
+
+	if len(expandedBldInfBuildPlatforms) == 0:
+		expandedBldInfBuildPlatforms.extend(aDefaultOSBuildPlatforms.split())
+
+	# make a set of platforms that can be built
+	buildableBldInfBuildPlatforms = set(expandedBldInfBuildPlatforms)
+
+	# Add platforms that are buildable by virtue of the presence of another
+	for piggyBackedPlatform in PiggyBackedBuildPlatforms:
+		if piggyBackedPlatform in buildableBldInfBuildPlatforms:
+			buildableBldInfBuildPlatforms.update(PiggyBackedBuildPlatforms.get(piggyBackedPlatform))
+
+	# Remove platforms that were negated
+	buildableBldInfBuildPlatforms -= removePlatforms
+
+	return buildableBldInfBuildPlatforms
+
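+def _example_buildable_platforms():
+	# Editor's illustrative sketch, not called by the build: a hypothetical
+	# PRJ_PLATFORMS line of "DEFAULT -WINSCW" with a default platform set of
+	# "ARMV5 WINSCW" expands DEFAULT, removes WINSCW and piggy-backs GCCXML
+	# onto ARMV5, giving set(['ARMV5', 'GCCXML']).
+	return getBuildableBldInfBuildPlatforms(["DEFAULT", "-WINSCW"], "ARMV5 WINSCW", "ARMV5 WINSCW", "ARMV5")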
+
+def getPreProcessorCommentDetail (aPreProcessorComment):
+	"""Takes a preprocessor comment and returns an array containing the filename and linenumber detail."""
+
+	commentDetail = []
+	commentMatch = re.search('# (?P<LINENUMBER>\d+) "(?P<FILENAME>.*)"', aPreProcessorComment)
+
+	if commentMatch:
+		filename = commentMatch.group('FILENAME')
+		filename = os.path.abspath(filename)
+		filename = re.sub(r'\\\\', r'\\', filename)
+		filename = re.sub(r'//', r'/', filename)
+		filename = generic_path.Path(filename)
+		linenumber = int (commentMatch.group('LINENUMBER'))
+
+		commentDetail.append(filename)
+		commentDetail.append(linenumber)
+
+	return commentDetail
+
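+def _example_preprocessor_comment_detail():
+	# Editor's illustrative sketch, not called by the build: a cpp line marker
+	# (the path here is hypothetical) is reduced to a [filename, linenumber] pair.
+	return getPreProcessorCommentDetail('# 42 "/src/mycomp/group/bld.inf"')	# [Path(.../bld.inf), 42]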
+
+def getSpecName(aFileRoot, fullPath=False):
+	"""Returns a build spec name: this is the file root (full path
+	or simple file name) made safe for use as a file name."""
+
+	if fullPath:
+		specName = str(aFileRoot).replace("/","_")
+		specName = specName.replace(":","")
+	else:
+		specName = aFileRoot.File()
+
+	return specName.lower()
+
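+def _example_spec_name():
+	# Editor's illustrative sketch, not called by the build: for a full path
+	# (hypothetical) the slashes become underscores, any drive colon is dropped
+	# and the result is lower-cased, giving a name safe to use in file names.
+	return getSpecName(generic_path.Path("/src/mycomp/group/hello.mmp"), fullPath=True)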
+
+# Classes
+
+class MetaDataError(Exception):
+	"""Fatal error wrapper, to be thrown directly back to whatever is calling."""
+
+	def __init__(self, aText):
+		self.Text = aText
+	def __str__(self):
+		return repr(self.Text)
+
+
+class PreProcessedLine(str):
+	"""Custom string class that accepts filename and line number information from
+	a preprocessed context."""
+
+	def __new__(cls, value, *args, **keywargs):
+		return str.__new__(cls, value)
+
+	def __init__(self, value, aFilename, aLineNumber):
+		self.filename = aFilename
+		self.lineNumber = aLineNumber
+
+	def getFilename (self):
+		return self.filename
+
+	def getLineNumber (self):
+		return self.lineNumber
+
+class PreProcessor(raptor_utilities.ExternalTool):
+	"""Preprocessor wrapper suitable for Symbian metadata file processing."""
+
+	def __init__(self, aPreProcessor,
+				 aStaticOptions,
+				 aIncludeOption,
+				 aMacroOption,
+				 aPreIncludeOption,
+				 aRaptor):
+		raptor_utilities.ExternalTool.__init__(self, aPreProcessor)
+		self.__StaticOptions = aStaticOptions
+		self.__IncludeOption = aIncludeOption
+		self.__MacroOption = aMacroOption
+		self.__PreIncludeOption = aPreIncludeOption
+
+		self.filename = ""
+		self.__Macros = []
+		self.__IncludePaths = []
+		self.__PreIncludeFile = ""
+		self.raptor = aRaptor
+
+	def call(self, aArgs, sourcefilename):
+		""" Override call so that we can do our own error handling."""
+		tool = self._ExternalTool__Tool
+		commandline = tool + " " + aArgs + " " + str(sourcefilename)
+		try:
+			# the actual call differs between Windows and Unix
+			if raptor_utilities.getOSFileSystem() == "unix":
+				p = subprocess.Popen(commandline, \
+									 shell=True, bufsize=65535, \
+									 stdin=subprocess.PIPE, \
+									 stdout=subprocess.PIPE, \
+									 stderr=subprocess.PIPE, \
+									 close_fds=True)
+			else:
+				p = subprocess.Popen(commandline, \
+									 bufsize=65535, \
+									 stdin=subprocess.PIPE, \
+									 stdout=subprocess.PIPE, \
+									 stderr=subprocess.PIPE, \
+									 universal_newlines=True)
+
+			# run the command and wait for all the output
+			(self._ExternalTool__Output, errors) = p.communicate()
+
+			if self.raptor.debugOutput:
+				self.raptor.Debug("Preprocessing Start %s", str(sourcefilename))
+				self.raptor.Debug("Output:\n%s", self._ExternalTool__Output)
+				self.raptor.Debug("Errors:\n%s", errors)
+				self.raptor.Debug("Preprocessing End %s", str(sourcefilename))
+
+			incRE = re.compile("In file included from")
+			fromRE = re.compile(r"\s+from")
+			warningRE = re.compile("warning:|pasting.+token|from.+:")
+			remarkRE = re.compile("no newline at end of file|does not give a valid preprocessing token")
+
+			actualErr = False
+			if errors != "":
+				for error in errors.splitlines():
+					if incRE.search(error) or fromRE.search(error):
+						continue
+					if not remarkRE.search(error):
+						if warningRE.search(error):
+							self.raptor.Warn("%s: %s", tool, error)
+						else:
+							self.raptor.Error("%s: %s", tool, error)
+							actualErr = True
+			if actualErr:
+				raise MetaDataError("Errors in %s" % str(sourcefilename))
+
+		except Exception,e:
+			raise MetaDataError("Preprocessor exception: '%s' : in command : '%s'" % (str(e), commandline))
+
+		return 0	# all OK
+
+	def setMacros(self, aMacros):
+		self.__Macros = aMacros
+
+	def addMacro(self, aMacro):
+		self.__Macros.append(aMacro)
+
+	def addMacros(self, aMacros):
+		self.__Macros.extend(aMacros)
+
+	def getMacros(self):
+		return self.__Macros
+
+
+	def addIncludePath(self, aIncludePath):
+		p = str(aIncludePath)
+		if p == "":
+			self.raptor.Warn("attempt to set an empty preprocessor include path for %s" % str(self.filename))
+		else:
+			self.__IncludePaths.append(p)
+
+	def addIncludePaths(self, aIncludePaths):
+		for path in aIncludePaths:
+			self.addIncludePath(path)
+
+	def setIncludePaths(self, aIncludePaths):
+		self.__IncludePaths = []
+		self.addIncludePaths(aIncludePaths)
+
+	def setPreIncludeFile(self, aPreIncludeFile):
+		self.__PreIncludeFile = aPreIncludeFile
+
+	def preprocess(self):
+		preProcessorCall = self.__constructPreProcessorCall()
+		returnValue = self.call(preProcessorCall, self.filename)
+
+		return self.getOutput()
+
+	def __constructPreProcessorCall(self):
+
+		call = self.__StaticOptions
+
+		if self.__PreIncludeFile:
+			call += " " + self.__PreIncludeOption
+			call += " " + str(self.__PreIncludeFile)
+
+		for macro in self.__Macros:
+			call += " " + self.__MacroOption + macro
+
+		for includePath in self.__IncludePaths:
+			call += " " + self.__IncludeOption
+			call += " " + str(includePath)
+
+		return call
+
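+	# Editor's note - for a hypothetical set-up the assembled option string
+	# looks roughly like
+	#	-undef -nostdinc -MD -MF<depfile> -MT<target> -include <variant.hrh>
+	#	-DSBSV2=_____SBSV2 -D<PLATMACRO>=_____<PLATMACRO> -I <include dir> ...
+	# i.e. the static options first, then the pre-include file, then macros,
+	# then include paths, in the order __constructPreProcessorCall() appends them.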
+
+class MetaDataFile(object):
+	"""A generic representation of a Symbian metadata file
+
+	Symbian metadata files are subject to preprocessing, primarily with macros based
+	on the selected build platform.  This class provides a generic means of wrapping
+	up the preprocessing of such files."""
+
+	def __init__(self, aFilename, gnucpp, depfiles, aRootLocation=None, log=None):
+		"""
+		@param aFilename	An MMP, bld.inf or other preprocessable build spec file
+		@param gnucpp 		location of GNU CPP
+		@param depfiles     	list to add dependency file tuples to
+		@param aRootLocation	the directory containing the file
+		@param log 		A class with Debug(<string>), Info(<string>) and Error(<string>) methods
+		"""
+		self.filename = aFilename
+		self.__RootLocation = aRootLocation
+		# Dictionary with key of build platform and a text string of processed output as values
+		self.__PreProcessedContent = {}
+		self.log = log
+		self.depfiles = depfiles
+
+		self.__gnucpp = gnucpp
+		if gnucpp is None:
+			raise ValueError('gnucpp must be set')
+
+	def depspath(self, platform):
+	   """ Where does dependency information go relative to platform's SBS_BUILD_DIR?
+	       Subclasses should redefine this
+	   """
+	   return str(platform['SBS_BUILD_DIR']) + "/" + str(self.__RootLocation) + "." + platform['key_md5'] + ".d"
+
+	def getContent(self, aBuildPlatform):
+
+		key = aBuildPlatform['key']
+
+		config_macros = []
+
+		adepfilename = self.depspath(aBuildPlatform)
+		generateDepsOptions = ""
+		if adepfilename:
+
+			if raptor_utilities.getOSPlatform().startswith("win"):
+				metatarget = "$(PARSETARGET)"
+			else:
+				metatarget = "'$(PARSETARGET)'"
+			generateDepsOptions = "-MD -MF%s -MT%s" % (adepfilename, metatarget)
+			self.depfiles.append((adepfilename, metatarget))
+			try:
+				os.makedirs(os.path.dirname(adepfilename))
+			except Exception, e:
+				self.log.Debug("Couldn't make bldinf outputpath for dependency generation")
+
+		config_macros = (aBuildPlatform['PLATMACROS']).split()
+
+		if not key in self.__PreProcessedContent:
+
+			preProcessor = PreProcessor(self.__gnucpp, '-undef -nostdinc ' + generateDepsOptions + ' ',
+										'-I', '-D', '-include', self.log)
+			preProcessor.filename = self.filename
+
+			# always have the current directory on the include path
+			preProcessor.addIncludePath('.')
+
+			# the SYSTEMINCLUDE directories defined in the build config
+			# should be on the include path. This is added mainly to support
+			# Feature Variation as SYSTEMINCLUDE is usually empty at this point.
+			systemIncludes = aBuildPlatform['SYSTEMINCLUDE']
+			if systemIncludes:
+				preProcessor.addIncludePaths(systemIncludes.split())
+
+			preInclude = aBuildPlatform['VARIANT_HRH']
+
+			# for non-Feature Variant builds, the directory containing the HRH should
+			# be on the include path
+			if not aBuildPlatform['ISFEATUREVARIANT']:
+				preProcessor.addIncludePath(preInclude.Dir())
+
+			# and EPOCROOT/epoc32/include
+			preProcessor.addIncludePath(aBuildPlatform['EPOCROOT'].Append('epoc32/include'))
+
+			# and the directory containing the bld.inf file
+			if self.__RootLocation is not None and str(self.__RootLocation) != "":
+				preProcessor.addIncludePath(self.__RootLocation)
+
+			# and the directory containing the file we are processing
+			preProcessor.addIncludePath(self.filename.Dir())
+
+			# there is always a pre-include file
+			preProcessor.setPreIncludeFile(preInclude)
+
+			macros = ["SBSV2"]
+
+			if config_macros:
+				macros.extend(config_macros)
+
+			if macros:
+				for macro in macros:
+					preProcessor.addMacro(macro + "=_____" +macro)
+
+			# extra "raw" macros that do not need protecting
+			preProcessor.addMacro("__GNUC__=3")
+
+			preProcessorOutput = preProcessor.preprocess()
+
+			# Resurrect preprocessing replacements
+			pattern = r'([\\|/]| |) ?_____(('+macros[0]+')'
+			for macro in macros[1:]:
+				pattern += r'|('+macro+r')'
+
+			pattern += r'\s*)'
+			# Work on all Macros in one substitution.
+			text = re.sub(pattern, r"\1\2", preProcessorOutput)
+			text = re.sub(r"\n[\t ]*", r"\n", text)
+
+			self.__PreProcessedContent[key] = text
+
+		return self.__PreProcessedContent[key]
+
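+def _example_macro_resurrection():
+	# Editor's illustrative sketch, not called by the build: macros are passed
+	# to the preprocessor as NAME=_____NAME and the "_____" guard is stripped
+	# again afterwards, so the macro names survive preprocessing unexpanded.
+	# This is a one-macro version of the pattern built in getContent() above;
+	# the sample text is hypothetical.
+	macros = ["SBSV2"]
+	pattern = r'([\\|/]| |) ?_____((' + macros[0] + r')\s*)'
+	return re.sub(pattern, r"\1\2", "MACRO _____SBSV2 armv5")	# "MACRO SBSV2 armv5"
+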
+class MMPFile(MetaDataFile):
+	"""A generic representation of a Symbian metadata file
+
+	Symbian metadata files are subject to preprocessing, primarily with macros based
+	on the selected build platform.  This class provides a generic means of wrapping
+	up the preprocessing of such files."""
+
+	def __init__(self, aFilename, gnucpp, bldinf, depfiles, log=None):
+		"""
+		@param aFilename	An MMP, bld.inf or other preprocessable build spec file
+		@param gnucpp 		location of GNU CPP
+		@param bldinf		the bld.inf file this mmp was specified in
+		@param depfiles         list to fill with mmp dependency files
+		@param log 		A class with Debug(<string>), Info(<string>) and Error(<string>) methods
+		"""
+		super(MMPFile, self).__init__(aFilename, gnucpp, depfiles, str(bldinf.filename.Dir()),  log)
+		self.__bldinf = bldinf
+		self.depfiles = depfiles
+
+		self.__gnucpp = gnucpp
+		if gnucpp is None:
+			raise ValueError('gnucpp must be set')
+
+	def depspath(self, platform):
+	   """ Where does dependency information go relative to platform's SBS_BUILD_DIR?
+	       Subclasses should redefine this
+	   """
+	   return self.__bldinf.outputpath(platform) + "/" + self.filename.File() + '.' + platform['key_md5'] + ".d"
+
+class Export(object):
+	"""Single processed PRJ_EXPORTS or PRJ_TESTEXPORTS entry from a bld.inf file"""
+
+	def getPossiblyQuotedStrings(cls,spec):
+		""" 	Split a string based on whitespace
+			but keep double quoted substrings together.
+		"""
+		inquotes=False
+		intokengap=False
+		sourcedest=[]
+		word = 0
+		for c in spec:
+			if c == '"':
+				if inquotes:
+					inquotes = False
+					word += 1
+					intokengap = True
+				else:
+					inquotes = True
+					intokengap = False
+				pass
+			elif c == ' ' or c == '\t':
+				if inquotes:
+					if len(sourcedest) == word:
+						sourcedest.append(c)
+					else:
+						sourcedest[word] += c
+				else:
+					if intokengap:
+						# gobble unquoted spaces
+						pass
+					else:
+						word += 1
+						intokengap=True
+				pass
+			else:
+				intokengap = False
+				if len(sourcedest) == word:
+					sourcedest.append(c)
+				else:
+					sourcedest[word] += c
+
+		return sourcedest
+
+	getPossiblyQuotedStrings = classmethod(getPossiblyQuotedStrings)
+
+
+	def __init__(self, aBldInfFile, aExportsLine, aType):
+		"""
+		Rules from the OS library for convenience:
+
+		For PRJ_TESTEXPORTS
+		source_file_1 [destination_file]
+		source_file_n [destination_file]
+		If the source file is listed with a relative path, the path will
+	 	  be considered relative to the directory containing the bld.inf file.
+		If a destination file is not specified, the source file will be copied
+		  to the directory containing the bld.inf file.
+		If a relative path is specified with the destination file, the path
+		  will be considered relative to directory containing the bld.inf file.
+
+		For PRJ_EXPORTS
+		source_file_1 [destination_file]
+		source_file_n [destination_file]
+		:zip zip_file [destination_path]
+
+		Note that:
+		If a source file is listed with a relative path, the path will be
+		considered relative to the directory containing the bld.inf file.
+
+		If a destination file is not specified, the source file will be copied
+		to epoc32\include\.
+
+		If a destination file is specified with the relative path, the path will
+		be considered relative to directory epoc32\include\.
+
+		If a destination begins with a drive letter, then the file is copied to
+		epoc32\data\<drive_letter>\<path>. For example,
+
+			mydata.dat e:\appdata\mydata.dat
+			copies mydata.dat to epoc32\data\e\appdata\mydata.dat.
+			You can use any drive letter between A and Z.
+
+		A line can start with the preface :zip. This instructs the build tools
+		to unzip the specified zip file to the specified destination path. If a
+		destination path is not specified, the source file will be unzipped in
+		the root directory.
+
+
+		"""
+
+		# Work out what action is required - unzip or copy?
+		action = "copy"
+		typematch = re.match(r'^\s*(?P<type>:zip\s+)?(?P<spec>[^\s].*[^\s])\s*$',aExportsLine, re.I)
+
+		spec = typematch.group('spec')
+		if spec == None:
+			raise ValueError('must specify at least a source file for an export')
+
+		if typematch.group('type') is not None:
+			action = "unzip"
+
+		# Split the spec into source and destination but take care
+		# to allow filenames with quoted strings.
+		exportEntries = Export.getPossiblyQuotedStrings(spec)
+
+		# Get the source path as specified by the bld.inf
+		source_spec = exportEntries.pop(0).replace(' ','%20')
+
+		# Resolve the source file
+		sourcepath = generic_path.Path(raptor_utilities.resolveSymbianPath(str(aBldInfFile), source_spec))
+
+		# Find it if the case of the filename is wrong:
+		# Carry on even if we don't find it
+		foundfile = sourcepath.FindCaseless()
+		if foundfile != None:
+			source = str(foundfile).replace(' ','%20')
+		else:
+			source = str(sourcepath).replace(' ','%20')
+
+
+		# Get the destination path as specified by the bld.inf
+		if len(exportEntries) > 0:
+			dest_spec = exportEntries.pop(0).replace(' ','%20')
+		else:
+			dest_spec = None
+		# Destination list - list of destinations.  For the WINSCW resource building stage,
+		# files exported to the emulated drives end up in several locations.  For example,
+		# PRJ_[TEST]EXPORTS
+		# 1234ABCD.SPD		z:/private/10009876/policy/1234ABCD.spd
+		# needs to end up copied in
+		# epoc32/data/z/private/10009876/policy/1234ABCD.spd *and* in
+		# epoc32/release/winscw/udeb/z/private/10009876/policy/1234ABCD.spd *and* in
+		# epoc32/release/winscw/urel/z/private/10009876/policy/1234ABCD.spd
+		dest_list = []
+
+		# Resolve the destination if one is specified
+		if dest_spec:
+			# check for troublesome characters
+			if ':' in dest_spec and not re.search('^[a-z]:', dest_spec, re.I):
+				raise ValueError("invalid filename " + dest_spec)
+
+			dest_spec = dest_spec.replace(' ','%20')
+			aSubType=""
+			if action == "unzip":
+				aSubType=":zip"
+				dest_spec = dest_spec.rstrip("\\/")
+
+			# Get the export destination(s) - note this can be a list of strings or just a string.
+			dest_list = raptor_utilities.resolveSymbianPath(str(aBldInfFile), dest_spec, aType, aSubType)
+
+			def process_dest(aDest):
+				if dest_spec.endswith('/') or  dest_spec.endswith('\\'):
+					m = generic_path.Path(source)
+					aDest += '/'+m.File()
+				return aDest
+
+			if isinstance(dest_list, list):
+				# Process each file in the list
+				dest_list = map(process_dest, dest_list)
+			else:
+				# Process the single destination
+				dest_list = process_dest(dest_list)
+
+		else:
+			# No destination was specified so we assume an appropriate one
+
+			dest_filename=generic_path.Path(source).File()
+
+			if aType == "PRJ_EXPORTS":
+				if action == "copy":
+					destination = '$(EPOCROOT)/epoc32/include/'+dest_filename
+				elif action == "unzip":
+					destination = '$(EPOCROOT)'
+			elif aType == "PRJ_TESTEXPORTS":
+				d = aBldInfFile.Dir()
+				if action == "copy":
+					destination = str(d.Append(dest_filename))
+				elif action == "unzip":
+					destination = "$(EPOCROOT)"
+			else:
+				raise ValueError("Export type should be 'PRJ_EXPORTS' or 'PRJ_TESTEXPORTS'. It was: "+str(aType))
+
+
+		self.__Source = source
+		if len(dest_list) > 0: # If the list has length > 0, this means there are several export destinations.
+			self.__Destination = dest_list
+		else: # Otherwise the list has length zero, so there is only a single export destination.
+			self.__Destination = destination
+		self.__Action = action
+
+	def getSource(self):
+		return self.__Source
+
+	def getDestination(self):
+		return self.__Destination # Note that this could be either a list, or a string, depending on the export destination
+
+	def getAction(self):
+		return self.__Action
+
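+def _example_export_split():
+	# Editor's illustrative sketch, not called by the build: when an export
+	# line is split, a double-quoted name keeps its embedded spaces as one
+	# token (the file names here are hypothetical).
+	# Expected result: ['my data file.dat', 'e:\\appdata\\mydata.dat']
+	return Export.getPossiblyQuotedStrings('"my data file.dat" e:\\appdata\\mydata.dat')
+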
+class ExtensionmakefileEntry(object):
+	def __init__(self, aGnuLine, aBldInfFile, tmp):
+
+		self.__BldInfFile = aBldInfFile
+		bldInfLocation = self.__BldInfFile.Dir()
+		biloc = str(bldInfLocation)
+		extInfLocation = tmp.filename.Dir()
+		eiloc = str(extInfLocation)
+
+		if eiloc is None or eiloc == "":
+			eiloc="." # Someone building with a relative raptor path
+		if biloc is None or biloc == "":
+			biloc="." # Someone building with a relative raptor path
+
+		self.__StandardVariables = {}
+		# Relative step-down to the root - let's try ignoring this for now, as it
+		# should amount to the same thing in a world where absolute paths are king
+		self.__StandardVariables['TO_ROOT'] = ""
+		# Top-level bld.inf location
+		self.__StandardVariables['TO_BLDINF'] = biloc
+		self.__StandardVariables['EXTENSION_ROOT'] = eiloc
+
+		# Get the directory and filename from the full path containing the extension makefile
+		self.__FullPath = generic_path.Join(eiloc,aGnuLine)
+		self.__FullPath = self.__FullPath.GetLocalString()
+		self.__Filename = os.path.split(self.__FullPath)[1]
+		self.__Directory = os.path.split(self.__FullPath)[0]
+
+	def getMakefileName(self):
+		return self.__Filename
+
+	def getMakeDirectory(self):
+		return self.__Directory
+
+	def getStandardVariables(self):
+		return self.__StandardVariables
+
+class Extension(object):
+	"""Single processed PRJ_EXTENSIONS or PRJ_TESTEXTENSIONS START EXTENSIONS...END block
+	from a bld.inf file"""
+
+	def __init__(self, aBldInfFile, aStartLine, aOptionLines, aBuildPlatform, aRaptor):
+		self.__BldInfFile = aBldInfFile
+		self.__Options = {}
+		self.interface = ""
+		self.__Raptor = aRaptor
+
+		makefile = ""
+		makefileMatch = re.search(r'^\s*START EXTENSION\s+(?P<MAKEFILE>\S+)\s*(?P<NAMETAG>\S*)$', aStartLine, re.I)
+
+		self.__RawMakefile = ""
+
+		if (makefileMatch):
+			self.__RawMakefile = makefileMatch.group('MAKEFILE')
+			self.nametag = makefileMatch.group('NAMETAG').lower()
+
+			# Ensure all \'s are translated into /'s if required
+			self.interface = self.__RawMakefile
+			self.interface = self.interface.replace("\\", "/").replace("/", ".")
+
+		# To support standalone testing, TEMs prefixed with '$(' are assumed to start
+		# with a makefile variable and hence to be fully located during FLM operation
+		if self.__RawMakefile.startswith("$("):
+			self.__Makefile = self.__RawMakefile + ".mk"
+		else:
+			self.__Makefile = '$(MAKEFILE_TEMPLATES)/' + self.__RawMakefile + ".mk"
+
+		for optionLine in aOptionLines:
+			optionMatch = re.search(r'^\s*(OPTION\s+)?(?P<VARIABLE>\S+)\s+(?P<VALUE>\S+.*)$',optionLine, re.I)
+			if optionMatch:
+				self.__Options[optionMatch.group('VARIABLE').upper()] = optionMatch.group('VALUE')
+
+		bldInfLocation = self.__BldInfFile.Dir()
+
+		biloc = str(bldInfLocation)
+		if biloc is None or biloc == "":
+			biloc="." # Someone building with a relative raptor path
+
+		extInfLocation = aStartLine.filename.Dir()
+
+		eiloc = str(extInfLocation)
+		if eiloc is None or eiloc == "":
+			eiloc="." # Someone building with a relative raptor path
+
+		self.__StandardVariables = {}
+		# Relative step-down to the root - let's try ignoring this for now, as it
+		# should amount to the same thing in a world where absolute paths are king
+		self.__StandardVariables['TO_ROOT'] = ""
+		# Top-level bld.inf location
+		self.__StandardVariables['TO_BLDINF'] = biloc
+		# Location of bld.inf file containing the current EXTENSION block
+		self.__StandardVariables['EXTENSION_ROOT'] = eiloc
+
+		# If the named interface exists then this is not a Template Extension Makefile,
+		# so there is no .meta file to look for and nothing more needs to be done here
+		try:
+			self.__Raptor.cache.FindNamedInterface(str(self.interface), aBuildPlatform['CACHEID'])
+		except KeyError: # This means that this Raptor doesn't have the interface self.interface, so we are in a TEM
+			# Read the extension meta file and get the default options from it; a meta file is compulsory when a TEM is used
+			metaFilename = "%s/epoc32/tools/makefile_templates/%s.meta" % (aBuildPlatform['EPOCROOT'], self.__RawMakefile)
+			metaFile = None
+			try:
+				metaFile = open(metaFilename, "r")
+			except IOError, e:
+				self.__warn("Extension: %s - cannot open Meta file: %s" % (self.__RawMakefile, metaFilename))
+
+			if metaFile:
+				for line in metaFile.readlines():
+					defaultOptionMatch = re.search(r'^OPTION\s+(?P<VARIABLE>\S+)\s+(?P<VALUE>\S+.*)$',line, re.I)
+					if defaultOptionMatch and defaultOptionMatch.group('VARIABLE').upper() not in self.__Options.keys():
+						self.__Options[defaultOptionMatch.group('VARIABLE').upper()] = defaultOptionMatch.group('VALUE')
+
+				metaFile.close()
+
+	def __warn(self, format, *extras):
+		if (self.__Raptor):
+			self.__Raptor.Warn(format, *extras)
+
+	def getIdentifier(self):
+		return re.sub (r'\\|\/|\$|\(|\)', '_', self.__RawMakefile)
+
+	def getMakefile(self):
+		return self.__Makefile
+
+	def getOptions(self):
+		return self.__Options
+
+	def getStandardVariables(self):
+		return self.__StandardVariables
+
+class MMPFileEntry(object):
+	def __init__(self, aFilename, aTestOption, aARMOption):
+		self.filename = aFilename
+		self.testoption = aTestOption
+		if aARMOption:
+			self.armoption = True
+		else:
+			self.armoption = False
+
+
+class BldInfFile(MetaDataFile):
+	"""Representation of a Symbian bld.inf file"""
+
+	def __init__(self, aFilename, gnucpp, depfiles, log=None):
+		MetaDataFile.__init__(self, aFilename, gnucpp, depfiles, None, log)
+		self.__Raptor = log
+		self.testManual = 0
+		self.testAuto = 0
+	# Generic
+
+	def getBuildPlatforms(self, aBuildPlatform):
+		platformList = []
+
+		for platformLine in self.__getSection(aBuildPlatform, 'PRJ_PLATFORMS'):
+			for platformEntry in platformLine.split():
+				platformList.append(platformEntry)
+
+		return platformList
+
+	# Build Platform Specific
+	def getMMPList(self, aBuildPlatform, aType="PRJ_MMPFILES"):
+		mmpFileList=[]
+		gnuList = []
+		makefileList = []
+		extFound = False
+		m = None
+
+		hashValue = {'mmpFileList': [] , 'gnuList': [], 'makefileList' : []}
+
+		for mmpFileEntry in self.__getSection(aBuildPlatform, aType):
+
+			actualBldInfRoot = mmpFileEntry.getFilename()
+			n = re.match('\s*(?P<makefiletype>(GNUMAKEFILE|N?MAKEFILE))\s+(?P<extmakefile>[^ ]+)\s*(support|manual)?\s*(?P<invalid>\S+.*)?\s*$',mmpFileEntry,re.I)
+			if n:
+
+				if (n.groupdict()['invalid']):
+					self.log.Error("%s (%d) : invalid .mmp file qualifier \"%s\"", mmpFileEntry.filename, mmpFileEntry.getLineNumber(), n.groupdict()['invalid'])
+				if raptor_utilities.getOSFileSystem() == "unix":
+					self.log.Warn("NMAKEFILE/GNUMAKEFILE/MAKEFILE keywords not supported on Linux")
+				else:
+					extmakefilearg = n.groupdict()['extmakefile']
+					bldInfDir = actualBldInfRoot.Dir()
+					extmakefilename = bldInfDir.Append(extmakefilearg)
+					extmakefile = ExtensionmakefileEntry(extmakefilearg, self.filename, mmpFileEntry)
+
+					if (n.groupdict()['makefiletype']).upper() == "GNUMAKEFILE":
+						gnuList.append(extmakefile)
+					else:
+						makefileList.append(extmakefile)
+			else:
+				# For PRJ_MMPFILES the only supported qualifier is build_as_arm.
+				# For PRJ_TESTMMPFILES the supported qualifiers are support, tidy, ignore, manual and build_as_arm
+				if aType.upper()=="PRJ_TESTMMPFILES":
+					m = re.match('\s*(?P<name>[^ ]+)\s*(?P<baa>build_as_arm)?\s*(?P<support>support)?\s*(?P<ignore>ignore)?\s*(?P<tidy>tidy)?\s*(?P<manual>manual)?\s*(?P<invalid>\S+.*)?\s*$', mmpFileEntry, re.I)
+				else:
+					m = re.match('\s*(?P<name>[^ ]+)\s*(?P<baa>build_as_arm)?\s*(?P<invalid>\S+.*)?\s*$', mmpFileEntry, re.I)
+
+			if m:
+				if (m.groupdict()['invalid']):
+					self.log.Error("%s (%d) : invalid .mmp file qualifier \"%s\"", mmpFileEntry.filename, mmpFileEntry.getLineNumber(), m.groupdict()['invalid'])
+
+				mmpFileName = m.groupdict()['name']
+				testmmpoption = "auto" # Setup tests to be automatic by default
+				tokens = m.groupdict()
+				for key,item in tokens.iteritems():
+					if key=="manual" and item=="manual":
+						testmmpoption = "manual"
+					elif key=="support" and item=="support":
+						testmmpoption = "support"
+					elif key=="ignore" and item=="ignore":
+						testmmpoption = "ignore"
+
+				buildasarm = False
+				if  m.groupdict()['baa']:
+					if m.groupdict()['baa'].lower() == 'build_as_arm':
+						buildasarm = True
+
+				if not mmpFileName.lower().endswith('.mmp'):
+					mmpFileName += '.mmp'
+				bldInfDir = actualBldInfRoot.Dir()
+				try:
+					mmpFileName = bldInfDir.Append(mmpFileName)
+					mmpfe = MMPFileEntry(mmpFileName, testmmpoption, buildasarm)
+					mmpFileList.append(mmpfe)
+				except ValueError, e:
+					self.log.Error("invalid .mmp file name: %s" % str(e))
+
+				m = None
+
+
+		hashValue['mmpFileList'] = mmpFileList
+		hashValue['gnuList'] = gnuList
+		hashValue['makefileList'] = makefileList
+
+		return hashValue
+
+	# Return a list of gnumakefiles used in the bld.inf
+	def getExtensionmakefileList(self, aBuildPlatform, aType="PRJ_MMPFILES",aString = ""):
+		extMakefileList=[]
+		m = None
+		for extmakeFileEntry in self.__getSection(aBuildPlatform, aType):
+
+			actualBldInfRoot = extmakeFileEntry.filename
+			if aType.upper()=="PRJ_TESTMMPFILES":
+				m = re.match('\s*GNUMAKEFILE\s+(?P<extmakefile>[^ ]+)\s*(?P<support>support)?\s*(?P<ignore>ignore)?\s*(?P<tidy>tidy)?\s*(?P<manual>manual)?\s*(?P<invalid>\S+.*)?\s*$',extmakeFileEntry,re.I)
+			else:
+				if aString == "gnumakefile":
+					m = re.match('\s*GNUMAKEFILE\s+(?P<extmakefile>[^ ]+)\s*(?P<invalid>\S+.*)?\s*$',extmakeFileEntry,re.I)
+				elif aString == "nmakefile":
+					m = re.match('\s*NMAKEFILE\s+(?P<extmakefile>[^ ]+)\s*(?P<invalid>\S+.*)?\s*$',extmakeFileEntry,re.I)
+				elif aString == "makefile":
+					m = re.match('\s*MAKEFILE\s+(?P<extmakefile>[^ ]+)\s*(?P<invalid>\S+.*)?\s*$',extmakeFileEntry,re.I)
+			if m:
+				if (m.groupdict()['invalid']):
+					self.log.Error("%s (%d) : invalid extension makefile qualifier \"%s\"", extmakeFileEntry.filename, extmakeFileEntry.getLineNumber(), m.groupdict()['invalid'])
+
+				extmakefilearg = m.groupdict()['extmakefile']
+				bldInfDir = actualBldInfRoot.Dir()
+				extmakefilename = bldInfDir.Append(extmakefilearg)
+				extmakefile = ExtensionmakefileEntry(extmakefilearg, self.filename, extmakeFileEntry)
+				extMakefileList.append(extmakefile)
+				m = None
+
+		return extMakefileList
+
+	def getTestExtensionmakefileList(self,aBuildPlatform,aString=""):
+		return self.getExtensionmakefileList(aBuildPlatform,"PRJ_TESTMMPFILES",aString)
+
+	def getTestMMPList(self, aBuildPlatform):
+		return self.getMMPList(aBuildPlatform, "PRJ_TESTMMPFILES")
+
+	def getRomTestType(self, aBuildPlatform):
+		testMMPList = self.getTestMMPList(aBuildPlatform)
+		for testMMPFileEntry in testMMPList['mmpFileList']:
+			if aBuildPlatform["TESTCODE"]:
+				# Calculate test type (manual or auto)
+				if testMMPFileEntry.testoption == "manual":
+					self.testManual += 1
+				if not (testMMPFileEntry.testoption == "support" or testMMPFileEntry.testoption == "manual" or testMMPFileEntry.testoption == "ignore"):
+					self.testAuto += 1
+		if self.testManual and self.testAuto:
+			return 'BOTH'
+		elif self.testAuto:
+			return 'AUTO'
+		elif self.testManual:
+			return 'MANUAL'
+		else:
+			return 'NONE'
+
+	def getExports(self, aBuildPlatform, aType="PRJ_EXPORTS"):
+		exportList = []
+
+		for exportLine in self.__getSection(aBuildPlatform, aType):
+
+			if not re.match(r'\S+', exportLine):
+				continue
+
+			try:
+				exportList.append(Export(exportLine.getFilename(), exportLine, aType))
+			except ValueError,e:
+				self.log.Error(str(e))
+
+		return exportList
+
+	def getTestExports(self, aBuildPlatform):
+		return self.getExports(aBuildPlatform, "PRJ_TESTEXPORTS")
+
+	def getExtensions(self, aBuildPlatform, aType="PRJ_EXTENSIONS"):
+		extensionObjects = []
+		start = ""
+		options = []
+
+		for extensionLine in self.__getSection(aBuildPlatform, aType):
+			if (re.search(r'^\s*START ',extensionLine, re.I)):
+				start = extensionLine
+			elif re.search(r'^\s*END\s*$',extensionLine, re.I):
+				extensionObjects.append(Extension(self.filename, start, options, aBuildPlatform, self.__Raptor))
+				start = ""
+				options = []
+			elif re.search(r'^\s*$',extensionLine, re.I):
+				continue
+			elif start:
+				options.append(extensionLine)
+
+		return extensionObjects
+
+	def getTestExtensions(self, aBuildPlatform):
+		return self.getExtensions(aBuildPlatform, "PRJ_TESTEXTENSIONS")
+
+	def __getSection(self, aBuildPlatform, aSection):
+
+		activeSection = False
+		sectionContent = []
+		lineContent = re.split(r'\n', self.getContent(aBuildPlatform));
+
+		currentBldInfFile = self.filename
+		currentLineNumber = 0
+
+		for line in lineContent:
+			if line.startswith("#"):
+				commentDetail = getPreProcessorCommentDetail(line)
+				currentBldInfFile = commentDetail[0]
+				currentLineNumber = commentDetail[1]-1
+				continue
+
+			currentLineNumber += 1
+
+			if not re.match(r'.*\S+', line):
+				continue
+			elif re.match(r'\s*' + aSection + r'\s*$', line, re.I):
+				activeSection = True
+			elif re.match(r'\s*PRJ_\w+\s*$', line, re.I):
+				activeSection = False
+			elif activeSection:
+				sectionContent.append(PreProcessedLine(line, currentBldInfFile, currentLineNumber))
+
+		return sectionContent
+
+	@staticmethod
+	def outputPathFragment(bldinfpath):
+		"""Return a relative path that uniquely identifies this bldinf file
+		   whilst being short so that it can be appended to epoc32/build.
+		   The  build product of a particular bld.inf may be placed in here.
+		   This affects its TEMs and its MMPs"""
+
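+		# Editor's note, illustrative only (the path is hypothetical): a bld.inf at
+		#	/src/mycomp/group/bld.inf
+		# maps to "mycomp/c_<16 hex chars>" - the bld.inf name and a trailing
+		# "group" directory are dropped and the md5-based suffix keeps the
+		# fragment unique even if two components share a directory name.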
+		absroot_str = os.path.abspath(str(bldinfpath)).lower().replace("\\","/")
+
+		uniqueid = hashlib.md5()
+		uniqueid.update(absroot_str)
+
+		specnamecomponents = (re.sub("^[A-Za-z]:", "", absroot_str)).split('/') # split, removing any drive identifier (if present)
+
+		pathlist=[]
+		while len(specnamecomponents) > 0:
+			top = specnamecomponents.pop()
+			if top.endswith('.inf'):
+				continue
+			elif top == 'group':
+				continue
+			else:
+				pathlist = [top]
+				break
+
+		pathlist.append("c_"+uniqueid.hexdigest()[:16])
+		return "/".join(pathlist)
+
+	def outputpath(self, platform):
+		""" The full path where product from this bldinf is created."""
+		return str(platform['SBS_BUILD_DIR']) + "/" + BldInfFile.outputPathFragment(self.filename)
+
+	def depspath(self, platform):
+	   """ Where does dependency information go relative to platform's SBS_BUILD_DIR?
+	       Subclasses should redefine this
+	   """
+	   return self.outputpath(platform) + "/bldinf." + platform['key_md5'] + ".d"
+
+
+
+class MMPRaptorBackend(MMPBackend):
+	"""A parser "backend" for the MMP language
+
+	This is used to map recognised MMP syntax onto a buildspec """
+
+	# Support priorities, with case-fixed mappings for use
+	epoc32priorities = {
+		'low':'Low',
+		'background':'Background',
+		'foreground':'Foreground',
+		'high':'High',
+		'windowserver':'WindowServer',
+		'fileserver':'FileServer',
+		'realtimeserver':'RealTimeServer',
+		'supervisor':'SuperVisor'
+		}
+
+	# Known capability flags with associated bitwise operations
+	supportedCapabilities = {
+		'tcb':(1<<0),
+		'commdd':(1<<1),
+		'powermgmt':(1<<2),
+		'multimediadd':(1<<3),
+		'readdevicedata':(1<<4),
+		'writedevicedata':(1<<5),
+		'drm':(1<<6),
+		'trustedui':(1<<7),
+		'protserv':(1<<8),
+		'diskadmin':(1<<9),
+		'networkcontrol':(1<<10),
+		'allfiles':(1<<11),
+		'swevent':(1<<12),
+		'networkservices':(1<<13),
+		'localservices':(1<<14),
+		'readuserdata':(1<<15),
+		'writeuserdata':(1<<16),
+		'location':(1<<17),
+		'surroundingsdd':(1<<18),
+		'userenvironment':(1<<19),
+	# Old capability names have zero value
+		'root':0,
+		'mediadd':0,
+		'readsystemdata':0,
+		'writesystemdata':0,
+		'sounddd':0,
+		'uidd':0,
+		'killanyprocess':0,
+		'devman':0,
+		'phonenetwork':0,
+		'localnetwork':0
+	  	}
+
+	library_re = re.compile(r"^(?P<name>[^{]+?)(?P<version>{(?P<major>[0-9]+)\.(?P<minor>[0-9]+)})?(\.(lib|dso))?$",re.I)
+
+
+	def __init__(self, aRaptor, aMmpfilename, aBldInfFilename):
+		super(MMPRaptorBackend,self).__init__()
+		self.platformblock = None
+		self.__Raptor = aRaptor
+		self.__debug("-----+++++ %s " % aMmpfilename)
+		self.BuildVariant = raptor_data.Variant(name = "mmp")
+		self.ApplyVariants = []
+		self.ResourceVariants = []
+		self.BitmapVariants = []
+		self.StringTableVariants = []
+		self.__bldInfFilename = aBldInfFilename
+		self.__targettype = "UNKNOWN"
+		self.__currentMmpFile = aMmpfilename
+		self.__defFileRoot = self.__currentMmpFile
+		self.__currentLineNumber = 0
+		self.__sourcepath = raptor_utilities.resolveSymbianPath(self.__currentMmpFile, "")
+		self.__userinclude = ""
+		self.__systeminclude = ""
+		self.__bitmapSourcepath = self.__sourcepath
+		self.__current_resource = ""
+		self.__resourceFiles = []
+		self.__pageConflict = []
+		self.__debuggable = ""
+		self.__compressionKeyword = ""
+		self.sources = []
+		self.capabilities = []
+
+		self.__TARGET = ""
+		self.__TARGETEXT = ""
+		self.deffile = ""
+		self.__LINKAS = ""
+		self.nostrictdef = False
+		self.featureVariant = False
+
+		self.__currentResourceVariant = None
+		self.__currentStringTableVariant = None
+		self.__explicitversion = False
+		self.__versionhex = ""
+
+		# "ALL" capability calculated based on the total capabilities currently supported
+		allCapabilities = 0
+		for supportedCapability in MMPRaptorBackend.supportedCapabilities.keys():
+			allCapabilities = allCapabilities | MMPRaptorBackend.supportedCapabilities[supportedCapability]
+		MMPRaptorBackend.supportedCapabilities['all'] = allCapabilities
+
+	# Permit unit-testing output without a Raptor context
+	def __debug(self, format, *extras):
+		if (self.__Raptor):
+			self.__Raptor.Debug(format, *extras)
+
+	def __warn(self, format, *extras):
+		if (self.__Raptor):
+			self.__Raptor.Warn(format, *extras)
+
+	def doPreProcessorComment(self,s,loc,toks):
+		commentDetail = getPreProcessorCommentDetail(toks[0])
+		self.__currentMmpFile = commentDetail[0].GetLocalString()
+		self.__currentLineNumber = commentDetail[1]
+		self.__debug("Current file %s, line number %s\n"  % (self.__currentMmpFile,str(self.__currentLineNumber)))
+		return "OK"
+
+	def doBlankLine(self,s,loc,toks):
+		self.__currentLineNumber += 1
+
+	def doStartPlatform(self,s,loc,toks):
+		self.__currentLineNumber += 1
+		self.__debug( "Start Platform block "+toks[0])
+		self.platformblock = toks[0]
+		return "OK"
+
+	def doEndPlatform(self,s,loc,toks):
+		self.__currentLineNumber += 1
+		self.__debug( "Finalise platform " + self.platformblock)
+		return "OK"
+
+	def doSetSwitch(self,s,loc,toks):
+		self.__currentLineNumber += 1
+		prefix=""
+		varname = toks[0].upper()
+
+		# A bright spark made the option name the same as
+		# the env variable. One will override the other if we pass this
+		# on to make.  Add a prefix to prevent the clash.
+		if varname=='ARMINC':
+			prefix="SET_"
+			self.__debug( "Set switch "+toks[0]+" ON")
+			self.BuildVariant.AddOperation(raptor_data.Set(prefix+varname, "1"))
+
+		elif varname=='NOSTRICTDEF':
+			self.nostrictdef = True
+			self.__debug( "Set switch "+toks[0]+" ON")
+			self.BuildVariant.AddOperation(raptor_data.Set(prefix+varname, "1"))
+
+		elif varname == 'PAGED':
+			self.BuildVariant.AddOperation(raptor_data.Set(varname, "1"))
+			self.__debug( "Set switch PAGE ON")
+			self.BuildVariant.AddOperation(raptor_data.Set("PAGEDCODE_OPTION", "paged"))
+			self.__debug( "Set switch PAGEDCODE ON")
+			self.BuildVariant.AddOperation(raptor_data.Set("PAGEDDATA_OPTION", "paged"))
+			self.__debug( "Set data PAGEDDATA ON")
+			self.__pageConflict.append("PAGEDCODE")
+			self.__pageConflict.append("PAGEDDATA")
+
+		elif varname == 'UNPAGED':
+			self.BuildVariant.AddOperation(raptor_data.Set("PAGED", "0"))
+			self.__debug( "Set switch PAGED OFF")
+			self.BuildVariant.AddOperation(raptor_data.Set("PAGEDCODE_OPTION", "unpaged"))
+			self.__debug( "Set switch PAGEDCODE OFF")
+			self.BuildVariant.AddOperation(raptor_data.Set("PAGEDDATA_OPTION", "unpaged"))
+			self.__debug( "Set data PAGEDDATA OFF")
+			self.__pageConflict.append("UNPAGEDCODE")
+			self.__pageConflict.append("UNPAGEDDATA")
+
+		elif varname == 'PAGEDCODE':
+			self.BuildVariant.AddOperation(raptor_data.Set("PAGEDCODE_OPTION", "paged"))
+			self.__debug( "Set switch " + varname + " ON")
+			self.__pageConflict.append(varname)
+
+		elif varname == 'PAGEDDATA':
+			self.BuildVariant.AddOperation(raptor_data.Set("PAGEDDATA_OPTION", "paged"))
+			self.__debug( "Set switch " + varname + " ON")
+			self.__pageConflict.append(varname)
+
+		elif varname == 'UNPAGEDCODE':
+			self.BuildVariant.AddOperation(raptor_data.Set("PAGEDCODE_OPTION", "unpaged"))
+			self.__debug( "Set switch " + varname + " ON")
+			self.__pageConflict.append(varname)
+		elif varname == 'UNPAGEDDATA':
+			self.BuildVariant.AddOperation(raptor_data.Set("PAGEDDATA_OPTION", "unpaged"))
+			self.__debug( "Set switch " + varname + " ON")
+			self.__pageConflict.append(varname)
+
+		elif varname == 'NOLINKTIMECODEGENERATION':
+			self.BuildVariant.AddOperation(raptor_data.Set("LTCG",""))
+			self.__debug( "Set switch " + varname + " OFF")
+		elif varname == 'NOMULTIFILECOMPILATION':
+			self.BuildVariant.AddOperation(raptor_data.Set("MULTIFILE_ENABLED",""))
+			self.__debug( "Set switch " + varname + " OFF")
+
+		elif varname == 'DEBUGGABLE':
+			if self.__debuggable != "udeb":
+				self.__debuggable = "udeb urel"
+			else:
+				self.__Raptor.Warn("DEBUGGABLE keyword ignored as DEBUGGABLE_UDEBONLY is already specified")
+		elif varname == 'DEBUGGABLE_UDEBONLY':
+			if self.__debuggable != "":
+				self.__Raptor.Warn("DEBUGGABLE keyword has no effect as DEBUGGABLE or DEBUGGABLE_UDEBONLY is already set")
+			self.__debuggable = "udeb"
+		elif varname == 'FEATUREVARIANT':
+			self.BuildVariant.AddOperation(raptor_data.Set(varname,"1"))
+			self.featureVariant = True
+		elif varname in ['COMPRESSTARGET', 'NOCOMPRESSTARGET', 'INFLATECOMPRESSTARGET', 'BYTEPAIRCOMPRESSTARGET']:
+			if self.__compressionKeyword:
+				self.__Raptor.Warn("%s keyword in %s overrides earlier use of %s" % (varname, self.__currentMmpFile, self.__compressionKeyword))
+				self.BuildVariant.AddOperation(raptor_data.Set(self.__compressionKeyword,""))
+				self.__debug( "Set switch " + varname + " OFF")
+			self.BuildVariant.AddOperation(raptor_data.Set(varname,"1"))
+			self.__debug( "Set switch " + varname + " ON")
+			self.__compressionKeyword = varname
+		else:
+			self.__debug( "Set switch "+toks[0]+" ON")
+			self.BuildVariant.AddOperation(raptor_data.Set(prefix+varname, "1"))
+
+		return "OK"
+
+	def doAssignment(self,s,loc,toks):
+		self.__currentLineNumber += 1
+		varname = toks[0].upper()
+		if varname=='TARGET':
+			(self.__TARGET, self.__TARGETEXT) = os.path.splitext(toks[1])
+			self.__TARGETEXT = self.__TARGETEXT.lstrip('.')
+
+			self.BuildVariant.AddOperation(raptor_data.Set("REQUESTEDTARGETEXT", self.__TARGETEXT.lower()))
+
+			lowercase_TARGET = self.__TARGET.lower()
+			self.__debug("Set "+toks[0]+" to " + lowercase_TARGET)
+			self.__debug("Set REQUESTEDTARGETEXT to " + self.__TARGETEXT.lower())
+
+			self.BuildVariant.AddOperation(raptor_data.Set("TARGET", self.__TARGET))
+			self.BuildVariant.AddOperation(raptor_data.Set("TARGET_lower", lowercase_TARGET))
+			if  lowercase_TARGET !=  self.__TARGET:
+				self.__debug("TARGET is not lowercase: '%s' - might cause BC problems." % self.__TARGET)
+		elif varname=='TARGETTYPE':
+			self.__debug("Set "+toks[0]+" to " + str(toks[1]))
+			self.__targettype=toks[1]
+			if  self.__targettype.lower() == "none":
+				self.BuildVariant.AddOperation(raptor_data.Set("TARGET", ""))
+				self.BuildVariant.AddOperation(raptor_data.Set("TARGET_lower",""))
+				self.BuildVariant.AddOperation(raptor_data.Set("REQUESTEDTARGETEXT", ""))
+			self.BuildVariant.AddOperation(raptor_data.Set(varname,toks[1].lower()))
+
+		elif varname=='TARGETPATH':
+			value = toks[1].lower().replace('\\','/')
+			self.__debug("Set "+varname+" to " + value)
+			self.BuildVariant.AddOperation(raptor_data.Set(varname, value))
+
+		elif varname=='OPTION' or varname=='LINKEROPTION':
+			self.__debug("Set "+toks[1]+varname+" to " + str(toks[2]))
+			self.BuildVariant.AddOperation(raptor_data.Append(varname+"_"+toks[1].upper()," ".join(toks[2])))
+
+			# Warn about OPTION ARMASM
+			if "armasm" in toks[1].lower():
+				self.__Raptor.Warn(varname+" ARMASM has no effect (use OPTION ARMCC).")
+
+		elif varname=='OPTION_REPLACE':
+			# Warn about OPTION_REPLACE ARMASM
+			if "armasm" in toks[1].lower():
+				self.__Raptor.Warn("OPTION_REPLACE ARMASM has no effect (use OPTION_REPLACE ARMCC).")
+			else:
+				args = " ".join(toks[2])
+
+				searchReplacePairs = self.resolveOptionReplace(args)
+
+				for searchReplacePair in searchReplacePairs:
+					self.__debug("Append %s to OPTION_REPLACE_%s", searchReplacePair, toks[1].upper())
+					self.BuildVariant.AddOperation(raptor_data.Append(varname+"_"+toks[1].upper(),searchReplacePair))
+
+		elif varname=='SYSTEMINCLUDE' or varname=='USERINCLUDE':
+			for path in toks[1]:
+				resolved = raptor_utilities.resolveSymbianPath(self.__currentMmpFile, path)
+				self.BuildVariant.AddOperation(raptor_data.Append(varname,resolved))
+
+				if varname=='SYSTEMINCLUDE':
+					self.__systeminclude += ' ' + resolved
+					self.__debug("  %s = %s",varname, self.__systeminclude)
+				else:
+					self.__userinclude += ' ' + resolved
+					self.__debug("  %s = %s",varname, self.__userinclude)
+
+				self.__debug("Appending %s to %s",resolved, varname)
+
+			self.__systeminclude = self.__systeminclude.strip()
+			self.__systeminclude = self.__systeminclude.rstrip('\/')
+			self.__userinclude = self.__userinclude.strip()
+			self.__userinclude = self.__userinclude.rstrip('\/')
+
+		elif varname=='EXPORTLIBRARY':
+			# Remove extension from the EXPORTLIBRARY name
+			libName = toks[1].rsplit(".", 1)[0]
+			self.__debug("Set "+varname+" to " + libName)
+			self.BuildVariant.AddOperation(raptor_data.Set(varname,"".join(libName)))
+
+		elif varname=='CAPABILITY':
+			for cap in toks[1]:
+				self.__debug("Setting  "+toks[0]+": " + cap)
+				self.capabilities.append(cap)
+		elif varname=='DEFFILE':
+			self.__defFileRoot = self.__currentMmpFile
+			self.deffile = toks[1]
+		elif varname=='LINKAS':
+			self.__debug("Set "+toks[0]+"  OPTION to " + str(toks[1]))
+			self.__LINKAS = toks[1]
+			self.BuildVariant.AddOperation(raptor_data.Set(varname, toks[1]))
+		elif varname=='SECUREID' or varname=='VENDORID':
+			hexoutput = MMPRaptorBackend.canonicalUID(toks[1])
+			self.__debug("Set "+toks[0]+"  OPTION to " + hexoutput)
+			self.BuildVariant.AddOperation(raptor_data.Set(varname, hexoutput))
+		elif varname=='VERSION':
+			if toks[-1] == "EXPLICIT":
+				self.__explicitversion = True
+				self.BuildVariant.AddOperation(raptor_data.Set("EXPLICITVERSION", "1"))
+
+			vm = re.match(r'^(\d+)(\.(\d+))?$', toks[1])
+			if vm is not None:
+				version = vm.groups()
+				# the major version number
+				major = int(version[0],10)
+
+				# add in the minor number
+				minor = 0
+				if version[1] is not None:
+					minor = int(version[2],10)
+				else:
+					self.__Raptor.Warn("VERSION (%s) missing '.minor' in %s, using '.0'" % (toks[1],self.__currentMmpFile))
+
+				self.__versionhex = "%04x%04x" % (major, minor)
+				self.BuildVariant.AddOperation(raptor_data.Set(varname, "%d.%d" %(major, minor)))
+				self.BuildVariant.AddOperation(raptor_data.Set(varname+"HEX", self.__versionhex))
+				self.__debug("Set "+toks[0]+"  OPTION to " + toks[1])
+				self.__debug("Set "+toks[0]+"HEX OPTION to " + "%04x%04x" % (major,minor))
+
+			else:
+				self.__Raptor.Warn("Invalid version supplied to VERSION (%s), using default value" % toks[1])
+
+		elif varname=='EPOCHEAPSIZE':
+			# Standardise on sending hex numbers to the FLMS.
+
+			if toks[1].lower().startswith('0x'):
+				min = long(toks[1],16)
+			else:
+				min = long(toks[1],10)
+
+			if toks[2].lower().startswith('0x'):
+				max = long(toks[2],16)
+			else:
+				max = long(toks[2],10)
+
+			self.BuildVariant.AddOperation(raptor_data.Set(varname+"MIN", "%x" % min))
+			self.__debug("Set "+varname+"MIN  OPTION to '%x' (hex)" % min )
+			self.BuildVariant.AddOperation(raptor_data.Set(varname+"MAX", "%x" % max))
+			self.__debug("Set "+varname+"MAX  OPTION to '%x' (hex)" % max )
+
+			# Some toolchains require decimal versions of the min/max values, converted to KB and
+			# rounded up to the next 1KB boundary
+			min_dec_kb = (int(min) + 1023) / 1024
+			max_dec_kb = (int(max) + 1023) / 1024
+			self.BuildVariant.AddOperation(raptor_data.Set(varname+"MIN_DEC_KB", "%d" % min_dec_kb))
+			self.__debug("Set "+varname+"MIN  OPTION KB to '%d' (dec)" % min_dec_kb )
+			self.BuildVariant.AddOperation(raptor_data.Set(varname+"MAX_DEC_KB", "%d" % max_dec_kb))
+			self.__debug("Set "+varname+"MAX  OPTION KB to '%d' (dec)" % max_dec_kb )
+
+		elif varname=='EPOCSTACKSIZE':
+			if toks[1].lower().startswith('0x'):
+				stack = long(toks[1],16)
+			else:
+				stack = long(toks[1],10)
+			self.BuildVariant.AddOperation(raptor_data.Set(varname, "%x" % stack))
+			self.__debug("Set "+varname+"  OPTION to '%x' (hex)" % stack  )
+		elif varname=='EPOCPROCESSPRIORITY':
+			# low, background, foreground, high, windowserver, fileserver, realtimeserver or supervisor
+			# These are case insensitive in metadata entries, but must be mapped to a static case pattern for use
+			prio = toks[1].lower()
+
+			# NOTE: Original validation here didn't actually work.  This has been corrected to provide an error, but probably needs re-examination.
+			if not MMPRaptorBackend.epoc32priorities.has_key(prio):
+				self.__Raptor.Error("Priority setting '%s' is not a valid priority - should be one of %s.", prio, MMPRaptorBackend.epoc32priorities.values())
+			else:
+				self.__debug("Set "+toks[0]+" to " +  MMPRaptorBackend.epoc32priorities[prio])
+				self.BuildVariant.AddOperation(raptor_data.Set(varname,MMPRaptorBackend.epoc32priorities[prio]))
+		elif varname=='ROMTARGET' or varname=='RAMTARGET':
+			if len(toks) == 1:
+				self.__debug("Set "+toks[0]+" to <none>" )
+				self.BuildVariant.AddOperation(raptor_data.Set(varname,"<none>"))
+			else:
+				toks1 = str(toks[1]).replace("\\","/")
+				if "," in toks1:
+					toks1 = re.sub("[,'\[\]]", "", toks1).replace("//","/")
+				self.__debug("Set "+toks[0]+" to " + toks1)
+				self.BuildVariant.AddOperation(raptor_data.Set(varname,toks1))
+		elif varname=='APPLY':
+			self.ApplyVariants.append(toks[1])
+		else:
+			self.__debug("Set "+toks[0]+" to " + str(toks[1]))
+			self.BuildVariant.AddOperation(raptor_data.Set(varname,"".join(toks[1])))
+
+			if varname=='LINKAS':
+				self.__LINKAS = toks[1]
+
+		return "OK"
+
+	def doAppend(self,s,loc,toks):
+		"""MMP command"""
+		self.__currentLineNumber += 1
+		name=toks[0].upper()
+		if len(toks) == 1:
+			# list can be empty e.g. MACRO _FRED_ when _FRED_ is defined in the HRH
+			# causes us to see just "MACRO" in the input - it is valid to ignore this
+			self.__debug("Empty append list for " + name)
+			return "OK"
+		self.__debug("Append to "+name+" the values: " +str(toks[1]))
+
+		if name=='MACRO':
+			name='MMPDEFS'
+		elif name=='LANG':
+			# don't break the environment variable
+			name='LANGUAGES'
+
+		for item in toks[1]:
+			if name=='MMPDEFS':
+				# Unquote any macros since the FLM does it anyhow
+				if item.startswith('"') and item.endswith('"') \
+				or item.startswith("'") and item.endswith("'"):
+					item = item.strip("'\"")
+			if name=='LIBRARY' or name=='DEBUGLIBRARY':
+				im = MMPRaptorBackend.library_re.match(item)
+				if not im:
+					self.__error("LIBRARY: %s Seems to have an invalid name.\nExpected xxxx.lib or xxxx.dso\n where xxxx might be\n\tname or \n\tname(n,m) where n is a major version number and m is a minor version number\n" %item)
+				d = im.groupdict()
+
+				item = d['name']
+				if d['version'] is not None:
+					item += "{%04x%04x}" % (int(d['major']), int(d['minor']))
+				item += ".dso"
+			elif name=='STATICLIBRARY':
+				# the FLM will decide on the ending appropriate to the platform
+				item = re.sub(r"^(.*)\.[Ll][Ii][Bb]$",r"\1", item)
+			elif name=="LANGUAGES":
+				item = item.lower()
+			elif (name=="WIN32_LIBRARY" and (item.startswith(".") or re.search(r'[\\|/]',item))) \
+				or (name=="WIN32_RESOURCE"):
+				# Relatively pathed win32 libraries, and all win32 resources, are resolved in relation
+				# to the wrapper bld.inf file in which their .mmp file is specified.  This equates to
+				# the current working directory in ABLD operation.
+				item = raptor_utilities.resolveSymbianPath(self.__bldInfFilename, item)
+				
+			self.BuildVariant.AddOperation(raptor_data.Append(name,item," "))
+			
+			# maintain a debug library list, the same as LIBRARY but with DEBUGLIBRARY values
+			# appended as they are encountered
+			if name=='LIBRARY' or name=='DEBUGLIBRARY':
+				self.BuildVariant.AddOperation(raptor_data.Append("LIBRARY_DEBUG",item," "))			
+
+		return "OK"
+
+	def canonicalUID(number):
+		""" convert a UID string into an 8 digit hexadecimal string without leading 0x """
+		if number.lower().startswith("0x"):
+			n = int(number,16)
+		else:
+			n = int(number,10)
+
+		return "%08x" % n
+
+	canonicalUID = staticmethod(canonicalUID)
+
+	def doUIDAssignment(self,s,loc,toks):
+		"""A single UID command results in a number of spec variables"""
+		self.__currentLineNumber += 1
+
+		hexoutput = MMPRaptorBackend.canonicalUID(toks[1][0])
+		self.__debug( "Set UID2 to %s" % hexoutput )
+		self.BuildVariant.AddOperation(raptor_data.Set("UID2", hexoutput))
+
+		if len(toks[1]) > 1:
+			hexoutput = MMPRaptorBackend.canonicalUID(toks[1][1])
+			self.__debug( "Set UID3 to %s" % hexoutput)
+			self.BuildVariant.AddOperation(raptor_data.Set("UID3", hexoutput))
+
+		self.__debug( "done set UID")
+		return "OK"
+
+	def doSourcePathAssignment(self,s,loc,toks):
+		self.__currentLineNumber += 1
+		self.__sourcepath = raptor_utilities.resolveSymbianPath(self.__currentMmpFile, toks[1])
+		self.__debug( "Remembering self.sourcepath state:  "+str(toks[0])+" is now " + self.__sourcepath)
+		self.__debug("selfcurrentMmpFile: " + self.__currentMmpFile)
+		return "OK"
+
+
+	def doSourceAssignment(self,s,loc,toks):
+		self.__currentLineNumber += 1
+		self.__debug( "Setting "+toks[0]+" to " + str(toks[1]))
+		for file in toks[1]:
+			# file is always relative to sourcepath but some MMP files
+			# have items that begin with a slash...
+			file = file.lstrip("/")
+			source = generic_path.Join(self.__sourcepath, file)
+
+			# If the resolved source begins with '$(EPOCROOT)' (i.e. the SOURCEPATH itself began
+			# with a '/'), then don't look up the caseless version, since we don't know at this
+			# time what $(EPOCROOT) will evaluate to.
+			if source.GetLocalString().startswith('$(EPOCROOT)'):
+				self.sources.append(str(source))	
+				self.__debug("Append SOURCE " + str(source))
+
+			else:
+				foundsource = source.FindCaseless()
+				if foundsource == None:
+					# Hope that the file will be generated later
+					self.__debug("Sourcefile not found: %s" % source)
+					foundsource = source
+
+				self.sources.append(str(foundsource))	
+				self.__debug("Append SOURCE " + str(foundsource))
+
+
+		self.__debug("		sourcepath: " + self.__sourcepath)
+		return "OK"
+
+	# Resource
+
+	def doOldResourceAssignment(self,s,loc,toks):
+		# Technically deprecated, but still used, so...
+		self.__currentLineNumber += 1
+		self.__debug("Processing old-style "+toks[0]+" "+str(toks[1]))
+
+		sysRes = (toks[0].lower() == "systemresource")
+
+		for rss in toks[1]:
+			variant = raptor_data.Variant()
+
+			source = generic_path.Join(self.__sourcepath, rss)
+			variant.AddOperation(raptor_data.Set("SOURCE", str(source)))
+			self.__resourceFiles.append(str(source))
+
+			target = source.File().rsplit(".", 1)[0]	# remove the extension
+			variant.AddOperation(raptor_data.Set("TARGET", target))
+			variant.AddOperation(raptor_data.Set("TARGET_lower", target.lower()))
+
+			header = target.lower() + ".rsg"			# filename policy
+			variant.AddOperation(raptor_data.Set("HEADER", header))
+
+			if sysRes:
+				dsrtp = self.getDefaultSystemResourceTargetPath()
+				variant.AddOperation(raptor_data.Set("TARGETPATH", dsrtp))
+
+			self.ResourceVariants.append(variant)
+
+		return "OK"
+
+	def getDefaultSystemResourceTargetPath(self):
+		# the default systemresource TARGETPATH value should come from the
+		# configuration rather than being hard-coded here. Then again, this
+		# should really be deprecated away into oblivion...
+		return "system/data"
+
+
+	def getDefaultResourceTargetPath(self, targettype):
+		# the different default TARGETPATH values should come from the
+		# configuration rather than being hard-coded here.
+		if targettype == "plugin":
+			return "resource/plugins"
+		if targettype == "pdl":
+			return "resource/printers"
+		return ""
+
+	def resolveOptionReplace(self, content):
+		"""
+		Constructs search/replace pairs based on .mmp OPTION_REPLACE entries for use on tool command lines
+		within FLMS.
+
+		Depending on what's supplied to OPTION_REPLACE <TOOL>, the core part of the <TOOL> command line
+		in the relevant FLM will have search and replace actions performed on it post-expansion (but pre-
+		any OPTION <TOOL> additions).
+
+		In terms of logic, we try to follow what ABLD does, as the current behaviour is undocumented.
+		What happens is a little inconsistent, and best described by some generic examples:
+
+			OPTION_REPLACE TOOL option replacement_value
+
+				Replace all instances of "option <existing_value>" with "option replacement_value"
+
+			OPTION_REPLACE TOOL existing_option replacement_option
+
+				Replace all instances of "existing_option" with "replacement_option".
+
+			If "existing_option" is present in isolation then a removal is performed.
+
+		Any values encountered that don't follow an option are ignored.
+		Options are identified as being prefixed with either '-' or '--'.
+
+		The front-end processes each OPTION_REPLACE entry and then appends one or more search/replace pairs
+		to an OPTION_REPLACE_<TOOL> variable in the following format:
+
+		     search<->replace
+		"""
+		# Note that, for compatibility reasons, the following is mostly a port to Python of the corresponding
+		# ABLD Perl, and hence maintains ABLD's idiosyncrasies in what it achieves
+
+		searchReplacePairs = []
+		matches = re.findall("-{1,2}\S+\s*(?!-)\S*",content)
+
+		if matches:
+			# reverse so we can process as a stack whilst retaining original order
+			matches.reverse()
+
+			while (len(matches)):
+				match = matches.pop()
+
+				standaloneMatch = re.match('^(?P<option>\S+)\s+(?P<value>\S+)$', match)
+
+				if (standaloneMatch):
+					# Option listed standalone with a replacement value
+					# Example:
+					# 	OPTION_REPLACE ARMCC --cpu 6
+					# Intention:
+					# 	Replace instances of  "--cpu <something>" with "--cpu 6"
+
+					# Substitute any existing "option <existing_value>" instances with a single word
+					# "@@<existing_value>" for later replacement
+					searchReplacePairs.append('%s <->@@' % standaloneMatch.group('option'))
+
+					# Replace "@@<existing_value>" entries from above with "option <new_value>" entries
+					# A pattern substitution is used to cover pre-existing values
+					searchReplacePairs.append('@@%%<->%s %s' % (standaloneMatch.group('option'), standaloneMatch.group('value')))
+				else:
+					# Options specified in search/replace pairs with optional values
+					# Example:
+					#	OPTION_REPLACE ARMCC --O2 --O3
+					# Intention:
+					#	Replace instances of "--O2" with "--O3"
+
+					# At this point we will be looking at just the search option - there may or may not
+					# be a replacement to consider
+					search = match
+					replace = ""
+					if len(matches):
+						replace = matches.pop()
+
+					searchReplacePairs.append('%s<->%s' % (search, replace))
+
+			# Replace spaces to maintain word-based grouping in downstream makefile lists
+			for i in range(0,len(searchReplacePairs)):
+				searchReplacePairs[i] = searchReplacePairs[i].replace(' ','%20')
+
+		return searchReplacePairs
+
+	def doStartResource(self,s,loc,toks):
+		self.__currentLineNumber += 1
+		self.__debug("Start RESOURCE "+toks[1])
+
+		self.__current_resource = generic_path.Path(self.__sourcepath, toks[1])
+		self.__current_resource = str(self.__current_resource)
+
+		self.__debug("sourcepath: " + self.__sourcepath)
+		self.__debug("self.__current_resource source: " + toks[1])
+		self.__debug("adjusted self.__current_resource source=" + self.__current_resource)
+
+		self.__currentResourceVariant = raptor_data.Variant()
+		self.__currentResourceVariant.AddOperation(raptor_data.Set("SOURCE", self.__current_resource))
+		self.__resourceFiles.append(self.__current_resource)
+
+		# The target name is the basename of the resource without the extension
+		# e.g. "/fred/129ab34f.rss" would have a target name of "129ab34f"
+		target = self.__current_resource.rsplit("/",1)[-1]
+		target = target.rsplit(".",1)[0]
+		self.__currentResourceVariant.AddOperation(raptor_data.Set("TARGET", target))
+		self.__currentResourceVariant.AddOperation(raptor_data.Set("TARGET_lower", target.lower()))
+		self.__headerspecified = False
+		self.__headeronlyspecified = False
+		self.__current_resource_header = target.lower() + ".rsg"
+
+		return "OK"
+
+	def doResourceAssignment(self,s,loc,toks):
+		""" Assign variables for resource files """
+		self.__currentLineNumber += 1
+		varname = toks[0].upper() # the mmp keyword
+		varvalue = "".join(toks[1])
+
+		# Get rid of any .rsc extension because the build system
+		# needs to have it stripped off to calculate other names
+		# for other purposes, and we aren't going to make it
+		# optional anyhow.
+		if varname == "TARGET":
+			target_withext = varvalue.replace("\\","/").rsplit("/",1)[-1]
+			target = target_withext.rsplit(".",1)[0]
+			self.__current_resource_header = target.lower() + ".rsg"
+			self.__currentResourceVariant.AddOperation(raptor_data.Set("TARGET_lower", target.lower()))
+			self.__debug("Set resource "+varname+" to " + target)
+			self.__currentResourceVariant.AddOperation(raptor_data.Set(varname,target))
+		if varname == "TARGETPATH":
+			varvalue=varvalue.replace('\\','/')
+			self.__debug("Set resource "+varname+" to " + varvalue)
+			self.__currentResourceVariant.AddOperation(raptor_data.Set(varname,varvalue))
+		else:
+			self.__debug("Set resource "+varname+" to " + varvalue)
+			self.__currentResourceVariant.AddOperation(raptor_data.Set(varname,varvalue))
+		return "OK"
+
+	def doResourceAppend(self,s,loc,toks):
+		self.__currentLineNumber += 1
+		self.__debug("Append resource to "+toks[0]+" the values: " +str(toks[1]))
+		varname = toks[0].upper()
+
+		# we cannot use LANG as it interferes with the environment
+		if varname == "LANG":
+			varname = "LANGUAGES"
+
+		for item in toks[1]:
+			if varname == "LANGUAGES":
+				item = item.lower()
+			self.__currentResourceVariant.AddOperation(raptor_data.Append(varname,item))
+		return "OK"
+
+	def doResourceSetSwitch(self,s,loc,toks):
+		self.__currentLineNumber += 1
+		name = toks[0].upper()
+
+		if name == "HEADER":
+			self.__headerspecified = True
+
+		elif name == "HEADERONLY":
+			self.__headeronlyspecified = True
+
+		else:
+			value = "1"
+			self.__debug( "Set resource switch " + name + " " + value)
+			self.__currentResourceVariant.AddOperation(raptor_data.Set(name, value))
+
+		return "OK"
+
+	def doEndResource(self,s,loc,toks):
+		self.__currentLineNumber += 1
+
+		# The header name can change depending on whether a TARGET was defined, so it must be appended at the end
+		if self.__headerspecified:
+			self.__debug("Set resource switch HEADER " + self.__current_resource_header)
+			self.__currentResourceVariant.AddOperation(raptor_data.Set("HEADER", self.__current_resource_header))
+
+		if self.__headeronlyspecified:
+			self.__debug("Set resource switch HEADERONLY " + self.__current_resource_header)
+			self.__currentResourceVariant.AddOperation(raptor_data.Set("HEADER", self.__current_resource_header))
+			self.__currentResourceVariant.AddOperation(raptor_data.Set("HEADERONLY", "True"))
+
+		self.__debug("End RESOURCE")
+		self.ResourceVariants.append(self.__currentResourceVariant)
+		self.__currentResourceVariant = None
+		self.__current_resource = ""
+		return "OK"
+
+	# Bitmap
+
+	def doStartBitmap(self,s,loc,toks):
+		self.__currentLineNumber += 1
+		self.__debug("Start BITMAP "+toks[1])
+
+		self.__currentBitmapVariant = raptor_data.Variant(name = toks[1].replace('.','_'))
+		# Use BMTARGET and BMTARGET_lower because that prevents
+		# confusion with the TARGET and TARGET_lower of our parent MMP
+		# when setting the OUTPUTPATH.  This in turn allows us to
+		# not get tripped up by multiple mbms being generated with
+		# the same name to the same directory.
+		self.__currentBitmapVariant.AddOperation(raptor_data.Set("BMTARGET", toks[1]))
+		self.__currentBitmapVariant.AddOperation(raptor_data.Set("BMTARGET_lower", toks[1].lower()))
+		self.__currentBitmapVariant.AddOperation(raptor_data.Set("SOURCE", ""))
+		return "OK"
+
+	def doBitmapAssignment(self,s,loc,toks):
+		self.__currentLineNumber += 1
+		self.__debug("Set bitmap "+toks[0]+" to " + str(toks[1]))
+		name = toks[0].upper()
+		value = "".join(toks[1])
+		if name == "TARGETPATH":
+			value = value.replace('\\','/')
+
+		self.__currentBitmapVariant.AddOperation(raptor_data.Set(name,value))
+		return "OK"
+
+	def doBitmapSourcePathAssignment(self,s,loc,toks):
+		self.__currentLineNumber += 1
+		self.__debug("Previous bitmap sourcepath:" + self.__bitmapSourcepath)
+		self.__bitmapSourcepath = raptor_utilities.resolveSymbianPath(self.__currentMmpFile, toks[1])
+		self.__debug("New bitmap sourcepath: " + self.__bitmapSourcepath)
+
+	def doBitmapSourceAssignment(self,s,loc,toks):
+		self.__currentLineNumber += 1
+		self.__debug( "Setting "+toks[0]+" to " + str(toks[1]))
+		# The first "source" is the colour depth for all the others.
+		# The depth format is b[,m] where b is the bitmap depth and m is
+		# the mask depth.
+		# Valid values for b are: 1 2 4 8 c4 c8 c12 c16 c24 c32 c32a (?)
+		# Valid values for m are: 1 8 (any number?)
+		#
+		# If m is specified then the bitmaps are in pairs: b0 m0 b1 m1...
+		# If m is not specified then there are no masks, just bitmaps: b0 b1...
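+		# e.g. (illustrative) "SOURCE c8,1 pic.bmp mask.bmp" appends
+		# "DEPTH=c8 FILE=<sourcepath>/pic.bmp DEPTH=1 FILE=<sourcepath>/mask.bmp"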
+		colordepth = toks[1][0].lower()
+		if "," in colordepth:
+			(bitmapdepth, maskdepth) = colordepth.split(",")
+		else:
+			bitmapdepth = colordepth
+			maskdepth = 0
+
+		sources=""
+		mask = False
+		for file in toks[1][1:]:
+			path = generic_path.Join(self.__bitmapSourcepath, file)
+			if sources:
+				sources += " "
+			if mask:
+				sources += "DEPTH=" + maskdepth + " FILE=" + str(path)
+			else:
+				sources += "DEPTH=" + bitmapdepth + " FILE=" + str(path)
+			if maskdepth:
+				mask = not mask
+		self.__debug("sources: " + sources)
+		self.__currentBitmapVariant.AddOperation(raptor_data.Append("SOURCE", sources))
+		return "OK"
+
+	def doBitmapSetSwitch(self,s,loc,toks):
+		self.__currentLineNumber += 1
+		self.__debug( "Set bitmap switch "+toks[0]+" ON")
+		self.__currentBitmapVariant.AddOperation(raptor_data.Set(toks[0].upper(), "1"))
+		return "OK"
+
+	def doEndBitmap(self,s,loc,toks):
+		self.__currentLineNumber += 1
+		self.__bitmapSourcepath = self.__sourcepath
+		self.BitmapVariants.append(self.__currentBitmapVariant)
+		self.__currentBitmapVariant = None
+		self.__debug("End BITMAP")
+		return "OK"
+
+	# Stringtable
+
+	def doStartStringTable(self,s,loc,toks):
+		self.__currentLineNumber += 1
+		self.__debug( "Start STRINGTABLE "+toks[1])
+
+		specstringtable = generic_path.Join(self.__sourcepath, toks[1])
+		uniqname = specstringtable.File().replace('.','_') # corrected, filename only
+		source = str(specstringtable.FindCaseless())
+
+		self.__debug("sourcepath: " + self.__sourcepath)
+		self.__debug("stringtable: " + toks[1])
+		self.__debug("adjusted stringtable source=" + source)
+
+		self.__currentStringTableVariant = raptor_data.Variant(name = uniqname)
+		self.__currentStringTableVariant.AddOperation(raptor_data.Set("SOURCE", source))
+		self.__currentStringTableVariant.AddOperation(raptor_data.Set("EXPORTPATH", ""))
+		self.__stringtableExported = False
+
+		# The target name by default is the name of the stringtable without the extension
+		# e.g. the stringtable "/fred/http.st" would have a default target name of "http"
+		stringtable_withext = specstringtable.File()
+		self.__stringtable = stringtable_withext.rsplit(".",1)[0].lower()
+		self.__currentStringTableVariant.AddOperation(raptor_data.Set("TARGET", self.__stringtable))
+
+		self.__stringtableHeaderonlyspecified = False
+
+		return "OK"
+
+	def doStringTableAssignment(self,s,loc,toks):
+		""" Assign variables for stringtables """
+		self.__currentLineNumber += 1
+		varname = toks[0].upper() # the mmp keyword
+		varvalue = "".join(toks[1])
+
+		# An EXPORTPATH value is resolved to a full Symbian path relative to the
+		# current .mmp file; all other values are passed through unchanged.
+		if varname == "EXPORTPATH":
+			finalvalue = raptor_utilities.resolveSymbianPath(self.__currentMmpFile, varvalue)
+			self.__stringtableExported = True
+		else:
+			finalvalue = varvalue
+
+		self.__debug("Set stringtable "+varname+" to " + finalvalue)
+		self.__currentStringTableVariant.AddOperation(raptor_data.Set(varname,finalvalue))
+		return "OK"
+
+	def doStringTableSetSwitch(self,s,loc,toks):
+		self.__currentLineNumber += 1
+		if toks[0].upper()== "HEADERONLY":
+			self.__stringtableHeaderonlyspecified = True
+			self.__debug( "Set stringtable switch "+toks[0]+" ON")
+			self.__currentStringTableVariant.AddOperation(raptor_data.Set(toks[0].upper(), "1"))
+		return "OK"
+
+	def doEndStringTable(self,s,loc,toks):
+		self.__currentLineNumber += 1
+
+		if not self.__stringtableExported:
+			# There was no EXPORTPATH specified for this stringtable
+			# so for our other code to be able to reference it we
+			# must add the path of the generated location to the userinclude path
+
+			ipath = "$(OUTPUTPATH)"
+			self.BuildVariant.AddOperation(raptor_data.Append("USERINCLUDE",ipath))
+			self.__userinclude += ' ' + ipath
+			self.__debug("  USERINCLUDE = %s", self.__userinclude)
+			self.__userinclude = self.__userinclude.strip()
+
+		self.StringTableVariants.append(self.__currentStringTableVariant)
+		self.__currentStringTableVariant = None
+		self.__debug("End STRINGTABLE")
+		if not self.__stringtableHeaderonlyspecified:
+			# Have to assume that this is where the cpp file will be.  This has to be maintained
+			# in sync with the FLM's idea of where this file should be.  We need a better way.
+			# Interfaces also need outputs that allow other interfaces to refer to their outputs
+			# without having to "know" where they will be.
+			self.sources.append('$(OUTPUTPATH)/' + self.__stringtable + '.cpp')
+		return "OK"
+
+
+	def doUnknownStatement(self,s,loc,toks):
+		self.__warn("%s (%d) : Unrecognised Keyword %s", self.__currentMmpFile, self.__currentLineNumber, str(toks))
+		self.__currentLineNumber += 1
+		return "OK"
+
+
+	def doUnknownBlock(self,s,loc,toks):
+		self.__warn("%s (%d) : Unrecognised Block %s", self.__currentMmpFile, self.__currentLineNumber, str(toks))
+		self.__currentLineNumber += 1
+		return "OK"
+
+	def doDeprecated(self,s,loc,toks):
+		self.__debug( "Deprecated command " + str(toks))
+		self.__warn("%s (%d) : %s is deprecated .mmp file syntax", self.__currentMmpFile, self.__currentLineNumber, str(toks))
+		self.__currentLineNumber += 1
+		return "OK"
+
+	def doNothing(self):
+		self.__currentLineNumber += 1
+		return "OK"
+
+	def finalise(self, aBuildPlatform):
+		"""Post-processing of data that is only applicable in the context of a fully
+		processed .mmp file."""
+		resolvedDefFile = ""
+
+		if self.__TARGET:
+			defaultRootName = self.__TARGET
+			if self.__TARGETEXT!="":
+				defaultRootName += "." + self.__TARGETEXT
+
+			# NOTE: Changing default .def file name based on the LINKAS argument is actually
+			# a defect, but this follows the behaviour of the current build system.
+			if (self.__LINKAS):
+				defaultRootName = self.__LINKAS
+
+			resolvedDefFile = self.resolveDefFile(defaultRootName, aBuildPlatform)
+			self.__debug("Resolved def file:  %s" % resolvedDefFile )
+			# We need to store this resolved deffile location for the FREEZE target
+			self.BuildVariant.AddOperation(raptor_data.Set("RESOLVED_DEFFILE", resolvedDefFile))
+
+		# If a def file is specified, an FLM will put in a dependency.
+		# If a def file is not specified then raptor_meta will guess a name but:
+		#	1) If the guess is wrong then the FLM will complain "no rule to make ..."
+		#	2) In some cases, e.g. plugin, 1) is not desirable as the presence of a def file
+		#		is not a necessity.  In these cases the FLM needs to know if DEFFILE
+		#		is a guess or not so it can decide if a dependency should be added.
+
+		# We check that the def file exists and that it has a non-zero size (incredible
+		# that this should be needed).
+
+		deffile_keyword="1"
+		if self.deffile == "":
+			# If the user didn't specify a deffile name then
+			# we must be guessing
+			# Let's check if our guess actually corresponds to a
+			# real file.  If it does then that confirms the guess.
+			# If there's no file then we still need to pass the name to make
+			# so it can complain about there not being a DEF file
+			# for this particular target type and fail to build this target.
+
+			deffile_keyword=""
+			try:
+				findpath = generic_path.Path(resolvedDefFile)
+				foundfile = findpath.FindCaseless()
+
+				if foundfile == None:
+					raise IOError("file not found")
+
+				self.__debug("Found DEFFILE  " + foundfile.GetLocalString())
+				rfstat = os.stat(foundfile.GetLocalString())
+
+				mode = rfstat[stat.ST_MODE]
+				if mode != None and stat.S_ISREG(mode) and rfstat[stat.ST_SIZE] > 0:
+					resolvedDefFile = str(foundfile)
+				else:
+					resolvedDefFile=""
+			except Exception,e:
+				self.__debug("While Searching for an IMPLIED  DEFFILE: %s: %s" % (str(e),str(findpath)) )
+				resolvedDefFile=""
+		else:
+			if not resolvedDefFile == "":
+				try:
+					findpath = generic_path.Path(resolvedDefFile)
+					resolvedDefFile = str(findpath.FindCaseless())
+					if resolvedDefFile=="None":
+						raise IOError("file not found")
+				except Exception,e:
+					self.__warn("While Searching for a SPECIFIED DEFFILE: %s: %s" % (str(e),str(findpath)) )
+					resolvedDefFile=""
+			else:
+				self.__warn("DEFFILE KEYWORD used (%s) but def file not resolved" % (self.deffile) )
+
+
+		self.BuildVariant.AddOperation(raptor_data.Set("DEFFILE", resolvedDefFile))
+		self.__debug("Set DEFFILE to " + resolvedDefFile)
+		self.BuildVariant.AddOperation(raptor_data.Set("DEFFILEKEYWORD", deffile_keyword))
+		self.__debug("Set DEFFILEKEYWORD to '%s'",deffile_keyword)
+
+		# if this target type has a default TARGETPATH other than "" for
+		# resources then we need to add that default to all resources which
+		# do not explicitly set the TARGETPATH themselves.
+		tp = self.getDefaultResourceTargetPath(self.getTargetType())
+		if tp:
+			for i,var in enumerate(self.ResourceVariants):
+				# does this resource specify its own TARGETPATH?
+				needTP = True
+				for op in var.ops:
+					if isinstance(op, raptor_data.Set) \
+					and op.name == "TARGETPATH":
+						needTP = False
+						break
+				if needTP:
+					self.ResourceVariants[i].AddOperation(raptor_data.Set("TARGETPATH", tp))
+
+		# some core build configurations need to know about the resource builds, and
+		# some resource building configurations need knowledge of the core build
+		for resourceFile in self.__resourceFiles:
+			self.BuildVariant.AddOperation(raptor_data.Append("RESOURCEFILES", resourceFile))
+
+		for i,var in enumerate(self.ResourceVariants):
+			self.ResourceVariants[i].AddOperation(raptor_data.Set("MAIN_TARGET_lower", self.__TARGET.lower()))
+			self.ResourceVariants[i].AddOperation(raptor_data.Set("MAIN_REQUESTEDTARGETEXT", self.__TARGETEXT.lower()))
+
+		# Create Capability variable in one SET operation (more efficient than multiple appends)
+		self.BuildVariant.AddOperation(raptor_data.Set("CAPABILITY"," ".join(self.capabilities)))
+
+		# Resolve combined capabilities as hex flags, for configurations that require them
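+		# e.g. (illustrative) CAPABILITY ReadUserData WriteUserData gives CAPABILITYFLAG1 "00018000u";
+		# CAPABILITY ALL -TCB gives "000ffffeu"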
+		capabilityFlag1 = 0
+		capabilityFlag2 = 0			# Always 0
+
+		for capability in [c.lower() for c in self.capabilities]:
+			invert = 0
+
+			if capability.startswith('-'):
+				invert = 0xffffffff
+				capability = capability.lstrip('-')
+
+			if MMPRaptorBackend.supportedCapabilities.has_key(capability):
+				capabilityFlag1 = capabilityFlag1 ^ invert
+				capabilityFlag1 = capabilityFlag1 | MMPRaptorBackend.supportedCapabilities[capability]
+				capabilityFlag1 = capabilityFlag1 ^ invert
+
+		capabilityFlag1 = "%08xu" % capabilityFlag1
+		capabilityFlag2 = "%08xu" % capabilityFlag2
+
+		self.BuildVariant.AddOperation(raptor_data.Set("CAPABILITYFLAG1", capabilityFlag1))
+		self.__debug ("Set CAPABILITYFLAG1 to " + capabilityFlag1)
+		self.BuildVariant.AddOperation(raptor_data.Set("CAPABILITYFLAG2", capabilityFlag2))
+		self.__debug ("Set CAPABILITYFLAG2 to " + capabilityFlag2)
+
+		# For non-Feature Variant builds, the location of the product include hrh file is
+		# appended to the SYSTEMINCLUDE list
+		if not aBuildPlatform['ISFEATUREVARIANT']:
+			productIncludePath = str(aBuildPlatform['VARIANT_HRH'].Dir())
+			self.BuildVariant.AddOperation(raptor_data.Append("SYSTEMINCLUDE",productIncludePath))
+			self.__debug("Appending product include location %s to SYSTEMINCLUDE",productIncludePath)
+
+		# Specifying both a PAGED* and its opposite UNPAGED* keyword in a .mmp file
+		# will generate a warning and the last keyword specified will take effect.
+		self.__pageConflict.reverse()
+		if "PAGEDCODE" in self.__pageConflict and "UNPAGEDCODE" in self.__pageConflict:
+			for x in self.__pageConflict:
+				if x == "PAGEDCODE" or x == "UNPAGEDCODE":
+					self.__Raptor.Warn("Both PAGEDCODE and UNPAGEDCODE are specified. The last one %s will take effect" % x)
+					break
+		if "PAGEDDATA" in self.__pageConflict and "UNPAGEDDATA" in self.__pageConflict:
+			for x in self.__pageConflict:
+				if x == "PAGEDDATA" or x == "UNPAGEDDATA":
+					self.__Raptor.Warn("Both PAGEDDATA and UNPAGEDDATA are specified. The last one %s will take effect" % x)
+					break
+
+		# Set Debuggable
+		self.BuildVariant.AddOperation(raptor_data.Set("DEBUGGABLE", self.__debuggable))
+
+		if self.__explicitversion:
+			self.BuildVariant.AddOperation(raptor_data.Append("UNIQUETARGETPATH","$(TARGET_lower)_$(VERSIONHEX)_$(REQUESTEDTARGETEXT)",'/'))
+		else:
+			self.BuildVariant.AddOperation(raptor_data.Append("UNIQUETARGETPATH","$(TARGET_lower)_$(REQUESTEDTARGETEXT)",'/'))
+
+		# Put the list of sourcefiles in with one Set operation - saves memory
+		# and performance over using multiple Append operations.
+		self.BuildVariant.AddOperation(raptor_data.Set("SOURCE",
+						   " ".join(self.sources)))
+
+	def getTargetType(self):
+		"""Target type in lower case - the standard format"""
+		return self.__targettype.lower()
+
+	def resolveDefFile(self, aTARGET, aBuildPlatform):
+		"""Returns a fully resolved DEFFILE entry depending on .mmp file location and TARGET, DEFFILE and NOSTRICTDEF
+		entries in the .mmp file itself (where appropriate).
+		Is able to deal with target names that have multiple '.' characters e.g. messageintercept.esockdebug.dll
+		"""
+
+		resolvedDefFile = ""
+		platform = aBuildPlatform['PLATFORM']
+
+		# Not having a default .def file directory is a pretty strong indicator that
+		# .def files aren't supported for the particular platform
+		if PlatformDefaultDefFileDir.has_key(platform):
+			(targetname,targetext) = os.path.splitext(aTARGET)
+			(defname,defext) = os.path.splitext(self.deffile)
+			if defext=="":
+				defext = ".def"
+
+			# NOTE: WORKAROUND
+			if len(targetext) > 4:
+				targetname += defext
+
+			if not self.deffile:
+				resolvedDefFile = targetname
+			else:
+				if re.search('[\\|\/]$', self.deffile):
+					# If DEFFILE is *solely* a path, signified by ending in a slash, then TARGET is the
+					# basis for the default .def filename but with the specified path as prefix
+					resolvedDefFile = self.deffile + targetname
+
+				else:
+					resolvedDefFile = defname
+
+				resolvedDefFile = resolvedDefFile.replace('~', PlatformDefaultDefFileDir[platform])
+
+			if resolvedDefFile:
+				if not self.nostrictdef:
+					resolvedDefFile += 'u'
+
+				if self.__explicitversion:
+					resolvedDefFile += '{' + self.__versionhex + '}'
+
+				resolvedDefFile += defext
+
+
+				# If a DEFFILE statement doesn't specify a path in any shape or form, prepend the default .def file
+				# location based on the platform being built
+				if not re.search('[\\\/]+', self.deffile):
+					resolvedDefFile = '../'+PlatformDefaultDefFileDir[platform]+'/'+resolvedDefFile
+
+				resolvedDefFile = raptor_utilities.resolveSymbianPath(self.__defFileRoot, resolvedDefFile, 'DEFFILE', "", str(aBuildPlatform['EPOCROOT']))
+
+		return resolvedDefFile
+
+
+def CheckedGet(self, key, default = None):
+	"""extract a value from an self and raise an exception if None.
+
+	An optional default can be set to replace a None value.
+
+	This function belongs in the Evaluator class logically. But
+	Evaluator doesn't know how to raise a Metadata error. Since
+	being able to raise a metadata error is the whole point of
+	the method, it makes sense to adapt the Evaluator class from
+	raptor_meta for the use of everything inside raptor_meta.
+
+	... so it will be added to the Evaluator class.
+	"""
+
+	value = self.Get(key)
+	if value == None:
+		if default == None:
+			raise MetaDataError("configuration " + self.buildUnit.name +
+							    " has no variable " + key)
+		else:
+			return default
+	return value
+
+raptor_data.Evaluator.CheckedGet = CheckedGet 
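+# e.g. evaluator.CheckedGet("EPOCROOT") raises MetaDataError if EPOCROOT is unset,
+# whereas evaluator.CheckedGet("TESTCODE", "") returns "" instead (illustrative usage)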
+
+
+class MetaReader(object):
+	"""Entry point class for Symbian metadata processing.
+
+	Provides a means of integrating "traditional" Symbian metadata processing
+	with the new Raptor build system."""
+
+	filesplit_re = re.compile(r"^(?P<name>.*)\.(?P<ext>[^\.]*)$")
+
+	def __init__(self, aRaptor, configsToBuild):
+		self.__Raptor = aRaptor
+		self.BuildPlatforms = []
+		self.ExportPlatforms = []
+
+		# Get the version of CPP that we are using
+		metadata = self.__Raptor.cache.FindNamedVariant("meta")
+		evaluator = self.__Raptor.GetEvaluator(None, raptor_data.BuildUnit(metadata.name, [metadata]) )
+		self.__gnucpp = evaluator.CheckedGet("GNUCPP")
+		self.__defaultplatforms = evaluator.CheckedGet("DEFAULT_PLATFORMS")
+		self.__basedefaultplatforms = evaluator.CheckedGet("BASE_DEFAULT_PLATFORMS")
+		self.__baseuserdefaultplatforms = evaluator.CheckedGet("BASE_USER_DEFAULT_PLATFORMS")
+
+		# Only read each variant.cfg once
+		variantCfgs = {}
+
+		# Group the list of configurations into "build platforms".
+		# A build platform is a set of configurations which share
+		# the same metadata. In other words, a set of configurations
+		# for which the bld.inf and MMP files pre-process to exactly
+		# the same text.
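+		# e.g. (illustrative) armv5_urel and armv5_udeb normally preprocess the metadata
+		# identically and so fall into the same build platform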
+		platforms = {}
+
+		# Exports are not "platform dependent" but they are configuration
+		# dependent because different configs can have different EPOCROOT
+		# and VARIANT_HRH values. Each "build platform" has one associated
+		# "export platform" but several "build platforms" can be associated
+		# with the same "export platform".
+		exports = {}
+
+		self.__Raptor.Debug("MetaReader: configsToBuild:  %s", [b.name for b in configsToBuild])
+		for buildConfig in configsToBuild:
+			# get everything we need to know about the configuration
+			evaluator = self.__Raptor.GetEvaluator(None, buildConfig)
+
+			detail = {}
+			detail['PLATFORM'] = evaluator.CheckedGet("TRADITIONAL_PLATFORM")
+			epocroot = evaluator.CheckedGet("EPOCROOT")
+			detail['EPOCROOT'] = generic_path.Path(epocroot)
+
+			sbs_build_dir = evaluator.CheckedGet("SBS_BUILD_DIR")
+			detail['SBS_BUILD_DIR'] = generic_path.Path(sbs_build_dir)
+			flm_export_dir = evaluator.CheckedGet("FLM_EXPORT_DIR")
+			detail['FLM_EXPORT_DIR'] = generic_path.Path(flm_export_dir)
+			detail['CACHEID'] = flm_export_dir
+			if raptor_utilities.getOSPlatform().startswith("win"):
+				detail['PLATMACROS'] = evaluator.CheckedGet("PLATMACROS.WINDOWS")
+			else:
+				detail['PLATMACROS'] = evaluator.CheckedGet("PLATMACROS.LINUX")
+
+			# Apply OS variant provided we are not ignoring this
+			if not self.__Raptor.ignoreOsDetection:
+				self.__Raptor.Debug("Automatic OS detection enabled.")
+				self.ApplyOSVariant(buildConfig, epocroot)
+			else: # We are ignoring OS detection, so no variant will be applied
+				self.__Raptor.Debug("Automatic OS detection disabled.")
+
+			# is this a feature variant config or an ordinary variant
+			fv = evaluator.Get("FEATUREVARIANTNAME")
+			if fv:
+				variantHdr = evaluator.CheckedGet("VARIANT_HRH")
+				variantHRH = generic_path.Path(variantHdr)
+				detail['ISFEATUREVARIANT'] = True
+			else:
+				variantCfg = evaluator.CheckedGet("VARIANT_CFG")
+				variantCfg = generic_path.Path(variantCfg)
+				if not variantCfg in variantCfgs:
+					# get VARIANT_HRH from the variant.cfg file
+					varCfg = getVariantCfgDetail(detail['EPOCROOT'], variantCfg)
+					variantCfgs[variantCfg] = varCfg['VARIANT_HRH']
+					# we expect to always build ABIv2
+					if not 'ENABLE_ABIV2_MODE' in varCfg:
+						self.__Raptor.Warn("missing flag ENABLE_ABIV2_MODE in %s file. ABIV1 builds are not supported.",
+										   str(variantCfg))
+				variantHRH = variantCfgs[variantCfg]
+				detail['ISFEATUREVARIANT'] = False
+
+			detail['VARIANT_HRH'] = variantHRH
+			self.__Raptor.Info("'%s' uses variant hrh file '%s'", buildConfig.name, variantHRH)
+			detail['SYSTEMINCLUDE'] = evaluator.CheckedGet("SYSTEMINCLUDE")
+
+
+			# find all the interface names we need
+			ifaceTypes = evaluator.CheckedGet("INTERFACE_TYPES")
+			interfaces = ifaceTypes.split()
+
+			for iface in interfaces:
+				detail[iface] = evaluator.CheckedGet("INTERFACE." + iface)
+
+			# not test code unless positively specified
+			detail['TESTCODE'] = evaluator.CheckedGet("TESTCODE", "")
+
+			# make a key that identifies this platform uniquely
+			# - used to tell us whether we have done the pre-processing
+			# we need already using another platform with compatible values.
+
+			key = str(detail['VARIANT_HRH']) \
+			 	+ str(detail['EPOCROOT']) \
+		    	+ detail['SYSTEMINCLUDE'] \
+		    	+ detail['PLATFORM']
+
+			# Keep a short version of the key for use in filenames.
+			uniq = hashlib.md5()
+			uniq.update(key)
+
+			detail['key'] = key
+			detail['key_md5'] = "p_" + uniq.hexdigest()
+			del uniq
+
+			# compare this configuration to the ones we have already seen
+
+			# Is this an unseen export platform?
+			# concatenate all the values we care about in a fixed order
+			# and use that as a signature for the exports.
+			items = ['EPOCROOT', 'VARIANT_HRH', 'SYSTEMINCLUDE', 'TESTCODE', 'export']
+			export = ""
+			for i in  items:
+				if i in detail:
+					export += i + str(detail[i])
+
+			if export in exports:
+				# add this configuration to an existing export platform
+				index = exports[export]
+				self.ExportPlatforms[index]['configs'].append(buildConfig)
+			else:
+				# create a new export platform with this configuration
+				exports[export] = len(self.ExportPlatforms)
+				exp = copy.copy(detail)
+				exp['PLATFORM'] = 'EXPORT'
+				exp['configs']  = [buildConfig]
+				self.ExportPlatforms.append(exp)
+
+			# Is this an unseen build platform?
+			# concatenate all the values we care about in a fixed order
+			# and use that as a signature for the platform.
+			items = ['PLATFORM', 'EPOCROOT', 'VARIANT_HRH', 'SYSTEMINCLUDE', 'TESTCODE']
+			if raptor_utilities.getOSPlatform().startswith("win"):
+				items.append('PLATMACROS.WINDOWS')
+			else:
+				items.append('PLATMACROS.LINUX')
+
+			items.extend(interfaces)
+			platform = ""
+			for i in  items:
+				if i in detail:
+					platform += i + str(detail[i])
+
+			if platform in platforms:
+				# add this configuration to an existing build platform
+				index = platforms[platform]
+				self.BuildPlatforms[index]['configs'].append(buildConfig)
+			else:
+				# create a new build platform with this configuration
+				platforms[platform] = len(self.BuildPlatforms)
+				detail['configs'] = [buildConfig]
+				self.BuildPlatforms.append(detail)
+
+		# one platform is picked as the "default" for extracting things
+		# that are supposedly platform independent (e.g. PRJ_PLATFORMS)
+		self.defaultPlatform = self.ExportPlatforms[0]
+
+
+	def ReadBldInfFiles(self, aComponentList, doexport, dobuild = True):
+		"""Take a list of bld.inf files and return a list of build specs.
+
+		The returned specification nodes will be suitable for all the build
+		configurations under consideration (using Filter nodes where required).
+		"""
+
+		# we need a Filter node per export platform
+		exportNodes = []
+		for i,ep in enumerate(self.ExportPlatforms):
+			filter = raptor_data.Filter(name = "export_" + str(i))
+
+			# what configurations is this node active for?
+			for config in ep['configs']:
+				filter.AddConfigCondition(config.name)
+
+			exportNodes.append(filter)
+
+		# we need a Filter node per build platform
+		platformNodes = []
+		for i,bp in enumerate(self.BuildPlatforms):
+			filter = raptor_data.Filter(name = "build_" + str(i))
+
+			# what configurations is this node active for?
+			for config in bp['configs']:
+				filter.AddConfigCondition(config.name)
+
+			# platform-wide data
+			platformVar = raptor_data.Variant()
+			platformVar.AddOperation(raptor_data.Set("PRODUCT_INCLUDE",
+													 str(bp['VARIANT_HRH'])))
+
+			filter.AddVariant(platformVar)
+			platformNodes.append(filter)
+
+		# check that each bld.inf exists and add a Specification node for it
+		# to the nodes of the export and build platforms that it supports.
+		for c in aComponentList:
+			if c.bldinf_filename.isFile():
+				self.__Raptor.Info("Processing %s", str(c.bldinf_filename))
+				try:
+					self.AddComponentNodes(c, exportNodes, platformNodes)
+
+				except MetaDataError, e:
+					self.__Raptor.Error(e.Text, bldinf=str(c.bldinf_filename))
+					if not self.__Raptor.keepGoing:
+						return []
+			else:
+				self.__Raptor.Error("build info file does not exist", bldinf=str(c.bldinf_filename))
+				if not self.__Raptor.keepGoing:
+					return []
+
+		# now we have the top-level structure in place...
+		#
+		# <filter exports 1>
+		#		<spec bld.inf 1 />
+		#		<spec bld.inf 2 />
+		#		<spec bld.inf N /> </filter>
+		# <filter build 1>
+		#		<spec bld.inf 1 />
+		#		<spec bld.inf 2 />
+		#		<spec bld.inf N /> </filter>
+		# <filter build 2>
+		#		<spec bld.inf 1 />
+		#		<spec bld.inf 2 />
+		#		<spec bld.inf N /> </filter>
+		# <filter build 3>
+		#		<spec bld.inf 1 />
+		#		<spec bld.inf 2 />
+		#		<spec bld.inf N /> </filter>
+		#
+		# assuming that every bld.inf builds for every platform and all
+		# exports go to the same place. clearly, it is more likely that
+		# some filters have less than N child nodes. in bigger builds there
+		# will also be more than one export platform.
+
+		# we now need to process the EXPORTS for all the bld.inf nodes
+		# before we can do anything else (because raptor itself must do
+		# some exports before the MMP files that include them can be
+		# processed).
+		if doexport:
+			for i,p in enumerate(exportNodes):
+				exportPlatform = self.ExportPlatforms[i]
+				for s in p.GetChildSpecs():
+					try:
+						self.ProcessExports(s, exportPlatform)
+
+					except MetaDataError, e:
+						self.__Raptor.Error("%s",e.Text)
+						if not self.__Raptor.keepGoing:
+							return []
+		else:
+			self.__Raptor.Info("Not Processing Exports (--noexport enabled)")
+
+		# Return early if only exports were requested for this run
+		# (i.e. the export-only option was specified).
+		if dobuild is not True:
+			self.__Raptor.Info("Processing Exports only")
+			return []
+
+		# after exports are done we can look to see if there are any
+		# new Interfaces which can be used for EXTENSIONS. Make sure
+		# that we only load each cache once as some export platforms
+		# may share a directory.
+		doneID = {}
+		for ep in self.ExportPlatforms:
+			flmDir = ep["FLM_EXPORT_DIR"]
+			cid = ep["CACHEID"]
+			if flmDir.isDir() and not cid in doneID:
+				self.__Raptor.cache.Load(flmDir, cid)
+			doneID[cid] = True
+
+		# finally we can process all the other parts of the bld.inf nodes.
+		# Keep a list of the projects we were asked to build so that we can
+		# tell at the end if there were any we didn't know about.
+		self.projectList = list(self.__Raptor.projects)
+		for i,p in enumerate(platformNodes):
+			buildPlatform = self.BuildPlatforms[i]
+			for s in p.GetChildSpecs():
+				try:
+					self.ProcessTEMs(s, buildPlatform)
+					self.ProcessMMPs(s, buildPlatform)
+
+				except MetaDataError, e:
+					self.__Raptor.Error(e.Text)
+					if not self.__Raptor.keepGoing:
+						return []
+
+		for badProj in self.projectList:
+			self.__Raptor.Warn("Can't find project '%s' in any build info file", badProj)
+
+		# everything is specified
+		return exportNodes + platformNodes
+
+	def ModuleName(self,aBldInfPath):
+		"""Calculate the name of the ROM/emulator batch files that run the tests"""
+
+		def LeftPortionOf(pth,sep):
+			"""Internal helper: return the portion of pth to the left of sep.
+			The match against sep is case-insensitive."""
+			length = len((pth.lower().split(sep.lower()))[0])
+			return pth[0:length]
+			
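+		# e.g. a bld.inf at ".../messaging/group/bld.inf" yields the module name "messaging"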
+		modulePath = LeftPortionOf(LeftPortionOf(os.path.dirname(aBldInfPath), "group"), "ongoing")
+		moduleName = os.path.basename(modulePath.strip("/"))
+		
+		# Fall back to a default name if the calculation above produced a blank
+		# (or drive-only) module name
+		if moduleName == "" or moduleName.endswith(":"):
+			moduleName = "module"
+		return moduleName
+
+
+	def AddComponentNodes(self, component, exportNodes, platformNodes):	
+		"""Add Specification nodes for a bld.inf to the appropriate platforms."""
+		bldInfFile = BldInfFile(component.bldinf_filename, self.__gnucpp, component.depfiles, self.__Raptor)
+		component.bldinf = bldInfFile 
+
+		specName = getSpecName(component.bldinf_filename, fullPath=True)
+
+		if isinstance(component.bldinf, raptor_xml.SystemModelComponent):
+			# this component came from a system_definition.xml
+			layer = component.bldinf.GetContainerName("layer")
+			componentName = component.bldinf.GetContainerName("component")
+		else:
+			# this is a plain old bld.inf file from the command-line
+			layer = ""
+			componentName = ""
+
+		# exports are independent of build platform
+		for i,ep in enumerate(self.ExportPlatforms):
+			specNode = raptor_data.Specification(name = specName)
+
+			# keep the BldInfFile object for later
+			specNode.component = component
+
+			# add some basic data in a component-wide variant
+			var = raptor_data.Variant(name='component-wide')
+			var.AddOperation(raptor_data.Set("COMPONENT_META", str(component.bldinf_filename)))
+			var.AddOperation(raptor_data.Set("COMPONENT_NAME", componentName))
+			var.AddOperation(raptor_data.Set("COMPONENT_LAYER", layer))
+			specNode.AddVariant(var)
+
+			# add this bld.inf Specification to the export platform
+			exportNodes[i].AddChild(specNode)
+			component.exportspecs.append(specNode)
+
+		# get the relevant build platforms
+		listedPlatforms = bldInfFile.getBuildPlatforms(self.defaultPlatform)
+		platforms = getBuildableBldInfBuildPlatforms(listedPlatforms,
+								self.__defaultplatforms,
+								self.__basedefaultplatforms,
+								self.__baseuserdefaultplatforms)
+
+
+		outputDir = BldInfFile.outputPathFragment(component.bldinf_filename)
+
+		# Calculate "module name"
+		modulename = self.ModuleName(str(component.bldinf_filename))
+
+		for i,bp in enumerate(self.BuildPlatforms):
+			plat = bp['PLATFORM']
+			if bp['PLATFORM'] in platforms:
+				specNode = raptor_data.Specification(name = specName)
+
+				# remember what component this spec node comes from for later
+				specNode.component = component
+
+				# add some basic data in a component-wide variant
+				var = raptor_data.Variant(name='component-wide-settings-' + plat)
+				var.AddOperation(raptor_data.Set("COMPONENT_META",str(component.bldinf_filename)))
+				var.AddOperation(raptor_data.Set("COMPONENT_NAME", componentName))
+				var.AddOperation(raptor_data.Set("COMPONENT_LAYER", layer))
+				var.AddOperation(raptor_data.Set("MODULE", modulename))
+				var.AddOperation(raptor_data.Append("OUTPUTPATHOFFSET", outputDir, '/'))
+				var.AddOperation(raptor_data.Append("OUTPUTPATH", outputDir, '/'))
+				var.AddOperation(raptor_data.Append("BLDINF_OUTPUTPATH",outputDir, '/'))
+
+				var.AddOperation(raptor_data.Set("TEST_OPTION", component.bldinf.getRomTestType(bp)))
+				specNode.AddVariant(var)
+
+				# add this bld.inf Specification to the build platform
+				platformNodes[i].AddChild(specNode)
+				# also attach it into the component
+				component.specs.append(specNode)
+
+	def ProcessExports(self, componentNode, exportPlatform):
+		"""Do the exports for a given platform and skeleton bld.inf node.
+
+		This will actually perform exports as certain types of files (.mmh)
+		are required to be in place before the rest of the bld.inf node
+		(and parts of other bld.inf nodes) can be processed.
+
+		[some MMP files #include exported .mmh files]
+		"""
+		if exportPlatform["TESTCODE"]:
+			exports = componentNode.component.bldinf.getTestExports(exportPlatform)
+		else:
+			exports = componentNode.component.bldinf.getExports(exportPlatform)
+
+		self.__Raptor.Debug("%i exports for %s",
+							len(exports), str(componentNode.component.bldinf.filename))
+		if exports:
+
+			# each export is either a 'copy' or 'unzip'
+			# maybe we should trap multiple exports to the same location here?
+			epocroot = str(exportPlatform["EPOCROOT"])
+			bldinf_filename = str(componentNode.component.bldinf.filename)
+			exportwhatlog="<whatlog bldinf='%s' mmp='' config=''>\n" % bldinf_filename
+			for export in exports:
+				expSrc = export.getSource()
+				expDstList = export.getDestination() # Might not be a list in all circumstances
+
+				# make it a list if it isn't
+				if not isinstance(expDstList, list):
+					expDstList = [expDstList]
+
+				fromFile = generic_path.Path(expSrc.replace("$(EPOCROOT)", epocroot))
+
+				# For each destination in the list, add an export target and perform the export
+				# immediately if required.  This ensures that make knows about the dependency but
+				# that the export is in place before any other part of the metadata needs it.  It
+				# also helps with build-from-clean, where order-only prerequisites cannot be used.
+				for expDst in expDstList:
+					toFile = generic_path.Path(expDst.replace("$(EPOCROOT)", epocroot))
+					try:
+						if export.getAction() == "copy":
+							# export the file
+							exportwhatlog += self.CopyExport(fromFile, toFile, bldinf_filename)
+						else:
+							members = self.UnzipExport(fromFile, toFile,
+									str(exportPlatform['SBS_BUILD_DIR']),
+									bldinf_filename)
+							
+							exportwhatlog += ("<archive zipfile='" + str(fromFile) + "'>\n")
+							if members != None:
+								exportwhatlog += members
+							exportwhatlog += "</archive>\n"
+					except MetaDataError, e:
+						if self.__Raptor.keepGoing:
+							self.__Raptor.Error("%s",e.Text, bldinf=bldinf_filename)
+						else:
+							raise e
+			exportwhatlog+="</whatlog>\n"
+			self.__Raptor.PrintXML("%s",exportwhatlog)
+
+	def CopyExport(self, _source, _destination, bldInfFile):
+		"""Copy the source file to the destination file (create a directory
+		   to copy into if it does not exist). Don't copy if the destination
+		   file exists and has an equal or newer modification time."""
+		source = generic_path.Path(str(_source).replace('%20',' '))
+		destination = generic_path.Path(str(_destination).replace('%20',' '))
+		dest_str = str(destination)
+		source_str = str(source)
+
+		exportwhatlog="<export destination='" + dest_str + "' source='" + \
+				source_str + "'/>\n"
+
+		try:
+
+
+			destDir = destination.Dir()
+			if not destDir.isDir():
+				os.makedirs(str(destDir))
+				shutil.copyfile(source_str, dest_str)
+				return exportwhatlog
+
+			sourceMTime = 0
+			destMTime = 0
+			try:
+				sourceMTime = os.stat(source_str)[stat.ST_MTIME]
+				destMTime = os.stat(dest_str)[stat.ST_MTIME]
+			except OSError, e:
+				if sourceMTime == 0:
+					message = "Source of export does not exist:  " + str(source)
+					if not self.__Raptor.keepGoing:
+						raise MetaDataError(message)
+					else:
+						self.__Raptor.Error(message, bldinf=bldInfFile)
+
+			if destMTime == 0 or destMTime < sourceMTime:
+				if os.path.exists(dest_str):
+					os.chmod(dest_str,stat.S_IREAD | stat.S_IWRITE)
+				shutil.copyfile(source_str, dest_str)
+				self.__Raptor.Info("Copied %s to %s", source_str, dest_str)
+			else:
+				self.__Raptor.Info("Up-to-date: %s", dest_str)
+
+
+		except Exception,e:
+			message = "Could not export " + source_str + " to " + dest_str + " : " + str(e)
+			if not self.__Raptor.keepGoing:
+				raise MetaDataError(message)
+			else:
+				self.__Raptor.Error(message, bldinf=bldInfFile)
+
+		return exportwhatlog
+
+
+	def UnzipExport(self, _source, _destination, _sbs_build_dir, bldinf_filename):
+		"""Unzip the source zipfile into the destination directory,
+		   but only if its marker file does not already exist or is
+		   older than the zipfile.  The marker file is kept under the
+		   SBS build directory and is named from the sanitised source
+		   and destination paths with ".unzipped" appended.
+		"""
+
+		# Replace any %20 sequences in the paths with real spaces
+		source = str(_source).replace('%20',' ')
+		destination = str(_destination).replace('%20',' ')
+		sanitisedSource = raptor_utilities.sanitise(source)
+		sanitisedDestination = raptor_utilities.sanitise(destination)
+
+		exportwhatlog = ""
+
+
+		try:
+			if not _destination.isDir():
+				os.makedirs(destination)
+
+			# Form the directory that will contain the unzipped marker files, and create it if required.
+			markerfiledir = generic_path.Path(_sbs_build_dir)
+			if not markerfiledir.isDir():
+				os.makedirs(str(markerfiledir))
+
+			# Form the marker file name and convert to Python string
+			markerfilename = str(generic_path.Join(markerfiledir, sanitisedSource + sanitisedDestination + ".unzipped"))
+
+			# Don't unzip if the marker file is already there and at least as new as the zip
+			sourceMTime = 0
+			destMTime = 0
+			try:
+				sourceMTime = os.stat(source)[stat.ST_MTIME]
+				destMTime = os.stat(markerfilename)[stat.ST_MTIME]
+			except OSError, e:
+				if sourceMTime == 0:
+					raise MetaDataError("Source zip for export does not exist:  " + source)
+			if destMTime != 0 and destMTime >= sourceMTime:
+				# This file has already been unzipped. Print members then return
+				exportzip = zipfile.ZipFile(source, 'r')
+				files = exportzip.namelist()
+				files.sort()
+
+				for file in files:
+					if not file.endswith('/'):
+						expfilename = str(generic_path.Join(destination, file))
+						exportwhatlog += "<member>" + expfilename + "</member>\n"
+
+				self.__Raptor.PrintXML("<clean bldinf='" + bldinf_filename + "' mmp='' config=''>\n")
+				self.__Raptor.PrintXML("<zipmarker>" + markerfilename + "</zipmarker>\n")
+				self.__Raptor.PrintXML("</clean>\n")
+
+				return exportwhatlog
+
+			exportzip = zipfile.ZipFile(source, 'r')
+			files = exportzip.namelist()
+			files.sort()
+			filecount = 0
+			for file in files:
+				expfilename = str(generic_path.Join(destination, file))
+				if file.endswith('/'):
+					try:
+						os.makedirs(expfilename)
+					except OSError, e:
+						pass # errors to do with "already exists" are not interesting.
+				else:
+					try:
+						os.makedirs(os.path.split(expfilename)[0])
+					except OSError, e:
+						pass # errors to do with "already exists" are not interesting.
+
+					try:
+						if os.path.exists(expfilename):
+							os.chmod(expfilename,stat.S_IREAD | stat.S_IWRITE)
+						expfile = open(expfilename, 'wb')
+						expfile.write(exportzip.read(file))
+						expfile.close()
+						
+						# Resurrect any file execution permissions present in the archived version
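+						# (zip entries created on unix keep the file mode in the top 16 bits of
+						# external_attr; 0100 octal is the owner-execute bit)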
+						if (exportzip.getinfo(file).external_attr >> 16L) & 0100:
+							os.chmod(expfilename, stat.S_IMODE(os.stat(expfilename).st_mode) | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)						
+						
+						# Preserve each extracted file's modification time as recorded in the archive
+						accesstime = time.time()
+						datetime = exportzip.getinfo(file).date_time
+						timeTuple=(int(datetime[0]), int(datetime[1]), int(datetime[2]), int(datetime[3]), \
+									int(datetime[4]), int(datetime[5]), int(0), int(0), int(0))
+						modifiedtime = time.mktime(timeTuple)
+						os.utime(expfilename,(accesstime, modifiedtime))
+
+						filecount += 1
+						exportwhatlog+="<member>" + expfilename + "</member>\n"
+					except IOError, e:
+						message = "Could not unzip %s to %s: file %s: %s" %(source, destination, expfilename, str(e))
+						if not self.__Raptor.keepGoing:
+							raise MetaDataError(message)
+						else:
+							self.__Raptor.Error(message, bldinf=bldinf_filename)
+
+			markerfile = open(markerfilename, 'wb+')
+			markerfile.close()
+			self.__Raptor.PrintXML("<clean bldinf='" + bldinf_filename + "' mmp='' config=''>\n")
+			self.__Raptor.PrintXML("<zipmarker>" + markerfilename +	"</zipmarker>\n")
+			self.__Raptor.PrintXML("</clean>\n")
+
+		except IOError, e:
+			self.__Raptor.Warn("Problem while unzipping export %s to %s: %s", source, destination, str(e))
+
+		self.__Raptor.Info("Unzipped %d files from %s to %s", filecount, source, destination)
+		return exportwhatlog
+
+	def ProcessTEMs(self, componentNode, buildPlatform):
+		"""Add Template Extension Makefile nodes for a given platform
+		   to a skeleton bld.inf node.
+
+		This happens after exports have been handled.
+		"""
+		if buildPlatform["ISFEATUREVARIANT"]:
+			return	# feature variation does not run extensions at all
+		
+		if buildPlatform["TESTCODE"]:
+			extensions = componentNode.component.bldinf.getTestExtensions(buildPlatform)
+		else:
+			extensions = componentNode.component.bldinf.getExtensions(buildPlatform)
+
+		self.__Raptor.Debug("%i template extension makefiles for %s",
+							len(extensions), str(componentNode.component.bldinf.filename))
+
+		for i,extension in enumerate(extensions):
+			if self.__Raptor.projects:
+				if not extension.nametag in self.__Raptor.projects:
+					self.__Raptor.Debug("Skipping %s", extension.getMakefile())
+					continue
+				elif extension.nametag in self.projectList:
+					self.projectList.remove(extension.nametag)
+
+			extensionSpec = raptor_data.Specification("extension" + str(i))
+
+			interface = buildPlatform["extension"]
+			customInterface = False
+
+			# is there an FLM replacement for this extension?
+			if extension.interface:
+				try:
+					interface = self.__Raptor.cache.FindNamedInterface(extension.interface, buildPlatform["CACHEID"])
+					customInterface = True
+				except KeyError:
+					# no, there isn't an FLM
+					pass
+
+			extensionSpec.SetInterface(interface)
+
+			var = raptor_data.Variant()
+			var.AddOperation(raptor_data.Set("EPOCBLD", "$(OUTPUTPATH)"))
+			var.AddOperation(raptor_data.Set("PLATFORM", buildPlatform["PLATFORM"]))
+			var.AddOperation(raptor_data.Set("PLATFORM_PATH", buildPlatform["PLATFORM"].lower()))
+			var.AddOperation(raptor_data.Set("CFG", "$(VARIANTTYPE)"))
+			var.AddOperation(raptor_data.Set("CFG_PATH", "$(VARIANTTYPE)"))
+			var.AddOperation(raptor_data.Set("GENERATEDCPP", "$(OUTPUTPATH)"))
+			var.AddOperation(raptor_data.Set("TEMPLATE_EXTENSION_MAKEFILE", extension.getMakefile()))
+			var.AddOperation(raptor_data.Set("TEMCOUNT", str(i)))
+
+			# Extension inputs are added to the build spec.
+			# '$'s are escaped so that they are not expanded by Raptor or
+			# by Make in the call to the FLM
+			# The Extension makefiles are supposed to expand them themselves
+			# Path separators need not be parameterised anymore
+			# as bash is the standard shell
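+			# e.g. a value of "$(EPOCROOT)$/tools" would become "$$$$(EPOCROOT)/tools"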
+			standardVariables = extension.getStandardVariables()
+			for standardVariable in standardVariables.keys():
+				self.__Raptor.Debug("Set %s=%s", standardVariable, standardVariables[standardVariable])
+				value = standardVariables[standardVariable].replace('$(', '$$$$(')
+				value = value.replace('$/', '/').replace('$;', ':')
+				var.AddOperation(raptor_data.Set(standardVariable, value))
+
+			# . . . as with the standard variables but the names and number
+			# of options are not known in advance so we add them to
+			# a "structure" that is self-describing
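+			# e.g. an option "tool" with value "armcc" ends up with O._MEMBERS containing "tool"
+			# and O.tool set to "armcc" (unless a custom interface handles the option directly)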
+			var.AddOperation(raptor_data.Set("O._MEMBERS", ""))
+			options = extension.getOptions()
+			for option in options:
+				self.__Raptor.Debug("Set %s=%s", option, options[option])
+				value = options[option].replace('$(EPOCROOT)', '$(EPOCROOT)/')
+				value = value.replace('$(', '$$$$(')
+				value = value.replace('$/', '/').replace('$;', ':')
+
+				if customInterface:
+					var.AddOperation(raptor_data.Set(option, value))
+				else:
+					var.AddOperation(raptor_data.Append("O._MEMBERS", option))
+					var.AddOperation(raptor_data.Set("O." + option, value))
+
+			extensionSpec.AddVariant(var)
+			componentNode.AddChild(extensionSpec)
+
+
+	def ProcessMMPs(self, componentNode, buildPlatform):
+		"""Add project nodes for a given platform to a skeleton bld.inf node.
+
+		This happens after exports have been handled.
+		"""
+		gnuList = []
+		makefileList = []
+
+
+		component = componentNode.component
+
+
+		if buildPlatform["TESTCODE"]:
+			MMPList = component.bldinf.getTestMMPList(buildPlatform)
+		else:
+			MMPList = component.bldinf.getMMPList(buildPlatform)
+
+		bldInfFile = component.bldinf.filename
+
+		for mmpFileEntry in MMPList['mmpFileList']:
+			component.AddMMP(mmpFileEntry.filename) # Tell the component another mmp is specified (for this platform)
+
+			projectname = mmpFileEntry.filename.File().lower()
+
+			if self.__Raptor.projects:
+				if not projectname in self.__Raptor.projects:
+					self.__Raptor.Debug("Skipping %s", str(mmpFileEntry.filename))
+					continue
+				elif projectname in self.projectList:
+					self.projectList.remove(projectname)
+
+			foundmmpfile = (mmpFileEntry.filename).FindCaseless()
+
+			if foundmmpfile == None:
+				self.__Raptor.Error("Can't find mmp file '%s'", str(mmpFileEntry.filename), bldinf=str(bldInfFile))
+				continue
+
+			mmpFile = MMPFile(foundmmpfile,
+								   self.__gnucpp,
+								   component.bldinf,
+								   component.depfiles,
+								   log = self.__Raptor)
+
+			mmpFilename = mmpFile.filename
+
+			self.__Raptor.Info("Processing %s for platform %s",
+							   str(mmpFilename),
+							   " + ".join([x.name for x in buildPlatform["configs"]]))
+
+			# Run the Parser
+			# The backend supplies the actions
+			content = mmpFile.getContent(buildPlatform)
+			backend = MMPRaptorBackend(self.__Raptor, str(mmpFilename), str(bldInfFile))
+			parser  = MMPParser(backend)
+			parseresult = None
+			try:
+				parseresult = parser.mmp.parseString(content)
+			except ParseException,e:
+				self.__Raptor.Debug(e) # basically ignore parse exceptions
+
+			if (not parseresult) or (parseresult[0] != 'MMP'):
+				self.__Raptor.Error("The MMP Parser didn't recognise the mmp file '%s'",
+					                str(mmpFileEntry.filename), 
+					                bldinf=str(bldInfFile))
+				self.__Raptor.Debug(content)
+				self.__Raptor.Debug("The parse result was %s", parseresult)
+			else:
+				backend.finalise(buildPlatform)
+
+			# feature variation only processes FEATUREVARIANT binaries
+			if buildPlatform["ISFEATUREVARIANT"] and not backend.featureVariant:
+				continue
+			
+			# now build the specification tree
+			mmpSpec = raptor_data.Specification(generic_path.Path(getSpecName(mmpFilename)))
+			var = backend.BuildVariant
+
+			var.AddOperation(raptor_data.Set("PROJECT_META", str(mmpFilename)))
+
+			# If it is a TESTMMPFILE section, the FLM needs to know about it
+			if buildPlatform["TESTCODE"] and (mmpFileEntry.testoption in
+					["manual", "auto"]):
+
+				var.AddOperation(raptor_data.Set("TESTPATH",
+						mmpFileEntry.testoption.lower() + ".bat"))
+
+			# The output path for objects, stringtables and bitmaps specified by
+			# this MMP.  Adding in the requested target extension prevents build
+			# "fouling" in cases where there are several mmp targets which only differ
+			# by the requested extension. e.g. elocl.01 and elocl.18
+			var.AddOperation(raptor_data.Append("OUTPUTPATH","$(UNIQUETARGETPATH)",'/'))
+
+			# If the bld.inf entry for this MMP had the BUILD_AS_ARM option then
+			# tell the FLM.
+			if mmpFileEntry.armoption:
+				var.AddOperation(raptor_data.Set("ALWAYS_BUILD_AS_ARM","1"))
+
+			# what interface builds this node?
+			try:
+				interfaceName = buildPlatform[backend.getTargetType()]
+				mmpSpec.SetInterface(interfaceName)
+			except KeyError:
+				self.__Raptor.Error("Unsupported target type '%s' in %s",
+								    backend.getTargetType(),
+								    str(mmpFileEntry.filename),
+								    bldinf=str(bldInfFile))
+				continue
+
+			# Although not part of the MMP, some MMP-based build specs additionally require knowledge of their
+			# container bld.inf exported headers
+			for export in componentNode.component.bldinf.getExports(buildPlatform):
+				destination = export.getDestination()
+				if isinstance(destination, list):
+					exportfile = str(destination[0])
+				else:
+					exportfile = str(destination)
+
+				if re.search('\.h',exportfile,re.IGNORECASE):
+					var.AddOperation(raptor_data.Append("EXPORTHEADERS", str(exportfile)))
+
+			# now we have something worth adding to the component
+			mmpSpec.AddVariant(var)
+			componentNode.AddChild(mmpSpec)
+			
+			# if there are APPLY variants then add them to the mmpSpec too
+			for applyVar in backend.ApplyVariants:
+				try:
+					mmpSpec.AddVariant(self.__Raptor.cache.FindNamedVariant(applyVar))
+				except KeyError:
+					self.__Raptor.Error("APPLY unknown variant '%s' in %s",
+								        applyVar,
+								        str(mmpFileEntry.filename),
+								        bldinf=str(bldInfFile))
+
+			# resources, stringtables and bitmaps are sub-nodes of this project
+			# (do not add these for feature variant builds)
+			
+			if not buildPlatform["ISFEATUREVARIANT"]:
+				# Buildspec for Resource files
+				for i,rvar in enumerate(backend.ResourceVariants):
+					resourceSpec = raptor_data.Specification('resource' + str(i))
+					resourceSpec.SetInterface(buildPlatform['resource'])
+					resourceSpec.AddVariant(rvar)
+					mmpSpec.AddChild(resourceSpec)
+
+				# Buildspec for String Tables
+				for i,stvar in enumerate(backend.StringTableVariants):
+					stringTableSpec = raptor_data.Specification('stringtable' + str(i))
+					stringTableSpec.SetInterface(buildPlatform['stringtable'])
+					stringTableSpec.AddVariant(stvar)
+					mmpSpec.AddChild(stringTableSpec)
+
+				# Buildspec for Bitmaps
+				for i,bvar in enumerate(backend.BitmapVariants):
+					bitmapSpec = raptor_data.Specification('bitmap' + str(i))
+					bitmapSpec.SetInterface(buildPlatform['bitmap'])
+					bitmapSpec.AddVariant(bvar)
+					mmpSpec.AddChild(bitmapSpec)
+
+		# feature variation does not run extensions at all
+		# so return without considering .*MAKEFILE sections
+		if buildPlatform["ISFEATUREVARIANT"]:
+			return
+			
+		# Build spec for gnumakefile
+		for g in MMPList['gnuList']:
+			projectname = g.getMakefileName().lower()
+
+			if self.__Raptor.projects:
+				if not projectname in self.__Raptor.projects:
+					self.__Raptor.Debug("Skipping %s", str(g.getMakefileName()))
+					continue
+				elif projectname in self.projectList:
+					self.projectList.remove(projectname)
+
+			self.__Raptor.Debug("%i gnumakefile extension makefiles for %s",
+						len(MMPList['gnuList']), str(componentNode.component.bldinf.filename))
+			var = raptor_data.Variant()
+			gnuSpec = raptor_data.Specification("gnumakefile " + str(g.getMakefileName()))
+			interface = buildPlatform["ext_makefile"]
+			gnuSpec.SetInterface(interface)
+			gnumakefilePath = raptor_utilities.resolveSymbianPath(str(bldInfFile), g.getMakefileName())
+			var.AddOperation(raptor_data.Set("EPOCBLD", "$(OUTPUTPATH)"))
+			var.AddOperation(raptor_data.Set("PLATFORM", buildPlatform["PLATFORM"]))
+			var.AddOperation(raptor_data.Set("EXTMAKEFILENAME", g.getMakefileName()))
+			var.AddOperation(raptor_data.Set("DIRECTORY",g.getMakeDirectory()))
+			var.AddOperation(raptor_data.Set("CFG","$(VARIANTTYPE)"))
+			standardVariables = g.getStandardVariables()
+			for standardVariable in standardVariables.keys():
+				self.__Raptor.Debug("Set %s=%s", standardVariable, standardVariables[standardVariable])
+				value = standardVariables[standardVariable].replace('$(', '$$$$(')
+				value = value.replace('$/', '/').replace('$;', ':')
+				var.AddOperation(raptor_data.Set(standardVariable, value))
+			gnuSpec.AddVariant(var)
+			componentNode.AddChild(gnuSpec)
+
+		# Build spec for makefile
+		for m in MMPList['makefileList']:
+			projectname = m.getMakefileName().lower()
+
+			if self.__Raptor.projects:
+				if not projectname in self.__Raptor.projects:
+					self.__Raptor.Debug("Skipping %s", str(m.getMakefileName()))
+					continue
+				elif projectname in self.projectList:
+					self.projectList.remove(projectname)
+
+			self.__Raptor.Debug("%i makefile extension makefiles for %s",
+						len(MMPList['makefileList']), str(componentNode.component.bldinf.filename))
+			var = raptor_data.Variant()
+			gnuSpec = raptor_data.Specification("makefile " + str(m.getMakefileName()))
+			interface = buildPlatform["ext_makefile"]
+			gnuSpec.SetInterface(interface)
+			gnumakefilePath = raptor_utilities.resolveSymbianPath(str(bldInfFile), m.getMakefileName())
+			var.AddOperation(raptor_data.Set("EPOCBLD", "$(OUTPUTPATH)"))
+			var.AddOperation(raptor_data.Set("PLATFORM", buildPlatform["PLATFORM"]))
+			var.AddOperation(raptor_data.Set("EXTMAKEFILENAME", m.getMakefileName()))
+			var.AddOperation(raptor_data.Set("DIRECTORY",m.getMakeDirectory()))
+			var.AddOperation(raptor_data.Set("CFG","$(VARIANTTYPE)"))
+			var.AddOperation(raptor_data.Set("USENMAKE","1"))
+			standardVariables = m.getStandardVariables()
+			for standardVariable in standardVariables.keys():
+				self.__Raptor.Debug("Set %s=%s", standardVariable, standardVariables[standardVariable])
+				value = standardVariables[standardVariable].replace('$(', '$$$$(')
+				value = value.replace('$/', '/').replace('$;', ':')
+				var.AddOperation(raptor_data.Set(standardVariable, value))
+			gnuSpec.AddVariant(var)
+			componentNode.AddChild(gnuSpec)
+
+
+	def ApplyOSVariant(self, aBuildUnit, aEpocroot):
+		# Form path to kif.xml and path to buildinfo.txt
+		kifXmlPath = generic_path.Join(aEpocroot, "epoc32", "data","kif.xml")
+		buildInfoTxtPath = generic_path.Join(aEpocroot, "epoc32", "data","buildinfo.txt")
+
+		# osVersion starts as None; once determined it is a string that serves two purposes:
+		# 1) it is a representation of the OS version
+		# 2) it is potentially the name of a variant
+		osVersion = None
+		if kifXmlPath.isFile(): # kif.xml exists so try to read it
+			osVersion = getOsVerFromKifXml(str(kifXmlPath))
+			if osVersion != None:
+				self.__Raptor.Info("OS version \"%s\" determined from file \"%s\"" % (osVersion, kifXmlPath))
+
+		# The OS version was not determined from kif.xml, e.g. because it doesn't exist
+		# or there was a problem parsing it, so fall back to using buildinfo.txt
+		if osVersion == None and buildInfoTxtPath.isFile():
+			osVersion = getOsVerFromBuildInfoTxt(str(buildInfoTxtPath))
+			if osVersion != None:
+				self.__Raptor.Info("OS version \"%s\" determined from file \"%s\"" % (osVersion, buildInfoTxtPath))
+
+		# If we determined a non-empty string for the OS Version, attempt to apply it
+		if osVersion and osVersion in self.__Raptor.cache.variants:
+			self.__Raptor.Info("applying the OS variant to the configuration \"%s\"." % aBuildUnit.name)
+			aBuildUnit.variants.append(self.__Raptor.cache.variants[osVersion])
+		else:
+			self.__Raptor.Info("no OS variant for the configuration \"%s\"." % aBuildUnit.name)
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sbsv2/raptor/test/smoke_suite/pdll_arm.py	Sun Jan 17 23:00:39 2010 +0000
@@ -0,0 +1,121 @@
+#
+# Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of the License "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description: 
+#
+
+from raptor_tests import AntiTargetSmokeTest
+
+def run():
+	t = AntiTargetSmokeTest()
+	t.usebash = True
+	
+	command = "sbs -b smoke_suite/test_resources/simple_dll/pbld.inf -c %s -f -"
+	maintargets = [
+		"$(EPOCROOT)/epoc32/release/%s/udeb/createstaticpdll.dll.sym",
+		"$(EPOCROOT)/epoc32/release/%s/urel/createstaticpdll.dll.sym",
+		"$(EPOCROOT)/epoc32/release/%s/udeb/createstaticpdll.dll",
+		"$(EPOCROOT)/epoc32/release/%s/urel/createstaticpdll.dll"
+		]
+	armv5targets = [
+		"$(EPOCROOT)/epoc32/release/%s/lib/createstaticpdll.dso",
+		"$(EPOCROOT)/epoc32/release/%s/lib/createstaticpdll{000a0000}.dso"
+		]
+	abiv1libtargets = [
+		"$(EPOCROOT)/epoc32/release/%s/lib/createstaticpdll.lib",
+		"$(EPOCROOT)/epoc32/release/%s/lib/createstaticpdll{000a0000}.lib"
+		]
+	buildtargets =  [
+		"createstaticpdll_dll/%s/udeb/CreateStaticDLL.o",
+		"createstaticpdll_dll/%s/urel/CreateStaticDLL.o"
+		]
+	mustmatch = [
+		r".*\busrt\d_\d\.lib\b.*",
+		r".*\bscppnwdl\.dso\b.*"
+		]
+	mustnotmatch = [
+		".*ksrt.*"
+		]
+	
+	# Note that ABIv1 import libraries are only generated for RVCT-based armv5
+	# builds on Windows
+	
+	t.id = "0104a"
+	t.name = "pdll_armv5_rvct"
+	t.command = command % "armv5"
+	t.targets = map(lambda p: p % "armv5", maintargets + armv5targets)[:]	# Shallow, as we optionally extend later and then re-use
+	t.addbuildtargets('smoke_suite/test_resources/simple_dll/pbld.inf', map(lambda p: p % "armv5", buildtargets))
+	t.mustmatch = mustmatch
+	t.mustnotmatch = mustnotmatch
+	t.run("linux")
+	if t.result == AntiTargetSmokeTest.SKIP:
+		t.targets.extend(map(lambda x: x % "armv5", abiv1libtargets))
+		t.run("windows")
+		
+	t.id = "0104b"
+	t.name = "pdll_armv5_clean"
+	t.command = command % "armv5" + " clean"
+	t.targets = []
+	t.mustmatch = []
+	t.mustnotmatch = []
+	t.run()
+	
+	t.id = "0104c"
+	t.name = "pdll_armv5_gcce"
+	t.command = command % "gcce_armv5"
+	t.targets = map(lambda p: p % "armv5", maintargets + armv5targets)
+	t.antitargets = map(lambda p: p % "armv5", abiv1libtargets)
+	t.addbuildtargets('smoke_suite/test_resources/simple_dll/pbld.inf', map(lambda p: p % "armv5", buildtargets))
+	t.mustmatch = mustmatch
+	t.mustnotmatch = mustnotmatch
+	t.run()
+
+	t.id = "0104d"
+	t.name = "pdll_armv5_gcce_clean"
+	t.command = command % "gcce_armv5" + " clean"
+	t.targets = []
+	t.mustmatch = []
+	t.mustnotmatch = []
+	t.run()
+
+	t.id = "0104e"
+	t.name = "pdll_armv7_rvct"
+	t.command = command % "armv7"
+	t.targets = map(lambda p: p % "armv7", maintargets)[:]	# Shallow, as we optionally extend later and then re-use
+	t.addbuildtargets('smoke_suite/test_resources/simple_dll/pbld.inf', map(lambda p: p % "armv7", buildtargets))
+	t.mustmatch = mustmatch
+	t.mustnotmatch = mustnotmatch
+	t.run()
+	
+	t.id = "0104f"
+	t.name = "pdll_armv7_clean"
+	t.command = command % "armv7" + " clean"
+	t.targets = []
+	t.mustmatch = []
+	t.mustnotmatch = []
+	t.run()
+	
+	t.id = "0104g"
+	t.name = "pdll_armv7_gcce"
+	t.command = command % "arm.v7.udeb.gcce4_3_2 -c arm.v7.urel.gcce4_3_2"
+	t.targets = map(lambda p: p % "armv7", maintargets)
+	t.antitargets = map(lambda p: p % "armv7", abiv1libtargets)
+	t.addbuildtargets('smoke_suite/test_resources/simple_dll/pbld.inf', map(lambda p: p % "armv7", buildtargets))
+	t.mustmatch = mustmatch
+	t.mustnotmatch = mustnotmatch
+	t.run()
+
+	t.id = "104"
+	t.name = "pdll_arm"
+	t.print_result()
+	return t
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sbsv2/raptor/test/smoke_suite/pdll_winscw.py	Sun Jan 17 23:00:39 2010 +0000
@@ -0,0 +1,39 @@
+#
+# Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of the License "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description: 
+#
+
+from raptor_tests import SmokeTest
+
+def run():
+	t = SmokeTest()
+	t.id = "105"
+	t.name = "pdll_winscw"
+	t.command = "sbs -b smoke_suite/test_resources/simple_dll/pbld.inf -c winscw"
+	t.targets = [
+		"$(EPOCROOT)/epoc32/release/winscw/udeb/createstaticpdll.lib",
+		"$(EPOCROOT)/epoc32/release/winscw/udeb/createstaticpdll.dll",
+		"$(EPOCROOT)/epoc32/release/winscw/urel/createstaticpdll.dll",
+		"$(EPOCROOT)/epoc32/release/winscw/urel/createstaticpdll.dll.map"
+		]
+	t.addbuildtargets('smoke_suite/test_resources/simple_dll/pbld.inf', [
+		"createstaticpdll_dll/winscw/udeb/CreateStaticDLL.o",
+		"createstaticpdll_dll/winscw/udeb/createstaticpdll.UID.CPP",
+		"createstaticpdll_dll/winscw/udeb/createstaticpdll_UID_.o",
+		"createstaticpdll_dll/winscw/urel/CreateStaticDLL.o",
+		"createstaticpdll_dll/winscw/urel/createstaticpdll.UID.CPP",
+		"createstaticpdll_dll/winscw/urel/createstaticpdll_UID_.o"
+	])
+	t.run()
+	return t
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sbsv2/raptor/test/smoke_suite/qt.py	Sun Jan 17 23:00:39 2010 +0000
@@ -0,0 +1,55 @@
+#
+# Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of the License "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description: 
+# This test case requires Qt to be installed.
+
+from raptor_tests import SmokeTest
+
+def run():
+	t = SmokeTest()
+
+	t.description = "Ensure Raptor builds Qt applications successfully"	
+
+	t.id = "00xx"	# TO DO: update ID
+	t.name = "qt_helloworld"
+	t.command = "cd smoke_suite/test_resources/qt && qmake -spec symbian-sbsv2 && sbs"
+	t.targets = [
+			"$(SBS_HOME)/test/smoke_suite/test_resources/qt/bld.inf",
+			"$(SBS_HOME)/test/smoke_suite/test_resources/qt/helloworldqt.loc",
+			"$(SBS_HOME)/test/smoke_suite/test_resources/qt/helloworldqt.rss",
+			"$(SBS_HOME)/test/smoke_suite/test_resources/qt/helloworldqt_reg.rss",
+			"$(SBS_HOME)/test/smoke_suite/test_resources/qt/helloworldqt_template.pkg",
+			"$(SBS_HOME)/test/smoke_suite/test_resources/qt/Makefile",
+			"$(EPOCROOT)/epoc32/release/armv5/udeb/helloworldqt.exe",
+			"$(EPOCROOT)/epoc32/release/armv5/udeb/helloworldqt.exe.map",
+			"$(EPOCROOT)/epoc32/release/armv5/urel/helloworldqt.exe",
+			"$(EPOCROOT)/epoc32/release/armv5/urel/helloworldqt.exe.map",
+			"$(EPOCROOT)/epoc32/release/winscw/udeb/helloworldqt.exe",
+			"$(EPOCROOT)/epoc32/release/winscw/urel/helloworldqt.exe",
+			"$(EPOCROOT)/epoc32/release/winscw/urel/helloworldqt.exe.map"
+		]
+	t.addbuildtargets('smoke_suite/test_resources/qt/bld.inf', [
+		"helloworldqt_exe/armv5/udeb/helloworld.o",
+		"helloworldqt_exe/armv5/udeb/helloworld.o.d",
+		"helloworldqt_exe/armv5/urel/helloworld.o",
+		"helloworldqt_exe/armv5/urel/helloworld.o.d",
+		"helloworldqt_exe/winscw/udeb/helloworld.o",
+		"helloworldqt_exe/winscw/udeb/helloworld.o.d",	
+		"helloworldqt_exe/winscw/urel/helloworld.o",
+		"helloworldqt_exe/winscw/urel/helloworld.o.d"
+	])
+	t.run("windows")
+
+	return t
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sbsv2/raptor/test/smoke_suite/test_resources/qt/helloworld.cpp	Sun Jan 17 23:00:39 2010 +0000
@@ -0,0 +1,13 @@
+#include <QApplication>
+#include <QPushButton>
+
+int main(int argc, char *argv[])
+{
+    QApplication app(argc, argv);
+
+    QPushButton hello("Hello world!");
+    hello.resize(100, 30);
+
+    hello.show();
+    return app.exec();
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sbsv2/raptor/test/smoke_suite/test_resources/qt/helloworldqt.pro	Sun Jan 17 23:00:39 2010 +0000
@@ -0,0 +1,8 @@
+
+TEMPLATE = app
+TARGET = 
+DEPENDPATH += .
+INCLUDEPATH += .
+
+# Input
+SOURCES += helloworld.cpp
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sbsv2/raptor/test/smoke_suite/test_resources/simple_dll/CREATESTATICPDLLARM.def	Sun Jan 17 23:00:39 2010 +0000
@@ -0,0 +1,6 @@
+EXPORTS
+	_ZN10CMessenger11ShowMessageEv @ 1 NONAME
+	_ZN10CMessenger5NewLCER12CConsoleBaseRK7TDesC16 @ 2 NONAME
+	_ZTI10CMessenger @ 3 NONAME
+	_ZTV10CMessenger @ 4 NONAME
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sbsv2/raptor/test/smoke_suite/test_resources/simple_dll/CREATESTATICPDLLWINS.def	Sun Jan 17 23:00:39 2010 +0000
@@ -0,0 +1,4 @@
+EXPORTS
+	?ShowMessage@CMessenger@@QAEXXZ @ 1 NONAME ; ?ShowMessage@CMessenger@@QAEXXZ
+	?NewLC@CMessenger@@SAPAV1@AAVCConsoleBase@@ABVTDesC16@@@Z @ 2 NONAME ; ?NewLC@CMessenger@@SAPAV1@AAVCConsoleBase@@ABVTDesC16@@@Z
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sbsv2/raptor/test/smoke_suite/test_resources/simple_dll/CreateStaticPDLL.mmp	Sun Jan 17 23:00:39 2010 +0000
@@ -0,0 +1,50 @@
+/*
+* Copyright (c) 2000-2009 Nokia Corporation and/or its subsidiary(-ies).
+* All rights reserved.
+* This component and the accompanying materials are made available
+* under the terms of the License "Eclipse Public License v1.0"
+* which accompanies this distribution, and is available
+* at the URL "http://www.eclipse.org/legal/epl-v10.html".
+*
+* Initial Contributors:
+* Nokia Corporation - initial contribution.
+*
+* Contributors:
+*
+* Description: 
+*
+*/
+
+TARGET        createstaticpdll.dll
+TARGETTYPE    pdll
+
+UID             0xE800004C
+CAPABILITY   All -TCB
+
+
+VENDORID 0x70000001
+
+SOURCEPATH    .
+SOURCE        CreateStaticDLL.cpp
+
+#ifdef ARMV6
+SOURCE        armv6_specific.cpp
+#endif
+#ifdef ARMV7
+SOURCE        armv7_specific.cpp
+#endif
+
+USERINCLUDE   .
+SYSTEMINCLUDE /epoc32/include
+
+LIBRARY       euser.lib
+
+#if defined(WINS)
+    deffile ./CREATESTATICPDLLWINS.def
+#elif defined(MARM)
+    deffile ./CREATESTATICPDLLARM.def
+#endif
+nostrictdef
+
+#include "../inc/macrotests.mmh"
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sbsv2/raptor/test/smoke_suite/test_resources/simple_dll/pbld.inf	Sun Jan 17 23:00:39 2010 +0000
@@ -0,0 +1,25 @@
+/*
+* Copyright (c) 2000-2009 Nokia Corporation and/or its subsidiary(-ies).
+* All rights reserved.
+* This component and the accompanying materials are made available
+* under the terms of the License "Eclipse Public License v1.0"
+* which accompanies this distribution, and is available
+* at the URL "http://www.eclipse.org/legal/epl-v10.html".
+*
+* Initial Contributors:
+* Nokia Corporation - initial contribution.
+*
+* Contributors:
+*
+* Description: 
+* Component description file
+*
+*/
+
+
+PRJ_PLATFORMS
+ARMV5 ARMV6 ARMV7 ARMV5SMP WINSCW
+
+PRJ_MMPFILES
+
+CreateStaticPDLL.mmp
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sbsv2/raptor/test/smoke_suite/whatcomp.py	Sun Jan 17 23:00:39 2010 +0000
@@ -0,0 +1,65 @@
+#
+# Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of the License "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description: 
+#
+
+from raptor_tests import SmokeTest
+import generic_path
+import os
+
+def run():
+	t = SmokeTest()
+	t.usebash = True
+	result = SmokeTest.PASS
+
+	abs_epocroot = os.path.abspath(os.environ["EPOCROOT"]).replace("\\","/")
+	cwd = os.getcwd().replace("\\","/")
+
+	relative_epocroot = os.path.relpath(abs_epocroot,cwd).replace("\\","/")
+
+	
+
+	description = """This tests the whatcomp filter.  As a byproduct it uses (and thus smoke-tests) sbs_filter.py"""
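+	# run the whatcomp filter twice over the same log: once with a relative EPOCROOT and
+	# once with an absolute one, so that both forms of output path are checked below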
+	command = "sbs -b smoke_suite/test_resources/simple/bld.inf -c %s -m ${SBSMAKEFILE} -f ${SBSLOGFILE} what  && " + \
+		  "EPOCROOT=%s sbs_filter --filters FilterWhatComp < ${SBSLOGFILE} &&" % relative_epocroot + \
+		  "EPOCROOT=%s sbs_filter --filters FilterWhatComp < ${SBSLOGFILE}"  % abs_epocroot
+	targets = [
+		]	
+	buildtargets = [
+		]
+	mustmatch = [
+		"-- abld -w",
+		"Chdir .*/smoke_suite/test_resources/simple",
+		relative_epocroot + "/epoc32/release/armv5/urel/test.exe",
+		relative_epocroot + "/epoc32/release/armv5/urel/test.exe.map",
+		abs_epocroot + "/epoc32/release/armv5/urel/test.exe",
+		abs_epocroot + "/epoc32/release/armv5/urel/test.exe.map",
+	] 
+	mustnotmatch = [
+	"error: no (CHECK|WHAT) information found"
+	]
+	warnings = 0
+	
+	t.id = "0106"
+	t.name = "filter_whatcomp_sbs_filter"
+	t.description = description
+	t.command = command % "arm.v5.urel.gcce4_4_1"
+	t.targets = targets
+	t.mustmatch = mustmatch
+	t.mustnotmatch = mustnotmatch
+	t.warnings = warnings
+	t.run()
+
+	t.print_result()
+	return t
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sbsv2/raptor/test/timing_tests/parse_time_exports_mmps.py	Sun Jan 17 23:00:39 2010 +0000
@@ -0,0 +1,77 @@
+
+from raptor_tests import SmokeTest, ReplaceEnvs
+import os
+
+def generate_files():
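+	# Create a bld.inf that references 250 trivial mmp files and 10*10*10*10 = 10000
+	# PRJ_EXPORTS entries, purely to exercise metadata parsing time.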
+	try:
+		os.makedirs(ReplaceEnvs("$(SBS_HOME)/test/timing_tests/test_resources/parse_time"))
+	except:
+		pass
+	bldinf_path = ReplaceEnvs("$(SBS_HOME)/test/timing_tests/test_resources/parse_time/bld.inf")
+	bldinf = open(bldinf_path, "w")
+	bldinf_content = """prj_mmpfiles
+"""
+	test_dir = ReplaceEnvs("$(SBS_HOME)/test/timing_tests/test_resources/parse_time")
+	for number in range(0, 250):
+		mmp_path = ("parse_timing_" + str(number).zfill(3) + ".mmp")
+		mmp_file = open((test_dir + "/" + mmp_path), "w")
+		mmp_file.write("""targettype	none
+""")
+		mmp_file.close()
+		bldinf_content += (mmp_path + "\n")
+		
+	bldinf_content += "\nprj_exports\n"
+
+	for number1 in range(0, 10):
+		source_dir = ("export_source_" + str(number1))
+		try:
+			os.mkdir(test_dir + "/" + source_dir)
+		except:
+			pass
+		
+		for number2 in range (0, 10):
+			source_file = ("/file_" + str(number2) + ".txt ")
+			export_file = open((test_dir + "/" + source_dir + source_file), "w")
+			export_file.write(str(number2))
+			export_file.close()
+			
+			for number3 in range (0, 10):
+				dest_dir = ("epoc32/include/export_destination_" + \
+						str(number1) + str(number2) + str(number3))
+				
+				for number4 in range(0, 10):
+					bldinf_content += source_dir + source_file + dest_dir + \
+							"/export_destination_" + str(number4) + "\n"
+	bldinf.write(bldinf_content)
+	bldinf.close()
+	
+	
+def delete_files():
+	import shutil
+	
+	test_dir = ReplaceEnvs("$(SBS_HOME)/test/timing_tests/test_resources/parse_time")
+	objects = os.listdir(test_dir)
+	for object in objects:
+		object_path = (test_dir + "/" + object)
+		if os.path.isfile(object_path):
+			os.remove(object_path)
+		else:
+			shutil.rmtree(object_path)
+	
+
+def run():
+	
+	generate_files()
+	
+	t = SmokeTest()
+	
+	t.id = "1"
+	t.name = "parse_time_exports_mmps"
+	t.description = """Test to measure time taken to parse a large number of
+			exports and mmps"""
+	t.command = "sbs -b timing_tests/test_resources/parse_time/bld.inf -n " + \
+			"-c armv5_urel --toolcheck=off --timing"
+	t.run()
+	
+	delete_files()
+	return t
--- a/sbsv2/raptor/test/unit_suite/raptor_data_unit.py	Fri Jan 15 16:09:36 2010 +0000
+++ b/sbsv2/raptor/test/unit_suite/raptor_data_unit.py	Sun Jan 17 23:00:39 2010 +0000
@@ -30,24 +30,18 @@
 		
 		
 	def SetEnv(self, name, value):
-		# set environment variable and remember the old value
-		
-		try:
-			old = os.environ[name]
-			self.envStack[name] = old
-			os.environ[name] = value
-		except KeyError:
-			self.envStack[name] = None    # was not defined
+		# set environment variable and remember the old value (if there is one)		
+		if os.environ.has_key(name):
+			self.envStack[name] = os.environ[name]
+		os.environ[name] = value
 		
 			
 	def RestoreEnv(self, name):
 		# put environment back to its state before SetEnv
-		saved = self.envStack[name]
-		
-		if saved == None:
+		if self.envStack.has_key(name):
+			os.environ[name] = self.envStack[name]
+		else:
 			del os.environ[name]    # was not defined
-		else:
-			os.environ[name] = saved
 			
 			
 	def testSimpleSpecification(self):
@@ -359,6 +353,32 @@
 		# test the Resolve wrt EPOCROOT
 		varcfg = eval.Resolve("VARIANT_CFG")
 		self.assertEqual(varcfg, "/C/variant/variant.cfg")
+		
+	def testProblematicEnvironment(self):
+		# ask for environment variable values that will break makefile parsing due to
+		# backslashes forming line continuation characters
+		self.SetEnv("ENVVAR_BSLASH_END1", "C:\\test1a\\;C:\\test1b\\")
+		self.SetEnv("ENVVAR_BSLASH_END2", "C:\\test2a\\;C:\\test2b\\\\")
+		self.SetEnv("ENVVAR_BSLASH_END3", "C:\\test3a\\;C:\\test3b\\\\\\")
+		var = raptor_data.Variant("my.var")
+		var.AddOperation(raptor_data.Env("ENVVAR_BSLASH_END1"))
+		var.AddOperation(raptor_data.Env("ENVVAR_BSLASH_END2"))
+		var.AddOperation(raptor_data.Env("ENVVAR_BSLASH_END3"))
+
+		aRaptor = raptor.Raptor()
+		eval = aRaptor.GetEvaluator(None, var.GenerateBuildUnits(aRaptor.cache)[0])
+		self.RestoreEnv("ENVVAR_BSLASH_END1")
+		self.RestoreEnv("ENVVAR_BSLASH_END2")
+		self.RestoreEnv("ENVVAR_BSLASH_END3")
+		
+		value = eval.Get("ENVVAR_BSLASH_END1")
+		self.assertEqual(value, "C:\\test1a\\;C:\\test1b\\\\")
+		
+		value = eval.Get("ENVVAR_BSLASH_END2")
+		self.assertEqual(value, "C:\\test2a\\;C:\\test2b\\\\")
+		
+		value = eval.Get("ENVVAR_BSLASH_END3")
+		self.assertEqual(value, "C:\\test3a\\;C:\\test3b\\\\\\\\")
 	
 	def testMissingEnvironment(self):
 		# ask for an environment variable that is not set
--- a/sbsv2/raptor/util/install-windows/raptorinstallermaker.py	Fri Jan 15 16:09:36 2010 +0000
+++ b/sbsv2/raptor/util/install-windows/raptorinstallermaker.py	Sun Jan 17 23:00:39 2010 +0000
@@ -30,40 +30,51 @@
 tempdir = ""
 
 parser = optparse.OptionParser()
-parser.add_option("-s", "--sbs_home", dest="sbs_home",
+parser.add_option("-s", "--sbs-home", dest="sbshome",
+                  help="Path to use as SBS_HOME environment variable. If not present the script exits.")
+parser.add_option("-w", "--win32-support", dest="win32support",
                   help="Path to use as SBS_HOME environment variable. If not present the script exits.")
 
 (options, args) = parser.parse_args()
 
-if options.sbs_home == None:
+if options.sbshome == None:
 	print "ERROR: no SBS_HOME passed in. Exiting..."
 	sys.exit(2)
 
+if options.win32support == None:
+	print "ERROR: no win32support directory specified. Unable to proceed. Exiting..."
+	sys.exit(2)
+else:
+	# Required directories inside the win32-support repository
+	win32supportdirs = ["bv", "cygwin", "mingw", "python252"]
+	for dir in win32supportdirs:
+		if not os.path.isdir(os.path.join(options.win32support, dir)):
+			print "ERROR: directory %s does not exist. Cannot build installer. Exiting..." % os.path.join(options.win32support, dir)
+			sys.exit(2)
 
 def parseconfig(xmlFile="raptorinstallermaker.xml"):
 	pass
 
-def generateinstallerversionheader(sbs_home = None):
-	os.environ["SBS_HOME"] = sbs_home
-	os.environ["PATH"] = os.path.join(os.environ["SBS_HOME"], "bin") + os.pathsep + os.environ["PATH"]
+def generateinstallerversionheader(sbshome = None):
+	shellenv = os.environ.copy()
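+	# point PYTHONPATH at the raptor python directory so the child process can import raptor_version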
+	shellenv["PYTHONPATH"] = os.path.join(sbshome, "python")
 	
-	versioncommand = "sbs -v"
+	raptorversioncommand = "python -c \"import raptor_version; print raptor_version.numericversion()\""
 	
-	# Raptor version string looks like this
-	# sbs version 2.5.0 [2009-02-20 release]
+	# Raptor version is obtained from raptor_version module's numericversion function.
 	sbs_version_matcher = re.compile(".*(\d+\.\d+\.\d+).*", re.I)
 	
 	# Create Raptor subprocess
-	sbs = subprocess.Popen(versioncommand, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
-	
+	versioncommand = subprocess.Popen(raptorversioncommand, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=shellenv)
+	raptorversion = ""
 	# Get all the lines matching the RE
-	for line in sbs.stdout.readlines():
+	for line in versioncommand.stdout.readlines():
 		res = sbs_version_matcher.match(line)
 		if res:
 			raptorversion = res.group(1)
 			print "Successfully determined Raptor version %s" % raptorversion
 
-	sbs.wait() # Wait for process to end
+	versioncommand.wait() # Wait for process to end
 	
 	raptorversion_nsis_header_string = "# Raptor version file\n\n!define RAPTOR_VERSION %s\n" % raptorversion
 	
@@ -94,11 +105,16 @@
 	global tempdir
 	print "Cleaning up temporary directory %s" % tempdir
 	shutil.rmtree(tempdir,True)
+	try:
+		os.remove("raptorversion.nsh")
+		print "Successfully deleted raptorversion.nsh."
+	except:
+		print "ERROR: failed to remove raptorversion.nsh - remove manually if needed."
 	print "Done."
 
 makensispath = unzipnsis(".\\NSIS.zip")
-generateinstallerversionheader(options.sbs_home)
-nsiscommand = makensispath + " /DRAPTOR_LOCATION=%s raptorinstallerscript.nsi" % options.sbs_home
+generateinstallerversionheader(options.sbshome)
+nsiscommand = makensispath + " /DRAPTOR_LOCATION=%s /DWIN32SUPPORT=%s raptorinstallerscript.nsi" % (options.sbshome, options.win32support)
 print "nsiscommand = %s" % nsiscommand
 runmakensis(nsiscommand)
 cleanup()
--- a/sbsv2/raptor/util/install-windows/raptorinstallerscript.nsi	Fri Jan 15 16:09:36 2010 +0000
+++ b/sbsv2/raptor/util/install-windows/raptorinstallerscript.nsi	Sun Jan 17 23:00:39 2010 +0000
@@ -114,6 +114,14 @@
     File /r /x distribution.policy.s60 ${RAPTOR_LOCATION}\schema\*.*
     SetOutPath "$INSTDIR\win32"
     File /r /x distribution.policy.s60 ${RAPTOR_LOCATION}\win32\*.*
+    SetOutPath "$INSTDIR\win32\bv"
+    File /r /x distribution.policy.s60 /x .hg ${WIN32SUPPORT}\bv\*.*
+    SetOutPath "$INSTDIR\win32\cygwin"
+    File /r /x distribution.policy.s60 /x .hg ${WIN32SUPPORT}\cygwin\*.*
+    SetOutPath "$INSTDIR\win32\mingw"
+    File /r /x distribution.policy.s60 /x .hg ${WIN32SUPPORT}\mingw\*.*
+    SetOutPath "$INSTDIR\win32\python252"
+    File /r /x distribution.policy.s60 /x .hg ${WIN32SUPPORT}\python252\*.*
     
     SetOutPath "$INSTDIR"
     File ${RAPTOR_LOCATION}\RELEASE-NOTES.txt