Merge
author: Shabe Razvi <shaber@symbian.org>
date: Tue, 16 Feb 2010 17:58:52 +0000 (2010-02-16)
changeset 901 1bec8957803b
parent 900 ed28a33746f2 (current diff)
parent 898 79766bc6c419 (diff)
child 902 6df3172848e9
Merge
common/build.xml
common/common_props.ant.xml
common/tools/cachefiles.pl
--- a/common/build.xml	Tue Feb 16 17:57:06 2010 +0000
+++ b/common/build.xml	Tue Feb 16 17:58:52 2010 +0000
@@ -24,14 +24,16 @@
         <then>
             <exec executable="perl" outputproperty="sf.spec.job.root.drive" logerror="true" failonerror="true">
                 <arg value="${sf.common.config.dir}/tools/findPhysicalDrive.pl"/>
+                <arg value="-space"/>
             </exec>
         </then>
     </if>
     
     <!-- Import common properties -->
     <import file="${sf.common.config.dir}/common_props.ant.xml" />
-
+    
     <property name="sf.spec.job.rootdir" value="${sf.spec.job.root.drive}/${sf.spec.job.root.path}"/>
+    <property name="sf.spec.sourcesync.sourcespecdir" value="${sf.project.location}"/>
     
     <!-- setup Helium internal properties from their equivalent in the project spec -->
     <property name="build.name" value="${sf.spec.job.name}"/>
@@ -76,18 +78,8 @@
     <!-- Import functionality distributed into other file(s) -->
     <import file="${sf.common.config.dir}/build.postbuild.xml"/>
 	
-	<!-- Import test functionality distributed into other file(s) -->
+    <!-- Import test functionality distributed into other file(s) -->
     <import file="${sf.common.config.dir}/build.test.xml"/>
-             
-    <!-- setup conditional Helium internal properties -->
-    <if><istrue value="${sf.spec.sourcesync.usecache}"/>
-        <then>
-            <property name="sf.spec.sourcesync.sourcespecdir" value="${temp.build.dir}"/>
-        </then>
-        <else>
-            <property name="sf.spec.sourcesync.sourcespecdir" value="${sf.project.location}"/>
-        </else>
-    </if>
     
     <!-- helium 7 compatability. needs to be defined -->
     <if>
@@ -371,25 +363,46 @@
         </if>
         <stopwatch name="sf-syncsource" action="elapsed"/>
     </target>
-
+    
     <target name="sf-get-source" depends="sf-generate-source-spec">
+        <stopwatch name="sf-get-source"/>
         <ant antfile="${temp.build.dir}/source-spec.ant.xml" />
+        <stopwatch name="sf-get-source" action="elapsed"/>
     </target>
     
     <target name="sf-generate-source-spec">
-        <!-- Generate the sources.csv if hg cache is activated -->
-        <if><istrue value="${sf.spec.sourcesync.usecache}"/>
+        <!-- If no cache location has been specified, but we should use one, then work one out... -->
+        <if>
+            <and>    
+                <istrue value="${sf.spec.sourcesync.usecache}"/>
+                <not><isset property="sf.spec.sourcesync.cachelocation"/></not>
+            </and>
             <then>
-                <echo message="Generating Hg local cache..." />
-                <exec executable="perl" dir="${build.log.dir}" failonerror="true" output="${build.log.dir}/${build.id}_hgcache.log">
-                    <arg value="${sf.common.config.dir}/tools/cachefiles.pl"/>
-                    <arg value="${sf.spec.sourcesync.cachelocation}"/>
-                    <arg value="${sf.project.location}/${sf.spec.sourcesync.sourcespecfile}"/>
-                    <arg value="${temp.build.dir}/${sf.spec.sourcesync.sourcespecfile}"/>
+                <!-- Iterate through drives to look for an existing cache -->
+                <exec executable="perl" outputproperty="sf.spec.sourcesync.all.drives" logerror="true" failonerror="true">
+                    <arg value="${sf.common.config.dir}/tools/findPhysicalDrive.pl"/>
+                    <arg value="-all"/>
                 </exec>
+                <for list="${sf.spec.sourcesync.all.drives}" param="physical.drive">
+                    <sequential>
+                        <available property="sf.spec.sourcesync.cachelocation" value="@{physical.drive}/${sf.spec.sourcesync.cache.path}" file="@{physical.drive}/${sf.spec.sourcesync.cache.path}" type="dir"/>
+                    </sequential>
+                </for>
+                <if>
+                    <not><isset property="sf.spec.sourcesync.cachelocation"/></not>
+                    <then>
+                        <!-- No existing cache - locate the preferred drive for creating one -->
+                        <exec executable="perl" outputproperty="sf.spec.sourcesync.largest.drive" logerror="true" failonerror="true">
+                            <arg value="${sf.common.config.dir}/tools/findPhysicalDrive.pl"/>
+                            <arg value="-capacity"/>
+                        </exec>
+                        <property name="sf.spec.sourcesync.cachelocation" value="${sf.spec.sourcesync.largest.drive}/${sf.spec.sourcesync.cache.path}"/>
+                        <mkdir dir="${sf.spec.sourcesync.cachelocation}"/>
+                    </then>
+                </if>
             </then>
         </if>
-
+        
         <!-- TODO: 1. Same file name souce-spec.ant.xml is used for all packages
         for multiple package builds, this needs to be linked with package name. -->
         <!-- TODO: 2. Change fmpp data to be a full property rather than relative path -->
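
The new sf-generate-source-spec logic above searches all physical drives for an existing hg cache and, failing that, creates one on the largest-capacity drive. A minimal Perl sketch of the equivalent flow (script paths and property values are illustrative; findPhysicalDrive.pl, reworked later in this changeset, prints a comma-separated drive list for -all and a single drive letter for -capacity):

    # Sketch only - mirrors the Ant logic above.
    use strict;
    use File::Path;                # for mkpath

    my $cache_path = "hgcache";    # sf.spec.sourcesync.cache.path
    my $cache_location;            # sf.spec.sourcesync.cachelocation

    # Look on every physical drive for an existing cache directory
    chomp(my $all_drives = `perl common/tools/findPhysicalDrive.pl -all`);
    for my $drive (split /,/, $all_drives)
    {
        if (-d "$drive/$cache_path")
        {
            $cache_location = "$drive/$cache_path";
            last;
        }
    }

    # No existing cache - create one on the drive with the greatest capacity
    if (!defined $cache_location)
    {
        chomp(my $largest = `perl common/tools/findPhysicalDrive.pl -capacity`);
        $cache_location = "$largest/$cache_path";
        mkpath($cache_location);
    }

    print "Using hg cache at $cache_location\n";

In the Ant version the first drive found wins because the property set by <available> is immutable; the explicit "last" above has the same effect.
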
--- a/common/common_props.ant.xml	Tue Feb 16 17:57:06 2010 +0000
+++ b/common/common_props.ant.xml	Tue Feb 16 17:58:52 2010 +0000
@@ -49,8 +49,8 @@
     <property name="sf.spec.sourcesync.enable" value="true"/>
     <property name="sf.spec.sourcesync.usecache" value="false"/>
     <property name="sf.spec.sourcesync.sourcespecfile" value="sources.csv"/>
-    <property name="sf.spec.sourcesync.cachelocation" value="${sf.spec.job.rootdir}/hgcache"/>
-    <property name="sf.spec.sourcesync.bug419" value="true"/> <!--Temporary workaround for bug 419 - Does source sync manually -->
+    <property name="sf.spec.sourcesync.cache.path" value="hgcache"/> <!-- Path relative to root of some drive for location of hg cache -->
+    <property name="sf.spec.sourcesync.local.development.area" value="//v800008/Builds01"/> <!-- Location of a "development area" which should be cached on build machines (in addition to developer.symbian.org content) -->
     
     <property name="sf.spec.baseline.enable" value="true"/>
     <property name="sf.spec.baseline.select" value="auto"/> <!-- auto|explicit|location -->
@@ -69,7 +69,7 @@
     <property name="sf.spec.publish.diamonds.port" value="80"/>
     <property name="sf.spec.publish.diamonds.path" value="/diamonds/builds/"/>
     <property name="sf.spec.publish.diamonds.tag" value=""/>
-	<property name="sf.spec.ats_worker.drive" value="C:\apps\ATS3\bin"/>
+    <property name="sf.spec.ats_worker.drive" value="C:\apps\ATS3\bin"/>
     
     <property name="sf.spec.tagafterbuild.enable" value="false"/>
 
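
The two new properties above replace the old absolute cache location: sf.spec.sourcesync.cache.path is resolved against whichever drive the cache search picks, and repositories are cached under a Live/ subtree (developer.symbian.org clones) or a LocalDev/ subtree (clones from the local development area), as the template change below shows. A rough Perl sketch of that mapping, with an illustrative drive letter and repository URL:

    # Rough mapping from a package source URL to its cache repository path.
    # The drive letter and example URL are illustrative only.
    use strict;

    my $cache    = "D:/hgcache";            # cachelocation = drive + cache.path
    my $localdev = "//v800008/Builds01";    # sf.spec.sourcesync.local.development.area

    sub cache_repo_for
    {
        my ($source) = @_;
        return "$cache/Live/$1"     if $source =~ m{^http://developer\.symbian\.org/(.*)}i;
        return "$cache/LocalDev/$1" if $source =~ m{^\Q$localdev\E/(.*)}i;
        return undef;                       # not a cacheable source
    }

    # e.g. prints D:/hgcache/Live/oss/MCL/sf/os/kernelhwsrv/
    print cache_repo_for("http://developer.symbian.org/oss/MCL/sf/os/kernelhwsrv/"), "\n";
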
--- a/common/templates/source-spec.ant.xml.ftl	Tue Feb 16 17:57:06 2010 +0000
+++ b/common/templates/source-spec.ant.xml.ftl	Tue Feb 16 17:58:52 2010 +0000
@@ -1,126 +1,165 @@
 <?xml version="1.0"?>
 <project name="SF-SOURCESPEC" default="all" xmlns:hlm="http://www.nokia.com/helium">
 
+<#assign dollar = "$"/>
+
+    <!-- Convert backslashes in the cache location, because otherwise they disappear entirely when used in a regex replacement! -->
+    <propertyregex property="sf.spec.sourcesync.cachelocation.for.regex" input="${dollar}{sf.spec.sourcesync.cachelocation}" regexp="\\" replace="/" global="true" defaultValue="${dollar}{sf.spec.sourcesync.cachelocation}"/>
+    
 <#assign fileset = "" />
 <#assign sync_list = "" />
 <#assign bom_list  = "" />
 <#assign change_list  = "" />
-<#assign dollar = "$"/>
 <#assign count = 0 />
-<#if ("${ant['sf.spec.sourcesync.archive']}")?? && "${ant['sf.spec.sourcesync.archive']}" == "true">
-  <#assign fast_sync = true />
-<#else>
-  <#assign fast_sync = false />
-</#if>
-<#if ("${ant['sf.spec.sourcesync.bug419']}")?? && "${ant['sf.spec.sourcesync.bug419']}" == "true">
-  <#assign bug419 = true />
-<#else>
-  <#assign bug419 = false />
-</#if>
-
-    <!-- remove previous version of BOM file (if exists)  -->
-    <target name="reset-bom-sources-csv">
-        <delete file="${ant['build.drive']}/output/logs/BOM/sources.csv" quiet="true"/>
-    </target>
 
 <#list data as csv_file>
   <#list csv_file as pkg_detail>
     <target name="sf-prebuild-${count}">
-        <sequential>
-            <!-- create sf\layer dir  -->
-            <mkdir dir="${ant['build.drive']}${pkg_detail.dst}"/>
-            <delete dir="${ant['build.drive']}${pkg_detail.dst}" failonerror="false" />
-            <!-- Don't use hg archive with tags, as we can have wildcards in the tags... -->
-            <#if fast_sync && ("${pkg_detail.type}"!="tag") > 
-              <!-- Identify the version on the cache first -->
-              <exec executable="hg" dir="${pkg_detail.source}" outputproperty="sf.sourcesync.${count}.checksum">
-                  <arg value="identify"/>
-                  <arg value="-i"/>
-                  <arg value="-r"/>
-                  <arg value="${pkg_detail.pattern}"/>
-              </exec>
-              <!-- hg archive on the version we found -->
-              <exec executable="hg" dir="${pkg_detail.source}">
-                  <arg value="archive"/>
-                  <arg value="-r"/>
-                  <arg value="${dollar}{sf.sourcesync.${count}.checksum}"/>
-                  <arg value="${ant['build.drive']}${pkg_detail.dst}"/>
-              </exec>
-            <#else>
-            <exec executable="hg" dir="${ant['build.drive']}">
-                <arg value="clone"/>
-                <arg value="-U"/>
-                <arg value="${pkg_detail.source}"/>
-                <arg value="${ant['build.drive']}${pkg_detail.dst}"/>
-            </exec>
-            
-            <#if bug419 >
-              <exec executable="hg" dir="${ant['build.drive']}${pkg_detail.dst}" outputproperty="sf.sourcesync.${count}.checksum">
-                  <arg value="identify"/>
-                  <arg value="-i"/>
-                  <arg value="-r"/>
-                  <arg value="${pkg_detail.pattern}"/>
-              </exec>
-              <exec executable="hg" dir="${ant['build.drive']}${pkg_detail.dst}">
-                  <arg value="update"/>
-                  <arg value="-r"/>
-                  <arg value="${dollar}{sf.sourcesync.${count}.checksum}"/>
-              </exec>            
-            <#else>
-            <hlm:scm verbose="true" scmUrl="scm:hg:${pkg_detail.source}">
-                <!--hlm:checkout basedir="${ant['build.drive']}${pkg_detail.dst}"/-->
-                <#if "${pkg_detail.type}"=="tag" >
-                <hlm:tags basedir="${ant['build.drive']}${pkg_detail.dst}" reference="hg.tags.id${dollar}{refid}"/>
-                <hlm:update basedir="${ant['build.drive']}${pkg_detail.dst}">
-                <hlm:latestTag pattern="${pkg_detail.pattern}">
-                        <hlm:tagSet refid="hg.tags.id${dollar}{refid}" />
-                </hlm:latestTag>
-                </hlm:update>
-                </#if>
-                <#if "${pkg_detail.type}"== "changeset" || "${pkg_detail.type}"=="branch">
-                <hlm:update basedir="${ant['build.drive']}${pkg_detail.dst}">
-                     <hlm:tag name="${pkg_detail.pattern}"/>
-                </hlm:update>
-                </#if>
-            </hlm:scm>
-            </#if>
+        
+        <!-- Create sf\layer dir on build dir -->
+        <mkdir dir="${ant['build.drive']}${pkg_detail.dst}"/>
+        <delete dir="${ant['build.drive']}${pkg_detail.dst}" failonerror="true" />
+        
+        <if>
+            <istrue value="${dollar}{sf.spec.sourcesync.usecache}"/>
+            <then>
+                <!-- Work out cache location from source location -->
+                <propertyregex property="sf.spec.sourcesync.cachelocation.${count}" input="${pkg_detail.source}" regexp="^http://developer.symbian.org/" casesensitive="false" replace="${dollar}{sf.spec.sourcesync.cachelocation.for.regex}/Live/"/>
+                <propertyregex property="sf.spec.sourcesync.cachelocation.${count}" input="${pkg_detail.source}" regexp="^${ant['sf.spec.sourcesync.local.development.area']}/" casesensitive="false" replace="${dollar}{sf.spec.sourcesync.cachelocation.for.regex}/LocalDev/"/>
+            </then>
+        </if>
+	
+        <if>
+            <and>
+                <isset property="sf.spec.sourcesync.cachelocation.${count}"/>
+                <available file="${dollar}{sf.spec.sourcesync.cachelocation.${count}}" type="dir"/>
+            </and>
+            <then>
+                <!-- Package in cache already -->
+                <echo message="Pull from ${pkg_detail.source} to ${dollar}{sf.spec.sourcesync.cachelocation.${count}}"/>
+                <exec executable="hg" dir="${dollar}{sf.spec.sourcesync.cachelocation.${count}}" failonerror="false" resultproperty="sf.spec.sourcesync.cache.pull.error.code.${count}">
+                    <arg value="pull"/>
+                    <arg value="${pkg_detail.source}"/>
+                </exec>
+                <if>
+                    <equals arg1="0" arg2="${dollar}{sf.spec.sourcesync.cache.pull.error.code.${count}}"/>
+                    <then>
+                        <echo message="Clone from ${dollar}{sf.spec.sourcesync.cachelocation.${count}} to ${ant['build.drive']}${pkg_detail.dst}"/>
+                        <exec executable="hg" dir="${ant['build.drive']}/" failonerror="true">
+                            <arg value="clone"/>
+                            <arg value="-U"/>
+                            <arg value="--uncompressed"/>
+                            <arg value="${dollar}{sf.spec.sourcesync.cachelocation.${count}}"/>
+                            <arg value="${ant['build.drive']}${pkg_detail.dst}"/>
+                        </exec>
+                    </then>
+                    <else>
+                        <!-- Uh oh - the cache is corrupted somehow -->
+                        <!-- (Try to) recover the cache repo -->
+                        <forget>
+                            <exec executable="hg" dir="${dollar}{sf.spec.sourcesync.cachelocation.${count}}" failonerror="false" resultproperty="sf.spec.sourcesync.cache.recover.error.code.${count}">
+                                <arg value="recover"/>
+                            </exec>
+                            <if>
+                                <not><equals arg1="0" arg2="${dollar}{sf.spec.sourcesync.cache.recover.error.code.${count}}"/></not>
+                                <then>
+                                    <echo message="Trashing ${dollar}{sf.spec.sourcesync.cachelocation.${count}} as broken"/>
+                                    <delete dir="${dollar}{sf.spec.sourcesync.cachelocation.${count}}"/>
+                                </then>
+                            </if>
+                        </forget>
+                        <!-- In the meantime, by-pass it for this build -->
+                        <echo message="Clone from ${pkg_detail.source} to ${ant['build.drive']}${pkg_detail.dst}"/>
+                        <exec executable="hg" dir="${ant['build.drive']}/" failonerror="true">
+                            <arg value="clone"/>
+                            <arg value="-U"/>
+                            <arg value="${pkg_detail.source}"/>
+                            <arg value="${ant['build.drive']}${pkg_detail.dst}"/>
+                        </exec>
+                    </else>
+                </if>
+                <!-- Update to required revision -->
+                <exec executable="hg" dir="${ant['build.drive']}${pkg_detail.dst}" failonerror="true">
+                    <arg value="update"/>
+                    <arg value="-r"/>
+                    <arg value="${pkg_detail.pattern}"/>
+                </exec>
+                <!-- Record the changeset selected, for the BOM -->
                 <exec executable="hg" dir="${ant['build.drive']}${pkg_detail.dst}" outputproperty="sf.sourcesync.${count}.checksum">
-                <arg value="identify"/>
-                <arg value="-i"/>
-            </exec>
-          </#if>  
-        </sequential>
-    </target>
-
-    <target name="sf-bom-info-${count}">
-        <sequential>
-            <!-- record info on source code repo/rev in BOM file  -->
-            <echo message="dir ${ant['build.drive']}${pkg_detail.dst} : ${dollar}{sf.sourcesync.${count}.checksum}"/>
-            <exec executable="cmd" output="${ant['build.drive']}/output/logs/BOM/sources.csv" append="true">
-                <arg value="/c"/>
-                <arg value="echo"/>
-                <arg value="${pkg_detail.source},${pkg_detail.dst},changeset,${dollar}{sf.sourcesync.${count}.checksum},${pkg_detail.sysdef}"/>
-            </exec>
-        </sequential>
+                    <arg value="identify"/>
+                    <arg value="-i"/>
+                </exec>
+            </then>
+            <else>
+                <echo message="Clone from ${pkg_detail.source} to ${ant['build.drive']}${pkg_detail.dst}"/>
+                <exec executable="hg" dir="${ant['build.drive']}/" failonerror="true">
+                    <arg value="clone"/>
+                    <arg value="-U"/>
+                    <arg value="${pkg_detail.source}"/>
+                    <arg value="${ant['build.drive']}${pkg_detail.dst}"/>
+                </exec>
+                <!-- Update to required version -->
+                <exec executable="hg" dir="${ant['build.drive']}${pkg_detail.dst}" failonerror="true">
+                    <arg value="update"/>
+                    <arg value="-r"/>
+                    <arg value="${pkg_detail.pattern}"/>
+                </exec>
+                <!-- Record the changeset selected, for the BOM -->
+                <exec executable="hg" dir="${ant['build.drive']}${pkg_detail.dst}" outputproperty="sf.sourcesync.${count}.checksum">
+                    <arg value="identify"/>
+                    <arg value="-i"/>
+                </exec>
+                <if>
+                    <isset property="sf.spec.sourcesync.cachelocation.${count}"/>
+                    <then>
+                        <forget>
+                            <nice newpriority="1"/>
+                            <!-- Init cache -->
+                            <mkdir dir="${dollar}{sf.spec.sourcesync.cachelocation.${count}}"/>
+                            <delete dir="${dollar}{sf.spec.sourcesync.cachelocation.${count}}" failonerror="true" />
+                            <echo message="Initialise cache at ${dollar}{sf.spec.sourcesync.cachelocation.${count}}"/>
+                            <!-- Clone source to get the right default repo -->
+                            <exec executable="hg" dir="${ant['build.drive']}/" failonerror="false">
+                                <arg value="clone"/>
+                                <arg value="-r"/>
+                                <arg value="null"/>
+                                <arg value="${pkg_detail.source}"/>
+                                <arg value="${dollar}{sf.spec.sourcesync.cachelocation.${count}}"/>
+                            </exec>
+                            <!-- Set the speed-up flag on the cache repo -->
+                            <echo file="${dollar}{sf.spec.sourcesync.cachelocation.${count}}/.hg/hgrc" append="true" message="${dollar}{line.separator}"/>
+                            <echo file="${dollar}{sf.spec.sourcesync.cachelocation.${count}}/.hg/hgrc" append="true" message="[server]${dollar}{line.separator}"/>
+                            <echo file="${dollar}{sf.spec.sourcesync.cachelocation.${count}}/.hg/hgrc" append="true" message="uncompressed=True${dollar}{line.separator}"/>
+                            <echo message="Push from ${ant['build.drive']}${pkg_detail.dst} to ${dollar}{sf.spec.sourcesync.cachelocation.${count}} in background"/>
+                            <exec executable="hg" dir="${ant['build.drive']}${pkg_detail.dst}" failonerror="false">
+                                <arg value="push"/>
+                                <arg value="-f"/>
+                                <arg value="${dollar}{sf.spec.sourcesync.cachelocation.${count}}"/>
+                            </exec>
+                        </forget>
+                    </then>
+                </if>
+            </else>
+        </if>
     </target>
-
+    
+    <target name="sf-bom-info-${count}">
+        <!-- record info on source code repo/rev in BOM file  -->
+        <echo file="${ant['build.drive']}/output/logs/BOM/sources.csv" append="true" message="${pkg_detail.source},${pkg_detail.dst},changeset,${dollar}{sf.sourcesync.${count}.checksum},${pkg_detail.sysdef}${dollar}{line.separator}"/>
+    </target>
+    
     <target name="sf-bom-change-info-${count}">
-        <sequential>
-      		<if><not><isset property="sf.sourcesync.${count}.checksum"/></not>
-      			<then>
-              <exec executable="hg" dir="${ant['build.drive']}${pkg_detail.dst}" outputproperty="sf.sourcesync.${count}.checksum">
-                <arg value="identify"/>
-                <arg value="-i"/>
-              </exec>
-      			</then>
-      		</if>
-      		  <echo message="Writing BOM changes since ${dollar}{sf.previous.pdk.tag} for ${pkg_detail.dst}" />
-      		  <echo file="${ant['build.drive']}/output/logs/BOM/changes.txt" append="true" message="${dollar}{line.separator}${pkg_detail.source}${dollar}{line.separator}${pkg_detail.dst}${dollar}{line.separator}${dollar}{line.separator}" />
-            <#if fast_sync > 
-              <exec executable="hg" dir="${pkg_detail.source}" output="${ant['build.drive']}/output/logs/BOM/changes.txt" append="true">
-            <#else>      		  
-              <exec executable="hg" dir="${ant['build.drive']}${pkg_detail.dst}" output="${ant['build.drive']}/output/logs/BOM/changes.txt" append="true">
-            </#if>
+        <if><not><isset property="sf.sourcesync.${count}.checksum"/></not>
+            <then>
+                <exec executable="hg" dir="${ant['build.drive']}${pkg_detail.dst}" outputproperty="sf.sourcesync.${count}.checksum">
+                    <arg value="identify"/>
+                    <arg value="-i"/>
+                </exec>
+            </then>
+        </if>
+        <echo message="Writing BOM changes since ${dollar}{sf.previous.pdk.tag} for ${pkg_detail.dst}" />
+        <echo file="${ant['build.drive']}/output/logs/BOM/changes.txt" append="true" message="${dollar}{line.separator}${pkg_detail.source}${dollar}{line.separator}${pkg_detail.dst}${dollar}{line.separator}${dollar}{line.separator}" />
+            <exec executable="hg" dir="${ant['build.drive']}${pkg_detail.dst}" output="${ant['build.drive']}/output/logs/BOM/changes.txt" append="true">
                 <arg value="log"/>
                 <arg value="-r"/>
                 <arg value="${dollar}{sf.sourcesync.${count}.checksum}:${dollar}{sf.previous.pdk.tag}"/>
@@ -129,44 +168,35 @@
                 <arg value="${pkg_detail.pattern}"/>
                 </#if>    
             </exec>
-          </sequential>
     </target>
-
+    
     <#assign fileset = "${fileset}" + "<fileset dir=\"${ant['build.drive']}${pkg_detail.dst}\" includes=\"${pkg_detail.sysdef}\"/>\r\n        " />       
-    <#assign sync_list = "${sync_list}" + "<runtarget target=\"sf-prebuild-${count}\"/>\r\n    "/>
-    <#assign bom_list = "${bom_list}" + "<runtarget target=\"sf-bom-info-${count}\"/>\r\n  "/>
-    <#assign change_list = "${change_list}" + "<runtarget target=\"sf-bom-change-info-${count}\"/>\r\n  "/>
+    <#assign sync_list = "${sync_list}" + "<runtarget target=\"sf-prebuild-${count}\"/>\r\n            "/>
+    <#assign bom_list = "${bom_list}" + "<runtarget target=\"sf-bom-info-${count}\"/>\r\n        "/>
+    <#assign change_list = "${change_list}" + "<runtarget target=\"sf-bom-change-info-${count}\"/>\r\n        "/>
     <#assign count = count + 1 />
   </#list>
 </#list>
-
+    
     <path id="system.definition.files">
         <fileset dir="${dollar}{sf.common.config.dir}/sysdefs" includes="*.sysdef.xml"/>
         ${fileset}
     </path>
-
-<target name="all" depends="reset-bom-sources-csv">
-  <parallel threadCount="${ant['env.NUMBER_OF_PROCESSORS']}">
-    ${sync_list}
-  </parallel>
-
-  <echo message="Adding BOM header"/>
-      <mkdir dir="${ant['build.drive']}/output/logs/BOM/"/>
-      <exec executable="cmd" output="${ant['build.drive']}/output/logs/BOM/sources.csv" append="true">
-      <arg value="/c"/>
-      <arg value="echo"/>
-      <arg value="source,dst,type,pattern,sysdef"/>
-  </exec>
-  
-  ${bom_list}
-  <runtarget target="sf-bom-change-info" />
-</target>
-
-<target name="sf-bom-change-info">
-  <mkdir dir="${ant['build.drive']}/output/logs/BOM/"/>
-  <delete file="${ant['build.drive']}/output/logs/BOM/changes.txt" quiet="true"/>
-
-  ${change_list}
-</target>
-
+    
+    <target name="all">
+        <parallel threadsPerProcessor="1" failonany="true">
+            ${sync_list}
+        </parallel>
+        
+        <echo message="Adding BOM header"/>
+        <mkdir dir="${ant['build.drive']}/output/logs/BOM/"/>
+        <echo file="${ant['build.drive']}/output/logs/BOM/sources.csv" message="source,dst,type,pattern,sysdef${dollar}{line.separator}"/>
+        
+        ${bom_list}
+        
+        <delete file="${ant['build.drive']}/output/logs/BOM/changes.txt" quiet="true"/>
+        
+        ${change_list}
+    </target>
+    
 </project>
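
The regenerated per-package targets above follow a pull-through cache: pull the package's upstream repository into its cache repository, clone locally from the cache (uncompressed, for speed), fall back to a direct upstream clone if the cache pull fails (while a background task tries hg recover or trashes the broken cache), and seed a new cache repository in the background when none exists yet. A condensed Perl sketch of the foreground path, with illustrative URLs and paths:

    # Condensed sketch of the per-package sync flow generated by the template.
    # Repository URL, cache path and destination are illustrative only.
    use strict;

    my $source    = "http://developer.symbian.org/oss/MCL/sf/os/kernelhwsrv/";
    my $cacherepo = "D:/hgcache/Live/oss/MCL/sf/os/kernelhwsrv/";
    my $dst       = "M:/sf/os/kernelhwsrv";
    my $pattern   = "tip";    # pkg_detail.pattern

    if (-d $cacherepo && system("hg", "pull", "-R", $cacherepo, $source) == 0)
    {
        # Cache present and healthy: clone from it, uncompressed for speed
        system("hg", "clone", "-U", "--uncompressed", $cacherepo, $dst) == 0
            or die "clone from cache failed";
    }
    else
    {
        # Cache missing or corrupt: by-pass it and clone upstream directly
        system("hg", "clone", "-U", $source, $dst) == 0
            or die "clone from $source failed";
    }

    # Update to the required revision and record the changeset for the BOM
    system("hg", "update", "-R", $dst, "-r", $pattern) == 0 or die "update failed";
    chomp(my $checksum = `hg identify -i -R $dst`);
    print "$source,$dst,changeset,$checksum\n";
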
--- a/common/tools/cachefiles.pl	Tue Feb 16 17:57:06 2010 +0000
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,128 +0,0 @@
-#!/usr/bin/perl
-
-
-use strict;
-
-#my $cache = "d:/HG_cache/";
-#my $master = "//v800008/Builds01/";
-my $cache = shift @ARGV;
-my $csv = shift @ARGV;
-my $generated = shift @ARGV;
-my @recover;
-my @nospace;
-my $exitcode = 0;
-
-if(defined $cache && defined $generated && defined $csv)
-{ 
-  print "Cache:$cache\nIn:$csv\nOut:$generated\n";
-  
-  # Format the cache directory path
-  if ( $cache !~ /(.*)[\\\/]$/ )
-  {
-	$cache .= "/";
-  }
-  
-  open(IN, "<$csv") or die "Couldn't open $csv for reading";
-  open(OUT,">$generated") or die "Couldn't open $generated for writing";
-  my $header = <IN>;
-  print OUT $header;
-  while( my $line = <IN>)
-  {
-    my @args = split(',',$line);
-    my $repo = shift @args;
-	my $master = "";
-	if ( $repo =~ m/^(.*\/)(.*\/(oss|rnd|sfl)\/.*\/)$/i )
-	{
-		$master = $1;
-		$repo = $2;
-	}
-    if(-d $master.$repo.".hg")
-    {
-  #    print "Found:\t".$master.$repo.".hg\n";
-      my $cmd;
-      if(-d $cache.$repo.".hg") # update
-      {
-        $cmd = "hg pull -R $cache$repo $master$repo";
-      }
-      else #clone
-      {
-        #taken from the normal clone script...
-        my @dirs = split ('\/', $cache.$repo);
-        my $destdir = pop @dirs;
-        my $path = "";    
-        foreach my $dir (@dirs)
-          {
-          $path = ($path eq "") ? $dir : "$path/$dir";
-          if (!-d $path)
-            {
-            mkdir $path;
-            }
-          }
-            
-        $cmd = "hg clone -U $master$repo $cache$repo";
-      }  
-      if(cache($cmd))
-        {
-          print OUT $cache.$repo.",".join(',', @args);
-        }
-      else
-        {
-           print OUT $master.$repo.",".join(',', @args);
-		   $exitcode = 1;
-        }
-    }
-    else
-    {
-      print "Error: cannot find ".$master.$repo.".hg\n";
-	  $exitcode = 1;
-    }
-  }
-  
-  close OUT;
-  close IN;
-}
-else
-{
-  print "Usage: <cache_path> <source_csv> <generated_csv>";
-  $exitcode = 1;
-}
-
-foreach my $line (@recover)
-{
-  print "WARNING: HG Recover: $line\n";
-}
-foreach my $line (@nospace)
-{
-  print "WARNING: No Space: $line\n";
-}
-
-exit $exitcode;
-
-sub cache($cmd)
-{
-  my $cmd = shift;
-  print "$cmd\n";
-  
-  open(CMD, "$cmd 2>&1 |") or die "Couldn't execute $cmd";
-  while(my $line = <CMD>)
-  {
-#    print $line;
-    # parse the output for failures. On fail return 0;
-    if($line =~ m/abort/i)
-    {
-      print $line;
-      if($line =~ m/hg\s+recover/i)
-      {
-        push(@recover, $cmd);
-      }
-      elsif($line =~ m/No\s+space/i)
-      {
-        push(@nospace, $cmd);
-      }
-      close CMD;
-      return 0;
-    }    
-  }
-  close CMD;
-  return 1;
-}
\ No newline at end of file
--- a/common/tools/envinfo.pl	Tue Feb 16 17:57:06 2010 +0000
+++ b/common/tools/envinfo.pl	Tue Feb 16 17:58:52 2010 +0000
@@ -79,7 +79,7 @@
 # Metrowerk Compiler
 my $mwcc_ver = 'N.A.';
 my $mwcc_out = `mwccsym2 -version`;
-$mwcc_ver = $1 if ($mwcc_out =~ /^Version (.*) \(Build/m);
+$mwcc_ver = $1 if ($mwcc_out =~ /^Version (.*)/m);
 push @environment_info, {name=>'mwccsym2', version=>$mwcc_ver};
 
 # RVCT 2.2
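
The loosened pattern above matches the mwccsym2 banner whether or not it carries a "(Build ...)" suffix; note that the captured version string now includes any text following the version number on that line. A quick check against hypothetical banner lines (real mwccsym2 output may differ):

    use strict;

    for my $mwcc_out ("Version 3.2.5 (Build 470)\n", "Version 3.2.5\n")
    {
        my $mwcc_ver = 'N.A.';
        $mwcc_ver = $1 if ($mwcc_out =~ /^Version (.*)/m);
        print "$mwcc_ver\n";    # "3.2.5 (Build 470)", then "3.2.5"
    }
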
--- a/common/tools/findPhysicalDrive.pl	Tue Feb 16 17:57:06 2010 +0000
+++ b/common/tools/findPhysicalDrive.pl	Tue Feb 16 17:58:52 2010 +0000
@@ -12,23 +12,64 @@
 # Contributors:
 #
 # Description:
-# Find and output the drive letter mapped to the physical volume with the
-# largest amount of free space
-# 
+# Search physical drives to find either:
+#  * The one with the largest amount of free space
+#  * The one with the greatest capacity
+#  * The list of all such drives
 
 use strict;
 
+use Getopt::Long;
+
+# Read option arguments
+my $option;
+my $ok = GetOptions(
+	'capacity' => \$option->{capacity},
+	'space' => \$option->{space},
+	'all' => \$option->{all},
+	'help|?' => \$option->{help},
+);
+
+if (defined $option->{help})
+{
+	usage();
+	exit;
+}
+
+if (!$ok || @ARGV || 1 != scalar grep { defined $option->{$_} } keys %$option)
+{
+	warn "Exactly one option must be supplied to indicate the required output\n$ok\n@ARGV\n";
+	usage();
+	exit(1);
+}
+
 # Use Windows command to list physical volumes on the machine
 # (No substed drives, or mapped network drives)
-my @drives = map {chomp;$_} `echo list volume | diskpart`;
+my @details = map {chomp;$_} `echo list volume | diskpart`;
 
-my %drives;
-for my $driveLine (@drives)
+my @drives;
+my %space;
+my %capacity;
+for my $driveLine (@details)
 {
 	# If this line of output is actually about a healthy HD volume...
-	if ($driveLine =~ m{^\s+Volume \d+\s+([A-Z]).*?(Partition|RAID-5)\s+\d+ [A-Z]+\s+Healthy} )
+	if ($driveLine =~ m{^\s+Volume \d+\s+([A-Z]).*?(Partition|RAID-5)\s+(\d+) ([A-Z]+)\s+Healthy} )
 	{
-		my $letter = $1;
+		my ($letter, $capacityValue, $capacityUnit) = ($1, $3, $4);
+		
+		my %multiplier = (
+			MB => 1000000,
+			GB => 1000000000,
+			TB => 1000000000000,
+		);
+
+		if (not exists $multiplier{$capacityUnit})
+		{
+			warn "Don't know how to interpret $capacityValue $capacityUnit\n";
+			next;
+		}
+		$capacityValue *= $multiplier{$capacityUnit};
+
 		# Ignore the system drive
 		next if ($driveLine =~ m{System\s*$});
 
@@ -38,14 +79,45 @@
 		my $bytesFree = $bytesFree[-1];
 
 		# Record info for this volume
-		$drives{$letter} = $bytesFree;
+		push @drives, $letter;
+		$space{$bytesFree} = $letter;
+		$capacity{$capacityValue} = $letter;
 	}
 }
 
-die "Unable to find any suitable drives at all\n" unless %drives;
+die "Unable to find any suitable drives at all\n" unless %space;
 
-# Switch keys and values
-%drives = reverse %drives;
-# Sort by space to find the volume with the largest amount of space and print out the corresponding letter
-print "$drives{(reverse sort keys %drives)[0]}:\n";
+if ($option->{all})
+{
+	print join ",", map { "$_:" } @drives;
+	print "\n";
+	exit;
+}
+elsif ($option->{capacity})
+{
+	# Sort by capacity to find the largest volume and print out the corresponding letter
+	print "$capacity{(reverse sort keys %capacity)[0]}:\n";
+}
+elsif ($option->{space})
+{
+	# Sort by space to find the volume with the largest amount of space and print out the corresponding letter
+	print "$space{(reverse sort keys %space)[0]}:\n";
+}
 
+exit;
+
+sub usage
+{
+	$0 =~ m{[\\/]([^\\/]*)$};
+	print <<EOT;
+
+Usage: $1 -all | -capacity | -space | -help
+
+  -all          Outputs all physical drives in the system (separated by ',').
+  -capacity     Outputs physical drive of greatest capacity in the system.
+  -space        Outputs physical drive with greatest free space in the system.
+  -help         Outputs this help message.
+
+EOT
+}
+
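
The reworked diskpart parsing above also captures the size column so that -capacity can be supported alongside -space and -all. A standalone check of the volume regex against a diskpart-style line (the sample line is illustrative, not captured output):

    use strict;

    # Illustrative diskpart-style output line; real formatting may vary.
    my $driveLine = "  Volume 2     D   Data         NTFS   Partition    465 GB  Healthy";
    if ($driveLine =~ m{^\s+Volume \d+\s+([A-Z]).*?(Partition|RAID-5)\s+(\d+) ([A-Z]+)\s+Healthy})
    {
        my ($letter, $capacityValue, $capacityUnit) = ($1, $3, $4);
        print "$letter: $capacityValue $capacityUnit\n";    # prints "D: 465 GB"
    }
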
--- a/sf-platform/platform_props.ant.xml	Tue Feb 16 17:57:06 2010 +0000
+++ b/sf-platform/platform_props.ant.xml	Tue Feb 16 17:58:52 2010 +0000
@@ -15,6 +15,8 @@
   <property name="sf.spec.toolsbaseline.select"         value="location" />
   <property name="sf.spec.toolsbaseline.location"       value="\\v800008\Builds01\PDT\PDT_b50" /> <!-- for TEF etc -->
 
+  <property name="sf.spec.syncsource.usecache"     value="true"/>
+
   <!-- generate dir list(s) of epoc32 tree -->
   <property name="sf.spec.dirlist.enable"  value="true"/>