<?xml version="1.0" encoding="UTF-8"?>
<project name="SF-COMMON-CONFIG" xmlns:hlm="http://www.nokia.com/helium">
<property environment="env"/> <!-- make environment variables available via env -->
<dirname property="sf.common.config.dir" file="${ant.file.SF-COMMON-CONFIG}"/>
<!-- If we're told that we've been invoked by Hudson, then use the executor number to determine the drive to use -->
<if>
<isset property="sf.hudson.executor.number"/>
<then>
<!-- This property could be set differently according to the varying set-ups of build parks -->
<property name="sf.hudson.executor.drive.letter.mapping" value="zyxwvutsrqponmlk"/>
<!-- Pick the Nth letter from the string using a regex -->
<!-- (Would be nice if ant gave us a more obvious way to look up an item from an array...) -->
<!-- Default string is arranged to use z: for executor 0, y: for executor 1, etc, etc. At present, SF build machines have a maximum of 4 executors -->
<propertyregex property="sf.spec.job.drive" input="${sf.hudson.executor.drive.letter.mapping}" regexp="\w{${sf.hudson.executor.number}}(\w)" select="\1:"/>
</then>
</if>
<!-- If we've not had a physical drive specified, then we'd better be able to work one out... -->
<if>
<and>
<not><isset property="sf.spec.job.rootdir"/></not>
<not><isset property="sf.spec.job.root.drive"/></not>
</and>
<then>
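<!-- findPhysicalDrive.pl with -space is assumed to print the letter of the local physical drive with the most free space (e.g. "d:") -->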
<exec executable="perl" outputproperty="sf.spec.job.root.drive" logerror="true" failonerror="true">
<arg value="${sf.common.config.dir}/tools/findPhysicalDrive.pl"/>
<arg value="-space"/>
</exec>
</then>
</if>
<!-- Import common properties -->
<import file="${sf.common.config.dir}/common_props.ant.xml" />
<property name="sf.spec.job.rootdir" value="${sf.spec.job.root.drive}/${sf.spec.job.root.path}"/>
<property name="sf.spec.sourcesync.sourcespecdir" value="${sf.project.location}"/>
<!-- Set up Helium internal properties from their equivalents in the project spec -->
<property name="build.name" value="${sf.spec.job.name}"/>
<property name="core.build.version" value="${sf.spec.job.codeline}"/>
<property name="build.number" value="${sf.spec.job.number}"/>
<property name="build.drive" value="${sf.spec.job.drive}"/>
<property name="email.from" value="${sf.spec.email.from}"/>
<property name="local.free.space" value="${sf.spec.job.freespace}"/>
<property name="network.free.space" value="${sf.spec.publish.network.freespace}"/>
<property name="network.drive" value="${sf.spec.publish.networkdrive}"/>
<property name="prep.root.dir" value="${sf.spec.job.rootdir}"/>
<property name="publish" value="${sf.spec.publish.enable}"/>
<property name="publish.root.dir" value="${sf.spec.publish.rootdir}"/>
<property name="diamonds.host" value="${sf.spec.publish.diamonds.server}"/>
<property name="diamonds.port" value="${sf.spec.publish.diamonds.port}"/>
<property name="diamonds.path" value="${sf.spec.publish.diamonds.path}"/>
<property name="build.family" value="${sf.spec.job.name}"/>
<property name="build.system" value="${sf.spec.build.system}"/>
<property name="base_release.path" value="${sf.spec.baseline.location}"/>
<property name="base_release.getenv_options" value="${sf.spec.baseline.getenv_options}"/>
<property name="ccm.user.password" value="set_to_any_value_to_skip_scm_checks__for_helium_5"/>
<property name="ats.drive" value="${sf.spec.ats_worker.drive}"/>
<property name="skip.sbs.layer.generation" value="true"/> <!-- helium 7: needs to be predefined before hlm import -->
<!-- Override Helium 7 defaults to operate like Helium 5.
The *.log.dir properties don't exist yet, so they must be set now;
once helium.ant.xml defines them they become immutable -->
<property name="prep.log.dir" value="${build.drive}/output/logs"/>
<property name="compile.log.dir" value="${build.drive}/output/logs"/>
<property name="post.log.dir" value="${build.drive}/output/logs"/>
<property name="archive.log.dir" value="${build.drive}/output/logs"/>
<property name="signals.log.dir" value="${build.drive}/output/logs"/>
<if>
<isset property="sf.spec.sbs.numberofjobs"/>
<then>
<property name="number.of.threads" value="${sf.spec.sbs.numberofjobs}"/>
</then>
</if>
<property name="sf.diamonds.tags" value="_${sf.spec.job.codeline},${sf.spec.publish.diamonds.tag}"/>
<!-- SF-specific Helium properties not meant to be exposed in the project spec -->
<property name="diamonds.listener.configuration.file" location="${sf.common.config.dir}/diamonds/config.xml.ftl"/>
<!-- Import all core HELIUM targets -->
<import file="${helium.dir}/helium.ant.xml" />
<!-- Import common references -->
<import file="${sf.common.config.dir}/common_refs.ant.xml" />
<!-- Import functionality distributed into other file(s) -->
<import file="${sf.common.config.dir}/build.postbuild.xml"/>
<!-- Import test functionality distributed into other file(s) -->
<import file="${sf.common.config.dir}/build.test.xml"/>
<!-- Helium 7+ compatibility. Needs to be defined -->
<if>
<not><or>
<equals arg1="${helium.version}" arg2="5.0"/>
<equals arg1="${last.major.helium.version}" arg2="5.0"/>
</or></not>
<then>
<property name="sf.using.newer.than.helium5" value="true"/>
<property name="schema.new" value="true"/>
<property name="skip.sbs.parser.exception" value="true"/>
<!-- Never fail on sbs compile errors -->
<hlm:signalInput id="compileSignalInput" failbuild="never">
<hlm:notifierListRef refid="defaultFailNotifier" />
</hlm:signalInput>
<hlm:signalInput id="raptorErrorSignalInput" failbuild="never">
<hlm:notifierListRef refid="defaultFailNotifier" />
</hlm:signalInput>
</then>
</if>
<!--
- END OF PROPERTY DEFINITIONS, TARGETS START HERE
-->
<target name="sf-prep" depends="sf-prep-announce,prep-drive,init-build-area,create-bom,log-build-env">
<!-- Test for the disk space we would like for this build -->
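<!-- hasfreespace accepts Ant size suffixes, so sf.drive.space.needed can be given as e.g. 60G -->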
<fail>
<condition>
<not>
<or>
<isset property="sf.suppress.drive.space.check"/>
<hasfreespace partition="${build.drive}" needed="${sf.drive.space.needed}"/>
</or>
</not>
</condition>
Insufficient space to run this build to completion. (Was looking for ${sf.drive.space.needed}.) To suppress this check, set the property sf.suppress.drive.space.check to any value.
</fail>
<tempfile property="prep.dynamic.config" suffix="ant.xml" destdir="${temp.build.dir}"/>
<runtarget target="sf-check-env"/>
<stopwatch name="sf-prep" action="elapsed"/>
</target>
<target name="sf-prep-announce">
<stopwatch name="sf-prep"/>
<echo>[SF-PREP]</echo>
</target>
<target name="sf-check-env">
<!-- Dario's environment dumper -->
<exec executable="perl" dir="${build.drive}">
<arg value="${sf.common.config.dir}/tools/envinfo.pl"/>
</exec>
<!-- Conditional location of BuildEnv.xml. Project always preferred to default -->
<if><available file="${sf.project.location}/BuildEnv.xml"/>
<then>
<property name="sf.buildenv.location" value="${sf.project.location}"/>
</then>
<else>
<property name="sf.buildenv.location" value="${sf.common.config.dir}/tools"/>
</else>
</if>
<echo message="Build Env against ${sf.buildenv.location}/BuildEnv.xml"/>
<exec executable="perl" dir="${sf.common.config.dir}/tools" resultproperty="sf.checkenv.exit">
<arg value="CheckBuildEnv.pl"/>
<arg value="--xml=${sf.buildenv.location}/BuildEnv.xml"/>
<arg value="--dbg=${build.log.dir}/BuildEnvironmentCheck.log"/>
<arg value="--sbs='${sf.spec.sbs.config}'"/>
<arg value="--tools='${sf.spec.sbs.tools.config}'"/>
<arg value="Common"/>
</exec>
<!-- antcall target="sf-log-to-brag">
<param name="sf.brag.phase" value="Prebuild"/>
<param name="sf.brag.step" value="Build Environment Check"/>
<param name="sf.brag.log" value="${build.log.dir}/BuildEnvironmentCheck.log"/>
<param name="sf.brag.id" value="BuildEnvironmentCheck"/>
</antcall -->
<fail>
<condition>
<not>
<or>
<equals arg1="${sf.checkenv.exit}" arg2="0"/>
<isset property="sf.suppress.buildenv.check"/>
</or>
</not>
</condition>
Build Environment Check failed (${sf.checkenv.exit}). To stop this failure from aborting the build, set the property sf.suppress.buildenv.check to any value.
</fail>
</target>
<target name="sf-summary" depends="">
<echo>[SF-SUMMARY]</echo>
<echo message="Generating build summary"/>
<exec executable="perl" dir="${sf.common.config.dir}/tools/summary" failonerror="true">
<arg value="brag_script.pl"/>
<arg value="--buildid=${sf.spec.job.name}_${sf.spec.job.codeline}.${sf.spec.job.number}"/>
<arg value="--basedir=${sf.spec.job.rootdir}"/>
</exec>
</target>
<target name="sf-build-all" depends="sf-prebuild-noprep,sf-build-noprep,sf-postbuild">
<echo>[SF-BUILD-ALL]</echo>
</target>
<target name="sf-build-and-pack" depends="sf-build-noprep,sf-postbuild">
<echo>[SF-BUILD-AND-PACK]</echo>
</target>
<target name="sf-build" depends="sf-prebuild,sf-build-noprep,sf-postbuild">
<echo>[SF-BUILD]</echo>
</target>
<target name="sf-scrub-build" description="Cleans up everything left behind after a build. Danger!">
<if>
<available file="${sf.spec.job.drive}/"/>
<then>
<!-- Un-subst the drive -->
<hlm:unsubst drive="${sf.spec.job.drive}"/>
</then>
</if>
<!-- Delete build directory -->
<delete dir="${prep.build.dir}"/>
</target>
<!-- Note: Not used from Helium 7 onwards -->
<target name="generate-layers">
<echo message="canno-file:${canonical.sysdef.file}"/>
<echo message="raptor-filters:raptor_${sysdef.configuration}"/>
<!-- All we want is a sysdef with the config name appended, so just copy it -->
<echo message="INFO: Skip GenXML and copy sysdef to sysdef+config name"/>
<copy file="${canonical.sysdef.file}" tofile="${build.drive}/output/build/canonical_system_definition_${sysdef.configuration}_temp.xml"/>
<copy file="${build.drive}/output/build/canonical_system_definition_${sysdef.configuration}_temp.xml" tofile="${build.drive}/output/build/canonical_system_definition_${sysdef.configuration}.xml">
<filterchain>
<linecontainsregexp negate="true">
<regexp pattern="^\s*$"/>
</linecontainsregexp>
</filterchain>
</copy>
<copy file="${build.drive}/output/build/canonical_system_definition_${sysdef.configuration}.xml" todir="${build.log.dir}/BOM/"/>
<delete file="${build.drive}/output/build/canonical_system_definition_${sysdef.configuration}_temp.xml"/>
</target>
<target name="sf-prebuild" depends="sf-prep,sf-prebuild-noprep"/>
<target name="sf-prebuild-noprep" depends="sf-prebuild-announce,sf-diamonds-connect,sf-diamonds-envinfo,sf-diamonds-tag-build,sf-get-utils,sf-getenvs,sf-syncsource,sf-diamondize-bom">
<stopwatch name="sf-prebuild" action="elapsed"/>
</target>
<target name="sf-prebuild-announce">
<stopwatch name="sf-prebuild"/>
<echo>[SF-PREBUILD]</echo>
</target>
<target name="sf-diamonds-connect">
<if>
<istrue value="${sf.spec.publish.enable}"/>
<then>
<echo>[SF-DIAMONDS-CONNECT]</echo>
<runtarget target="diamonds"/>
<echo message="Save Diamonds build id value (${diamonds.build.id}) in a file"/>
<echo message="diamonds.build.id=${diamonds.build.id}" file="${build.log.dir}/diamonds_build_id.properties"/>
</then>
</if>
</target>
<target name="sf-diamonds-envinfo">
<if>
<istrue value="${sf.spec.publish.enable}"/>
<then>
<echo>[SF-DIAMONDS-ENVINFO]</echo>
<echo message="Send ${build.log.dir}/diamonds_envinfo.xml to Diamonds"/>
<exec executable="python">
<arg value="${sf.common.config.dir}/tools/brag/send_xml_to_diamonds.py"/>
<arg value="-u"/>
<arg value="http://${diamonds.host}${diamonds.build.id}"/>
<arg value="-f"/>
<arg value="${build.log.dir}/diamonds_envinfo.xml"/>
</exec>
</then>
</if>
</target>
<target name="sf-diamonds-tag-build">
<if>
<istrue value="${sf.spec.publish.enable}"/>
<then>
<echo>[SF-DIAMONDS-TAG-BUILD]</echo>
<echo message="Generate ${build.drive}/output/logs/sf-diamonds-tag-build.xml with Diamonds tags for this build"/>
<exec executable="perl" dir="${sf.common.config.dir}/tools" failonerror="true">
<arg value="${sf.common.config.dir}/tools/generate_diamonds_tags_xml.pl"/>
<arg value="--tags=${sf.diamonds.tags}"/>
<arg value="--output=${build.drive}/output/logs/sf-diamonds-tag-build.xml"/>
</exec>
</then>
</if>
</target>
<target name="sf-record-proj-conf-bom">
<!-- create BOM dir -->
<mkdir dir="${build.drive}/output/logs/BOM"/>
<!-- record config and project information in BOM files -->
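<!-- Each CSV line is written as repo-location,destination,revision to match the loc/dst/rev headers that sf-diamondize-bom expects -->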
<exec executable="hg" dir="${sf.config.dir}" outputproperty="sf.job.bom.config.repo">
<arg value="showconfig"/>
<arg value="paths.default"/>
</exec>
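<!-- Normalise any Windows backslashes in the repository path to forward slashes -->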
<propertyregex input="${sf.job.bom.config.repo}" regexp="\\" replace="/" global="true" property="sf.job.bom.config.repo" override="true"/>
<exec executable="hg" dir="${sf.config.dir}" outputproperty="sf.job.bom.config.checksum">
<arg value="identify"/>
<arg value="-i"/>
</exec>
<echo message="${sf.job.bom.config.repo},sf-config,${sf.job.bom.config.checksum}" file="${build.drive}/output/logs/BOM/config.csv"/>
<exec executable="hg" dir="${sf.project.location}" outputproperty="sf.job.bom.project.repo">
<arg value="showconfig"/>
<arg value="paths.default"/>
</exec>
<propertyregex input="${sf.job.bom.project.repo}" regexp="\\" replace="/" global="true" property="sf.job.bom.project.repo" override="true"/>
<exec executable="hg" dir="${sf.project.location}" outputproperty="sf.job.bom.project.checksum">
<arg value="identify"/>
<arg value="-i"/>
</exec>
<echo message="${sf.job.bom.project.repo},build/config,${sf.job.bom.project.checksum}" file="${build.drive}/output/logs/BOM/project.csv"/>
</target>
<target name="sf-get-utils">
<stopwatch name="sf-get-utils"/>
<echo message="Cloning utilities repository to ${build.drive}/utilities"/>
<exec executable="hg">
<arg value="clone"/>
<arg value="-rdefault"/>
<arg value="http://developer.symbian.org/oss/MCL/utilities"/>
<arg value="${build.drive}/utilities"/>
</exec>
<stopwatch name="sf-get-utils" action="elapsed"/>
</target>
<target name="sf-getenvs">
<stopwatch name="sf-getenvs"/>
<!-- create BOM dir -->
<mkdir dir="${build.drive}/output/logs/BOM"/>
<!-- tools baseline must come before epoc baseline to allow clean listing -->
<touch file="${build.drive}/output/logs/BOM/toolsbaseline.txt"/>
<if>
<istrue value="${sf.spec.toolsbaseline.enable}" />
<then>
<stopwatch name="sf-getenvs (tools)"/>
<!-- record toolsbaseline information in BOM file -->
<echo message="${sf.spec.toolsbaseline.location}" file="${build.drive}/output/logs/BOM/toolsbaseline.txt"/>
<!-- wrapper around preparation-getenv but with different params -->
<runtarget target="sf-getenv-tools"/>
<echo message="INFO Getting tools environment listing"/>
<exec executable="perl" dir="${build.log.dir}/" failonerror="true" output="${build.log.dir}/listdir_${build.id}_toolsbaseline.log">
<arg value="${sf.common.config.dir}/tools/listdir.pl"/>
<arg value="${build.drive}/epoc32"/>
</exec>
<stopwatch name="sf-getenvs (tools)" action="elapsed"/>
</then>
</if>
<touch file="${build.drive}/output/logs/BOM/baseline.txt"/>
<if>
<istrue value="${sf.spec.baseline.enable}" />
<then>
<stopwatch name="sf-getenvs (baseline)"/>
<!-- record baseline information in BOM file -->
<echo message="${sf.spec.baseline.location}" file="${build.drive}/output/logs/BOM/baseline.txt"/>
<!-- run internal target preparation-getenv -->
<runtarget target="preparation-getenv"/>
<stopwatch name="sf-getenvs (baseline)" action="elapsed"/>
</then>
</if>
<stopwatch name="sf-getenvs" action="elapsed"/>
</target>
<target name="sf-getenv-tools">
<antcall target="preparation-getenv" inheritAll="false">
<param name="base_release.path" value="${sf.spec.toolsbaseline.location}"/>
<param name="base_release.getenv_options" value="${sf.spec.toolsbaseline.getenv_options}"/>
</antcall>
</target>
<target name="sf-syncsource">
<stopwatch name="sf-syncsource"/>
<touch file="${build.log.dir}/BOM/sources.csv"/>
<runtarget target="sf-get-source"/>
<if>
<istrue value="${sf.spec.sourcesync.enable}" />
<then>
<runtarget target="sf-package-source"/>
<runtarget target="sf-unpack-rnd"/>
</then>
</if>
<stopwatch name="sf-syncsource" action="elapsed"/>
</target>
<target name="sf-get-source" depends="sf-generate-source-spec">
<stopwatch name="sf-get-source"/>
<ant antfile="${temp.build.dir}/source-spec.ant.xml" />
<stopwatch name="sf-get-source" action="elapsed"/>
</target>
<target name="sf-generate-source-spec">
<!-- If we've not had a cache specified, but we should use one, then work one out... -->
<if>
<and>
<istrue value="${sf.spec.sourcesync.usecache}"/>
<not><isset property="sf.spec.sourcesync.cachelocation"/></not>
</and>
<then>
<!-- Iterate through drives to look for an existing cache -->
<exec executable="perl" outputproperty="sf.spec.sourcesync.all.drives" logerror="true" failonerror="true">
<arg value="${sf.common.config.dir}/tools/findPhysicalDrive.pl"/>
<arg value="-all"/>
</exec>
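<!-- The available task only sets the property once (Ant properties are immutable), so the first listed drive with an existing cache wins -->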
<for list="${sf.spec.all.drives}" param="physical.drive">
<sequential>
<available property="sf.spec.sourcesync.cachelocation" value="@{physical.drive}/${sf.spec.sourcesync.cache.path}" file="@{physical.drive}/${sf.spec.sourcesync.cache.path}" type="dir"/>
</sequential>
</for>
<if>
<not><isset property="sf.spec.sourcesync.cachelocation"/></not>
<then>
<!-- No existing cache - locate the preferred drive for creating one -->
<exec executable="perl" outputproperty="sf.spec.sourcesync.largest.drive" logerror="true" failonerror="true">
<arg value="${sf.common.config.dir}/tools/findPhysicalDrive.pl"/>
<arg value="-capacity"/>
</exec>
<property name="sf.spec.sourcesync.cachelocation" value="${sf.spec.sourcesync.largest.drive}/${sf.spec.sourcesync.cache.path}"/>
<mkdir dir="${sf.spec.sourcesync.cachelocation}"/>
</then>
</if>
</then>
</if>
<!-- TODO: 1. The same file name source-spec.ant.xml is used for all packages
in multiple-package builds; this needs to be linked with the package name. -->
<!-- TODO: 2. Change fmpp data to be a full property rather than relative path -->
<delete file="${temp.build.dir}/source-spec.ant.xml"/>
<property name="sf.test.csv.file" value="nul:"/>
<if>
<istrue value="${sf.spec.sourcesync.enable}" />
<then>
<property name="sf.source.csv.file" value="${sf.spec.sourcesync.sourcespecdir}/${sf.spec.sourcesync.sourcespecfile}"/>
</then>
</if>
<property name="sf.source.csv.file" value="nul:"/>
<fmpp sourceFile="${sf.common.config.dir}/templates/source-spec.ant.xml.ftl"
outputFile="${temp.build.dir}/source-spec.ant.xml">
<data expandProperties="yes">
ant: antProperties()
data: [
csv(${sf.test.csv.file}, {separator:','})
csv(${sf.source.csv.file}, {separator:','})
]
</data>
</fmpp>
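<!-- The generated source-spec.ant.xml is then executed by sf-get-source -->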
</target>
<!-- generate dir list using passed location and name
if a baseline list is available then generate deltas too -->
<target name="sf-list-dir">
<property name="sf.currentlist.name" value="${sf.list.name}"/>
<property name="sf.dir.location" value="${build.drive}/epoc32"/>
<property name="sf.dir.exclude" value="${build.drive}/epoc32/build"/>
<if>
<istrue value="${sf.spec.dirlist.enable}"/>
<then>
<echo message="Dirlist name: ${sf.currentlist.name} requested for ${sf.dir.location} excluding ${sf.dir.exclude}"/>
<exec executable="python" dir="${build.log.dir}/" failonerror="true" output="${build.log.dir}/listdir_${build.id}_${sf.currentlist.name}.log">
<arg value="${sf.common.config.dir}/tools/listdir.py"/>
<arg value="${sf.dir.location}"/>
<arg value="${sf.dir.exclude}"/>
</exec>
</then>
</if>
</target>
<target name="sf-delta-dir">
<property name="sf.currentlist_a.name" value="${sf.list_a.name}"/>
<property name="sf.currentlist_b.name" value="${sf.list_b.name}"/>
<property name="sf.dir.location" value="${build.drive}/epoc32"/>
<if>
<istrue value="${sf.spec.dirdelta.enable}"/>
<then>
<echo message="Delta requested for ${sf.currentlist_a.name} vs ${sf.currentlist_b.name} "/>
<exec executable="perl" dir="${build.log.dir}/" failonerror="true" output="${build.log.dir}/listdir_${build.id}_${sf.currentlist_b.name}_delta.log">
<arg value="${sf.common.config.dir}/tools/difflist.pl"/>
<arg value="${build.log.dir}/listdir_${build.id}_${sf.currentlist_a.name}.log"/>
<arg value="${build.log.dir}/listdir_${build.id}_${sf.currentlist_b.name}.log"/>
</exec>
</then>
</if>
</target>
<target name="sf-intersect-dir">
<property name="sf.currentlist_a.name" value="${sf.list_a.name}"/>
<property name="sf.currentlist_b.name" value="${sf.list_b.name}"/>
<property name="sf.dir.location" value="${build.drive}/epoc32"/>
<if>
<istrue value="${sf.spec.dirdelta.enable}"/>
<then>
<echo message="Inersection requested for ${sf.currentlist_a.name} vs ${sf.currentlist_b.name} "/>
<exec executable="perl" dir="${build.log.dir}/" failonerror="true" output="${build.log.dir}/listdir_${build.id}_${sf.currentlist_b.name}_intersect.log">
<arg value="${sf.common.config.dir}/tools/difflist.pl"/>
<arg value="${build.log.dir}/listdir_${build.id}_${sf.currentlist_a.name}.log"/>
<arg value="${build.log.dir}/listdir_${build.id}_${sf.currentlist_b.name}.log"/>
<arg value="-I"/> <!-- run difflist in intersect mode -->
</exec>
</then>
</if>
</target>
<!-- This target is overridden in platform builds, but used directly in package builds -->
<target name="sf-build-noprep" depends="sf-truclean,sf-model-from-project,sf-compile,sf-postbuild">
<echo>[SF-BUILD-NOPREP]</echo>
</target>
<target name="create-canonical-sysdef-file">
<copy file="${build.drive}/${sf.spec.systemdefinition.location}" tofile="${canonical.sysdef.file}" failonerror="true" verbose="true"/>
</target>
<target name="sf-truclean">
<stopwatch name="sf-truclean"/>
<if>
<istrue value="${sf.spec.build.clean.enable}"/>
<then>
<echo message="Executing truclean step."/>
<fmpp sourceFile="${sf.common.config.dir}/templates/truclean.ant.xml.ftl"
outputFile="${temp.build.dir}/truclean.ant.xml">
<data expandProperties="yes">
ant: antProperties()
data: csv(${sf.spec.sourcesync.sourcespecdir}/${sf.spec.sourcesync.sourcespecfile}, {separator:','})
</data>
</fmpp>
<ant antfile="${temp.build.dir}/truclean.ant.xml"/>
</then>
<else>
<echo message="Skipping truclean step."/>
</else>
</if>
<stopwatch name="sf-truclean" action="elapsed"/>
</target>
<target name="sf-model-from-project">
<stopwatch name="sf-model-from-project"/>
<if>
<isset property="sf.spec.systemdefinition.source"/>
<then>
<echo message="Using ${sf.spec.systemdefinition.source} as model file"/>
<echo message="Copying to ${build.drive}/definition.xml"/>
<copy file="${sf.project.location}/${sf.spec.systemdefinition.source}" tofile="${build.drive}${sf.spec.systemdefinition.location}"/>
</then>
<else>
<!-- Collate the system definition from the packages listed in sources.csv -->
<echo message="Building system model from ${sf.spec.sourcesync.sourcespecdir}/${sf.spec.sourcesync.sourcespecfile}"/>
<exec executable="perl" dir="${build.drive}/" output="${build.drive}${sf.spec.systemdefinition.location}" logError="true" failonerror="true">
<arg value="${sf.common.config.dir}/tools/csvToSysDef.pl"/>
<arg value="${sf.spec.sourcesync.sourcespecdir}/${sf.spec.sourcesync.sourcespecfile}"/>
<arg value="${sf.config.dir}/../build/config/${sf.spec.job.codeline}"/>
</exec>
</else>
</if>
<stopwatch name="sf-model-from-project" action="elapsed"/>
</target>
<target name="sf-diamondize-bom" depends="sf-record-proj-conf-bom">
<fmpp sourceFile="${sf.common.config.dir}/templates/build-info.xml.ftl"
outputFile="${build.drive}/output/logs/sf-diamondize-bom.xml">
<data expandProperties="yes">
ant: antProperties()
config: csv(${build.drive}/output/logs/BOM/config.csv,{separator:',',headers:[loc,dst,rev]})
project: csv(${build.drive}/output/logs/BOM/project.csv,{separator:',',headers:[loc,dst,rev]})
baseline: slicedText(${build.drive}/output/logs/BOM/baseline.txt,{trim})
toolsbaseline: slicedText(${build.drive}/output/logs/BOM/toolsbaseline.txt,{trim})
sources: csv(${build.drive}/output/logs/BOM/sources.csv,{separator:',',headers:[loc,dst,type,rev,sysdef]})
</data>
</fmpp>
<!-- Make a copy in BOM dir -->
<copy file="${build.drive}/output/logs/sf-diamondize-bom.xml" tofile="${build.log.dir}/BOM/build-info.xml"/>
</target>
<target name="sf-preprocess-package-config">
<echo message="${sf.spec.sourcesync.sourcespecdir}/${sf.spec.sourcesync.sourcespecfile}"/>
<exec executable="perl" dir="${build.drive}/" failonerror="false" output="${build.log.dir}/zipconfig.log">
<arg value="${sf.common.config.dir}/tools/populateziptemplate.pl"/>
<arg value="${sf.spec.sourcesync.sourcespecdir}/${sf.spec.sourcesync.sourcespecfile}"/>
<arg value="${sf.common.config.dir}/templates/zip.cfg.xml.ftl.template"/>
<arg value="${temp.build.dir}/zip.cfg.xml.ftl"/>
<arg value="${build.log.dir}/rnd_excludefile.txt"/>
</exec>
<antcall target="sf-log-to-brag">
<param name="sf.brag.phase" value="Prebuild"/>
<param name="sf.brag.step" value="Preprocess package config"/>
<param name="sf.brag.log" value="${build.log.dir}/zipconfig.log"/>
<param name="sf.brag.id" value="PreprocessPackageConfig"/>
</antcall>
</target>
<target name="sf-zip-content">
<!-- If it's the file we use for everything, and it's not up to date... -->
<if>
<and>
<equals arg1="${zip.config.file}" arg2="${temp.build.dir}/zip.cfg.xml.ftl"/>
<not>
<and>
<uptodate targetfile="${temp.build.dir}/zip.cfg.xml.ftl">
<srcfiles dir="${sf.common.config.dir}">
<include name="tools/populateziptemplate.pl"/>
<include name="templates/zip.cfg.xml.ftl.template"/>
</srcfiles>
</uptodate>
<uptodate targetfile="${temp.build.dir}/zip.cfg.xml.ftl">
<srcfiles dir="${sf.spec.sourcesync.sourcespecdir}">
<include name="${sf.spec.sourcesync.sourcespecfile}"/>
</srcfiles>
</uptodate>
</and>
</not>
</and>
<then>
<!-- (Re)build the packaging config file -->
<runtarget target="sf-preprocess-package-config"/>
</then>
</if>
<stopwatch name="sf-zip-content (${zip.target.name})"/>
<if><available file="${zip.config.file}"/>
<then>
<runtarget target="preprocess-zip-config" />
<property name="zip.${zip.target.name}.log.file" location="${build.log.dir}/${build.id}_${zip.target.name}_zip.log" />
<property name="zips.${zip.target.name}.spec.name" value="${zip.target.name}" />
<if><isset property="sf.using.newer.than.helium5"/>
<then>
<hlm:zipContentMacro type="${zip.target.name}" file="${zip.config.file}" failonemptyconfig="false"/>
</then>
<else>
<hlm:zipContentMacro type="${zip.target.name}" file="${zip.config.file}" />
</else>
</if>
</then>
<else>
<echo message="ERROR ${zip.config.file} not available for zipping!"/>
</else>
</if>
<stopwatch name="sf-zip-content (${zip.target.name})" action="elapsed"/>
</target>
<target name="sf-package-source" >
<!-- Firstly zip up rnd by package -->
<antcall target="sf-zip-content">
<param name="zip.config.file" value="${temp.build.dir}/zip.cfg.xml.ftl"/>
<param name="zip.target.name" value="rnd" />
</antcall>
<antcall target="sf-zip-content">
<param name="zip.config.file" value="${temp.build.dir}/zip.cfg.xml.ftl"/>
<param name="zip.target.name" value="rnd-internal" />
</antcall>
<if>
<istrue value="${sf.spec.package.src.enable}"/>
<then>
<echo message="INFO Packaging Source Repos"/>
<antcall target="sf-zip-content">
<param name="zip.config.file" value="${temp.build.dir}/zip.cfg.xml.ftl"/>
<param name="zip.target.name" value="src" />
</antcall>
<!-- Then zip up src zips by layer -->
<antcall target="sf-zip-content">
<param name="zip.config.file" value="${temp.build.dir}/zip.cfg.xml.ftl"/>
<param name="zip.target.name" value="src-by-layer" />
</antcall>
</then>
</if>
</target>
<!-- Unpack the rnd zips ready to be used in the build -->
<target name="sf-unpack-rnd">
<echo message="Unpacking any available RnD binaries"/>
<for param="rndZip">
<fileset dir="${build.drive}/output/zips/">
<include name="binaries_*.zip"/> <!-- Internal rnd bins -->
<include name="release/bin_*.zip"/> <!-- Releaseable rnd bins -->
</fileset>
<sequential>
<exec executable="7za" dir="${build.drive}" output="${build.log.dir}/unzip_${build.id}_binaries_rnd.log" append="true">
<arg value="x"/>
<arg value="-y"/> <!-- Need to unpack in overwrite mode, due to TEF -->
<arg value="-i!epoc32\*"/> <!-- Only unzip items into the epoc32 tree -->
<arg value="@{rndZip}"/>
</exec>
</sequential>
</for>
</target>
<target name="sf-compile-options">
<hlm:sbsMakeOptions engine="gmake" id="sbs.fullbuild.options"/>
<hlm:sbsMakeOptions engine="gmake" id="sbs.toolsbuild.options"/>
</target>
<!-- Turn a log file into a BRAG XML file - parameters to be passed are sf.brag.* -->
<target name="sf-log-to-brag">
<mkdir dir="${build.log.dir}/summary"/>
<echo message="Generating ${sf.brag.id}_BRAG.xml from ${sf.brag.log}"/>
<exec executable="perl" failonerror="false" output="${build.log.dir}/summary/${sf.brag.id}_BRAG.xml" logError="true">
<arg value="${sf.common.config.dir}/tools/brag/logToBRAG.pl"/>
<arg value="--phase=${sf.brag.phase}"/>
<arg value="--step=${sf.brag.step}"/>
<arg value="--rules=${sf.common.config.dir}/tools/brag/rules.${sf.brag.id}.tsv"/>
<arg value="${sf.brag.log}"/>
</exec>
<!-- And copy the XSL so the output can be transformed by a browser -->
<copy file="${sf.common.config.dir}/tools/brag/brag.xsl" toDir="${build.log.dir}/summary/"/>
</target>
</project>