<?xml version="1.0" encoding="UTF-8"?>
<!--
============================================================================
Name : publish.ant.xml
Part of : Helium
Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies).
All rights reserved.
This component and the accompanying materials are made available
under the terms of the License "Eclipse Public License v1.0"
which accompanies this distribution, and is available
at the URL "http://www.eclipse.org/legal/epl-v10.html".
Initial Contributors:
Nokia Corporation - initial contribution.
Contributors:
Description:
============================================================================
-->
<!--* @package publishing -->
<project name="publish" xmlns:hlm="http://www.nokia.com/helium">
<description>
Zipping, delta zipping and copying targets
</description>
<!-- Tools configuration for basic Helium tools. -->
<hlm:envdata id="helium.tools.envdata">
<hlm:executable name="7za" versionArgs="" versionRegex="7-Zip (\S+)"/>
<hlm:executable name="ant" versionArgs="-version" versionRegex="Apache Ant version (\S+)"/>
<hlm:executable name="armcc" versionArgs="" versionRegex="RVCT(.+)" output="stderr"/>
<hlm:executable name="ccm" versionArgs="version -c" versionRegex="(\S+)"/>
<hlm:executable name="codescanner" versionArgs="" versionRegex="version (\S+)"/>
<hlm:executable name="ctc" versionArgs="" versionRegex="This is CTC\+\+ \(v(\S+)\)"/>
<hlm:executable name="java" versionArgs="-version" versionRegex="java version &quot;(\S+)&quot;"/>
<hlm:executable name="perl" versionArgs="-v" versionRegex="This is perl, v(\S+)"/>
<hlm:executable name="python" versionArgs="--version" versionRegex="Python (\S+)"/>
<hlm:executable name="sbs" versionArgs="-version" versionRegex="sbs version (.+)$"/>
<hlm:executable name="unzip" versionArgs="-v" versionRegex="UnZip (\S+)"/>
<hlm:executable name="emake" versionArgs="--version" versionRegex="Electric Make version (\S+)"/>
<hlm:executable name="ectool" versionArgs="--version" versionRegex="ElectricCommander Tool version (\S+)"/>
</hlm:envdata>
<!--* @property zip.config.file
The ZIP configuration XML file.
@type string
@editable required
@scope public
-->
<!--* @property zips.ee.spec.name
The name of the configuration section for EE zipping.
@type string
@editable optional
@scope public
-->
<!--* @property zips.localised.spec.name
The name of the configuration section for localisation zipping.
@type string
@editable optional
@scope public
-->
<!--* @property zips.subcon.spec.name
The name of the configuration section for subcon zipping.
@type string
@editable optional
@scope public
-->
<!--* @property delta.zip.file.size
Defines the size of the zipped-up release file; the delta zipping code uses this to decide whether a delta zip is required or a full zip should be created. The delta zip is only published if it is less than half the size defined by this property.
@type string
@editable required
@scope public
-->
<!--* @property file-comparison.left_side
The fileset id used on the left side of comparison.
@type string
@editable required
@scope public
-->
<!--* @property file-comparison.right_side
The fileset id used on the right side of comparison.
@type string
@editable required
@scope public
-->
<!--* @property archiving.enabled
Set to true to generate zip files containing the binaries generated by the build.
@type boolean
@editable required
@scope public
@since 11.0
-->
<property name="archiving.enabled" value="true"/>
<!--* @property internal.archiving.enabled
Set to run archiving targets if archiving.enabled is set to true.
@type boolean
@scope private
-->
<!--* @property skip-zips
Set to skip the generation of zip files containing the binaries generated by the build. Deprecated: use the archiving.enabled property instead.
@type boolean
@editable required
@scope public
@deprecated since 11.0
-->
<!--* @property publish.enabled
Set to true to publish build artifacts.
@type boolean
@editable required
@scope public
@since 11.0
-->
<!--* @property internal.publish.enabled
Set to run publishing targets if publish.enabled is set to true.
@type boolean
@scope private
-->
<!--* @property publish
Set to true to publish build artifacts. Deprecated: use the publish.enabled property instead.
@type boolean
@editable required
@scope public
@deprecated since 11.0
-->
<!--* @property archive.wa.enabled
Set to true to archive or zip the synergy workarea.
@type boolean
@editable required
@scope public
@since 11.0
-->
<!--* @property internal.archive.wa.enabled
Set to run synergy workarea archiving targets if archive.wa.enabled is set to true.
@type boolean
@scope private
-->
<!--* @property zip.wa
Set to true to archive or zip the synergy workarea. Deprecated: use the archive.wa.enabled property instead.
@type boolean
@editable required
@scope public
@deprecated since 11.0
-->
<!-- Check whether zipping of binaries is enabled -->
<condition property="internal.archiving.enabled">
<and>
<not>
<isfalse value="${archiving.enabled}" />
</not>
<not>
<isset property="skip-zips"/>
</not>
</and>
</condition>
<!-- Check whether publishing is enabled -->
<condition property="internal.publish.enabled">
<or>
<istrue value="${publish.enabled}"/>
<isset property="publish"/>
</or>
</condition>
<!-- Check whether workarea archiving is enabled -->
<condition property="internal.archive.wa.enabled">
<or>
<istrue value="${archive.wa.enabled}"/>
<isset property="zip.wa"/>
</or>
</condition>
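<!-- Example (illustrative only, not part of the build logic): the three flags above can be
driven from the Ant command line; the target names used here exist in this project, but the
combination of values is hypothetical.
<pre>
ant zip-ee -Darchiving.enabled=false       (skips zipping: internal.archiving.enabled stays unset)
ant publish -Dpublish.enabled=true         (enables publishing: internal.publish.enabled is set)
ant zip-wa -Darchive.wa.enabled=true       (enables workarea archiving: internal.archive.wa.enabled is set)
</pre>
-->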
<import file="internal-api/internal-api.ant.xml" />
<import file="nwiki.ant.xml" />
<import file="synergy.ant.xml" />
<import file="test/test2_publishing_to_diamonds.ant.xml" />
<!-- Set the maximum number of threads running for archiving (for a parallel job).
@type integer
-->
<property name="archive.max.number.of.threads" value="8" />
<fileset id="publish.files" dir="${build.output.dir}">
<include name="logs/**/*" />
<exclude name="logs/**/*_ant_build.log" />
<include name="development_flash_images/**/*" />
<include name="release_flash_images/**/*" />
<include name="sisfiles/**/*" />
<include name="build_area/**/*" />
<include name="relnotes/**/*" />
<include name="test_results/**/*" />
<include name="utilities/**/*" />
<include name="images/**/*" />
<include name="variant_images/**/*" />
<include name="BaseForNightlyBuild.txt" />
</fileset>
<!-- The above fileset is divided into the following 4 filesets -->
<fileset id="publish.files1" dir="${build.output.dir}">
<include name="logs/**/*" />
<exclude name="logs/**/*_ant_build.log" />
</fileset>
<fileset id="publish.files2" dir="${build.output.dir}">
<include name="development_flash_images/**/*" />
</fileset>
<fileset id="publish.files3" dir="${build.output.dir}">
<include name="sisfiles/**/*" />
<include name="build_area/**/*" />
<include name="relnotes/**/*" />
<include name="test_results/**/*" />
<include name="utilities/**/*" />
<include name="variant_images/**/*" />
<include name="images/**/*" />
<include name="BaseForNightlyBuild.txt" />
</fileset>
<fileset id="publish.files4" dir="${build.output.dir}">
<include name="release_flash_images/**/*" />
</fileset>
<!-- list of filesets -->
<!-- The fileset ids which will be uploaded
@type string
-->
<property name="publish.fileset.ids" value="publish.files1,publish.files2,publish.files3,publish.files4" />
<fileset id="helium.files" dir="${helium.dir}">
<include name="*.bat" />
<include name="**/*.ant.xml" />
<include name="tools/common/**/*" />
<include name="tools/release/**/*" />
</fileset>
<!--
If publishing is enabled (internal.publish.enabled is set), copy the filesets listed in
publish.fileset.ids to the publish.dir directory using number.of.threads parallel threads
-->
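<!-- Example usage (a minimal sketch; the property values shown are hypothetical):
<pre>
<property name="publish.dir" value="\\server\builds\${build.id}"/>
<property name="number.of.threads" value="4"/>
<hlm:publishMacro/>
</pre>
The copy is only performed when internal.publish.enabled is set (normally via publish.enabled);
the macro also accumulates the published file names into the diamonds.files property.
-->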
<macrodef name="publishMacro" uri="http://www.nokia.com/helium">
<sequential>
<if>
<isset property="internal.publish.enabled" />
<then>
<for list="${publish.fileset.ids}" param="fileset.id" parallel="true" threadCount='${number.of.threads}'>
<sequential>
<copy todir="${publish.dir}" preservelastmodified="true" failonerror="false">
<fileset refid="@{fileset.id}" />
</copy>
<pathconvert pathsep=" " property="@{fileset.id}.diamonds.files">
<map from="${build.output.dir}" to="${publish.dir}"/>
<fileset refid="@{fileset.id}"/>
</pathconvert>
<property name="diamonds.files" value="" />
<var name="diamonds.files" value="${@{fileset.id}.diamonds.files} ${diamonds.files}"/>
</sequential>
</for>
</then>
</if>
</sequential>
</macrodef>
<!-- Creates the publish directory. Directory location is defined by property: ${publish.dir} -->
<target name="prep-publish" if="internal.publish.enabled">
<mkdir dir="${publish.dir}" />
</target>
<!-- Parses the zip configuration file. Expands all the ant properties. -->
<target name="preprocess-zip-config">
<!-- Path to the parsed zip specification file
@type string
@scope private
-->
<property name="zip.config.file.parsed" location="${temp.build.dir}/zip.cfg.xml.parsed" />
<copy file="${zip.config.file}" tofile="${zip.config.file.parsed}" overwrite="true">
<filterchain>
<expandproperties />
</filterchain>
</copy>
</target>
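<!-- Illustration only (the values are hypothetical): the expandproperties filter resolves Ant
property references while the file is copied, so a value such as ${build.output.dir}/zips in
${zip.config.file} appears as the resolved path (e.g. X:/output/zips, assuming build.output.dir
is X:/output) in ${zip.config.file.parsed}.
-->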
<!-- This macro merges all the generated metadata files together.
It uses the archiving configuration to find the output archive directory.
Then it merges all the <config.name>.metadata.xml files together.
e.g.:
<pre>
<hlm:mergeMetadataMacro file="${zip.config.file.parsed}" config="${zips.@{type}.spec.name}"/>
</pre>
-->
<macrodef name="mergeMetadataMacro" uri="http://www.nokia.com/helium">
<attribute name="file" />
<attribute name="config" />
<sequential>
<hlm:python>
<![CDATA[
import configuration
import symrec
import os
import sys
import logging
import ant
LOGGER = logging.getLogger("metadatamerger")
LOGGER.setLevel(level=logging.INFO)
logging.basicConfig(level=logging.INFO)
def merge_filelist(merger, filelist):
for filename in filelist:
try:
LOGGER.info("Merging %s" % filename)
merger.merge(filename)
os.unlink(filename)
except Exception, exc:
LOGGER.warning("Warning: %s" % exc)
try:
builder = configuration.NestedConfigurationBuilder(open(ant.get_property(r'@{file}'), 'r'))
configSet = builder.getConfiguration()
configs = configSet.getConfigurations(ant.get_property(r'@{config}'))
if len(configs) > 0:
filelist = []
for config in configs:
if config.get_boolean("grace.metadata", False):
metadata = os.path.join(config['archives.dir'], config['name']+ ".metadata.xml")
if os.path.exists(metadata):
LOGGER.info("Found %s" % metadata)
filelist.append(metadata)
merger = None
metadata_main = os.path.join(configs[0]['archives.dir'], "release_metadata.xml")
if os.path.exists(metadata_main):
merger = symrec.MetadataMerger(metadata_main)
merge_filelist(merger, filelist)
LOGGER.info(str("Writing %s" % metadata_main))
merger.save()
elif len(filelist) > 0:
input = filelist.pop(0)
merger = symrec.MetadataMerger(input)
merge_filelist(merger, filelist)
LOGGER.info(str("Writing %s" % metadata_main))
merger.save(metadata_main)
os.unlink(input)
except Exception, e:
LOGGER.error('ERROR: %s' % e)
sys.exit(-1)
]]>
</hlm:python>
</sequential>
</macrodef>
<!-- This macro allows you to add or update one archive definition inside the release metadata files.
e.g.:
<pre>
<hlm:updateMetadataMacro file="<path>/release_metadata.xml" archive="<path>/archive.zip" />
</pre>
-->
<macrodef name="updateMetadataMacro" uri="http://www.nokia.com/helium">
<attribute name="file" />
<attribute name="archive" />
<attribute name="filters" default=""/>
<sequential>
<hlm:python>
<![CDATA[
import symrec
import os
import traceback
import fileutils
import ant
import sys
import logging
LOGGER = logging.getLogger("metadataupdater")
LOGGER.setLevel(level=logging.INFO)
logging.basicConfig(level=logging.INFO)
if ant.get_property(r'@{file}') is None:
raise Exception('file attribute is not defined.')
if ant.get_property(r'@{archive}') is None:
raise Exception('archive attribute is not defined.')
filters = None
if ant.get_property(r'@{filters}') is not None:
filters = ant.get_property(r'@{filters}').split(r',')
try:
filename = ant.get_property(r'@{file}')
archive = ant.get_property(r'@{archive}')
if not os.path.exists(filename):
raise Exception("Could not find file: %s" % filename)
if not os.path.exists(archive):
raise Exception("Could not find file: %s" % archive)
LOGGER.info(str("Opening %s" % filename))
md = symrec.ReleaseMetadata(filename)
if os.path.basename(archive) not in md.keys():
LOGGER.info(str("Adding %s to metadata" % os.path.basename(archive)))
md.add_package(os.path.basename(archive), md5checksum=fileutils.getmd5(archive), size=os.path.getsize(archive), filters=filters)
else:
LOGGER.info(str("Updating %s to metadata" % os.path.basename(archive)))
result = md[os.path.basename(archive)]
result['md5checksum'] = unicode(fileutils.getmd5(archive))
result['size'] = unicode(os.path.getsize(archive))
if filters is not None:
result['filters'] = filters
md[os.path.basename(archive)] = result
md.save()
except Exception, e:
LOGGER.error('ERROR: %s' % e)
sys.exit(-1)
]]>
</hlm:python>
</sequential>
</macrodef>
<!-- This macro updates the MD5 checksums in the metadata file generated by the config provided by file.
e.g.:
<pre>
<hlm:updateMD5Macro file="${zip.config.file.parsed}" config="${zips.@{type}.spec.name}"/>
</pre>
-->
<macrodef name="updateMD5Macro" uri="http://www.nokia.com/helium">
<attribute name="file" />
<attribute name="config" />
<sequential>
<hlm:python>
<![CDATA[
import configuration
import archive
import os
import symrec
import ant
import logging
import sys
LOGGER = logging.getLogger("metadataMD5updater")
LOGGER.setLevel(level=logging.INFO)
logging.basicConfig(level=logging.INFO)
# Reading the config from Ant
try:
config_filename = ant.get_property(r'@{file}')
spec_name = ant.get_property(r'@{config}')
# Loading the config file.
builder = configuration.NestedConfigurationBuilder(open(config_filename, 'r'))
configSet = builder.getConfiguration()
configs = configSet.getConfigurations(spec_name)
if len(configs) > 0:
if os.path.exists(os.path.join(configs[0]['archives.dir'], "release_metadata.xml")):
md5update = symrec.MD5Updater(os.path.join(configs[0]['archives.dir'], "release_metadata.xml"))
md5update.update()
md5update.save()
else:
LOGGER.warning(str('WARNING: Could not find %s.' % os.path.join(configs[0]['archives.dir'], "release_metadata.xml")))
else:
LOGGER.warning('WARNING: No config.')
except Exception, e:
LOGGER.error('ERROR: %s' % e)
sys.exit(-1)
]]>
</hlm:python>
</sequential>
</macrodef>
<condition property="archive.using.ec">
<or>
<equals arg1="${build.system}" arg2="ec-helium" />
<equals arg1="${build.system}" arg2="sbs-ec" />
</or>
</condition>
<!-- Zips files using a type and zip config file as args -->
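<!-- Example usage (a minimal sketch; attribute values other than the defaults are illustrative):
<pre>
<hlm:zipContentMacro type="ee" file="${zip.config.file}"/>
<hlm:zipContentMacro type="subcon" file="${zip.config.file}" ec="false" failonemptyconfig="false" phase="archive"/>
</pre>
The type attribute selects the ${zips.<type>.spec.name} configuration inside the zip config file;
ec defaults to ${archive.using.ec}, failonemptyconfig to true and phase to archive.
-->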
<macrodef name="zipContentMacro" uri="http://www.nokia.com/helium">
<attribute name="type" />
<attribute name="file" />
<attribute name="ec" default="${archive.using.ec}"/>
<attribute name="failonemptyconfig" default="true"/>
<attribute name="phase" default="archive"/>
<sequential>
<if>
<not>
<isset property="zip.@{type}.log.file" />
</not>
<then>
<property name="zip.@{type}.log.file" location="${@{phase}.log.dir}/${build.id}_@{type}_zip.log" />
</then>
</if>
<mkdir dir="${@{phase}.log.dir}"/>
<mkdir dir="${post.log.dir}"/>
<property name="zip.@{type}.nopolicy.log.file" location="${@{phase}.log.dir}/${build.id}_@{type}_archive.nopolicy.log" />
<property name="zip.@{type}.policy.log.file" location="${@{phase}.log.dir}/${build.id}_@{type}_archive.policy.log" />
<hlm:taskRecorder output="${zip.@{type}.log.file}">
<!-- Path to the parsed zip specification file
@type string
@scope private
-->
<property name="zip.config.file.parsed" location="${temp.build.dir}/zip.cfg.xml.parsed" />
<copy file="@{file}" tofile="${zip.config.file.parsed}" overwrite="true">
<filterchain>
<expandproperties />
</filterchain>
</copy>
<hlm:updateArchiveConfig configtype="${zips.@{type}.spec.name}" configfileparsed="${zip.config.file.parsed}" usingec="@{ec}" failonemptyconfig="@{failonemptyconfig}"/>
<if>
<istrue value="@{ec}" />
<then>
<echo>emake.root.to.append=${emake.root.to.append}</echo>
<hlm:emakeMacro name="archive-full-@{type}" makefile="${build.drive}/ZIP_${zips.@{type}.spec.name}.make" target="all" dir="${build.drive}/" annodetail="basic,history,file,waiting" root="${emake.root.to.append}" failonerror="false" phase="archive"/>
</then>
<else>
<if>
<isgreaterthan arg1="${number.of.threads}" arg2="${archive.max.number.of.threads}"/>
<then>
<ant antfile="${build.drive}/ZIP_${zips.@{type}.spec.name}.xml">
<!-- how many parallel threads can run
@type string
-->
<property name="number.of.threads" value="${archive.max.number.of.threads}"/>
</ant>
</then>
<else>
<ant antfile="${build.drive}/ZIP_${zips.@{type}.spec.name}.xml" />
</else>
</if>
</else>
</if>
<hlm:python>
import archive
import configuration
builder = configuration.NestedConfigurationBuilder(open(r'${zip.config.file.parsed}', 'r'))
configSet = builder.getConfiguration()
configs = configSet.getConfigurations(r'${zips.@{type}.spec.name}')
prebuilder = archive.ArchivePreBuilder(configuration.ConfigurationSet(configs), r'${zips.@{type}.spec.name}')
prebuilder.cleanupSubstDrives()
</hlm:python>
<hlm:mergeMetadataMacro file="${zip.config.file.parsed}" config="${zips.@{type}.spec.name}" />
<hlm:updateMD5Macro file="${zip.config.file.parsed}" config="${zips.@{type}.spec.name}" />
</hlm:taskRecorder>
<hlm:assertFileExists file="${zip.@{type}.log.file}" />
<copy file="${zip.@{type}.log.file}" tofile="${zip.@{type}.nopolicy.log.file}" verbose="true">
<filterchain>
<linecontainsregexp negate="true">
<regexp pattern="POLICY_(ERROR|WARNING|INFO)" />
</linecontainsregexp>
</filterchain>
</copy>
<hlm:metadatarecord database="${metadata.dbfile}">
<hlm:textmetadatainput>
<fileset casesensitive="false" file="${zip.@{type}.nopolicy.log.file}" />
<metadatafilterset refid="filterset.archive.nopolicy" />
</hlm:textmetadatainput>
</hlm:metadatarecord>
<hlm:signalMacro logfile="${zip.@{type}.nopolicy.log.file}"
signal.input="archiveErrorSignalInput" />
<!-- Extracting policy errors from archiving. -->
<copy file="${zip.@{type}.log.file}" tofile="${zip.@{type}.policy.log.file}" verbose="true">
<filterchain>
<linecontainsregexp>
<regexp pattern="POLICY_(ERROR|WARNING|INFO)" />
</linecontainsregexp>
<tokenfilter>
<replaceregex pattern=".*(POLICY_(?:ERROR|WARNING|INFO))" replace="\1" flags="gi" />
</tokenfilter>
</filterchain>
</copy>
<hlm:assertFileExists file="${zip.@{type}.policy.log.file}" />
<hlm:metadatarecord database="${metadata.dbfile}">
<hlm:textmetadatainput>
<fileset casesensitive="false" file="${zip.@{type}.policy.log.file}" />
<metadatafilterset refid="filterset.archive.policy" />
</hlm:textmetadatainput>
</hlm:metadatarecord>
<hlm:signalMacro logfile="${zip.@{type}.policy.log.file}"
signal.input="archivePolicyErrorSignalInput" />
</sequential>
</macrodef>
<!--
This scriptdef:
1. Updates the archive configuration file with substituted-drive information for UNC paths (if any).
2. Creates the .make file used to zip the sources with EC.
-->
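<!-- Example usage (a minimal sketch, mirroring the call made inside zipContentMacro above;
the "ee" configuration name is only an illustration):
<pre>
<hlm:updateArchiveConfig configtype="${zips.ee.spec.name}" configfileparsed="${zip.config.file.parsed}" usingec="false" failonemptyconfig="true"/>
</pre>
With usingec="true" a ZIP_<configtype>.make file is written for EC, otherwise a ZIP_<configtype>.xml
Ant file is generated at the root of ${build.drive}.
-->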
<scriptdef name="updateArchiveConfig" language="jython" uri="http://www.nokia.com/helium">
<attribute name="configtype" />
<attribute name="configfileparsed" />
<attribute name="usingec" />
<attribute name="failonemptyconfig" />
<![CDATA[
import archive
import configuration
import logging
import os
failonemptyconfig = True
if attributes.get('failonemptyconfig'):
failonemptyconfig = str(attributes.get('failonemptyconfig')).lower() == "true"
config_parsed_filename = str(attributes.get('configfileparsed'))
config_type = str(attributes.get('configtype'))
is_it_ec = str(attributes.get('usingec'))
builder = configuration.NestedConfigurationBuilder(open(config_parsed_filename, 'r'))
configSet = builder.getConfiguration()
configs = configSet.getConfigurations(config_type)
outputtype = 'ant'
outputext = '.xml'
if is_it_ec == "true":
outputtype = 'make'
outputext = '.make'
if len(configs) > 0 or not failonemptyconfig:
prebuilder = archive.ArchivePreBuilder(configuration.ConfigurationSet(configs), config_type, outputtype)
if os.sep == '\\':
toAppendEmakeRoot = prebuilder.checkRootDirValue(builder, config_parsed_filename, project.getProperty('build.drive'), config_type)
if toAppendEmakeRoot is not None:
project.setProperty("emake.root.to.append", str(toAppendEmakeRoot))
prebuilder.writeTopLevel(os.path.join(project.getProperty('build.drive') + os.sep, 'ZIP_' + config_type + outputext), project.getProperty('temp.build.dir'), config_parsed_filename)
else:
raise Exception('There are no archive configs to build. Looked for %s' % config_type)
]]>
</scriptdef>
<!-- Zips the Engineering English content. -->
<target name="zip-ee" if="internal.archiving.enabled">
<hlm:zipContentMacro type="ee" file="${zip.config.file}" />
</target>
<!-- Zips the subcon content. -->
<target name="zip-subcon" if="internal.archiving.enabled">
<hlm:zipContentMacro type="subcon" file="${zip.config.file}" />
</target>
<!-- Zips the subcon rom content. -->
<target name="zip-subcon-roms" if="internal.archiving.enabled">
<hlm:zipContentMacro type="subcon_roms" file="${zip.config.file}" />
</target>
<!-- Zips the localised content. -->
<target name="zip-localised" if="internal.archiving.enabled">
<hlm:zipContentMacro type="localised" file="${zip.config.file}" />
</target>
<!-- Zips the trace rom content -->
<target name="zip-trace-roms" if="internal.archiving.enabled">
<hlm:zipContentMacro type="trace_roms" file="${zip.config.file}" />
</target>
<!-- Publishes the content defined by the filesets at the top of this project.
The files are split into 4 filesets to parallelize the publish operation:
* publish.files1
* publish.files2
* publish.files3
* publish.files4
All content is copied to the location defined by publish.dir; verification of the copied content against the source is currently commented out below.
-->
<target name="publish" depends="prep-publish" if="internal.publish.enabled">
<hlm:publishMacro />
<!--<resourcecount property="total.changes.in.fileset">
<fileset id="publish.files.location" dir="${build.output.dir}">
<exclude name="logs/**/*_ant_build.log" />
<exclude name="*" />
<exclude name="build/**/*" />
<exclude name="ec/**/*" />
<exclude name="temp_build_files/**/*" />
<exclude name="uda/**/*" />
<different targetdir="${publish.dir}" ignoreFileTimes="true" ignoreContents="yes" />
</fileset>
</resourcecount>
<if>
<not>
<equals arg1="${total.changes.in.fileset}" arg2="0" />
</not>
<then>
<echo message="ERROR: There are ${total.changes.in.fileset} changes in the publish directory. Uploading the changes again..." />
<hlm:publishMacro />
</then>
</if>-->
</target>
<!-- Basic publish target that publishes all important content. -->
<target name="publish-generic" depends="prep-publish,publish-tools" if="internal.publish.enabled">
<hlm:publishMacro />
</target>
<!-- Publishes tools needed to run commands from a published build area. -->
<target name="publish-tools" depends="prep-publish" if="internal.publish.enabled">
<echoproperties destfile="${publish.dir}/build_properties.txt">
<propertyset>
<propertyref name="publish.dir" />
<propertyref name="publish.release.dir" />
</propertyset>
</echoproperties>
<copy todir="${publish.dir}" preservelastmodified="true" failonerror="false">
<fileset refid="helium.files" />
</copy>
<copy todir="${publish.dir}" preservelastmodified="true" failonerror="false">
<fileset dir="${helium.dir}/tools/publish/publish_dir_files" includes="*" />
</copy>
</target>
<!-- The same as a basic publish. -->
<target name="publish-variants" depends="prep-publish" if="internal.publish.enabled">
<hlm:publishMacro />
</target>
<!-- Notifies the lxr indexing engine that the build area is ready to be indexed. -->
<target name="lxr">
<copy todir="${lxr.publish.dir}" preservelastmodified="true" failonerror="false" flatten="true">
<fileset refid="publish.files" />
</copy>
<echo file="${lxr.publish.dir}\${build.id}_ready.txt" />
</target>
<!-- Reports on the status of the build, through other targets. -->
<target name="report" depends="binary-sizes-log"/>
<!-- Delta zips -->
<!-- Output location
@type string
@scope private
-->
<property name="delta.zip.location" value="${build.output.dir}/build_area/delta_zip" />
<!-- Temp location
@type string
@scope private
-->
<property name="delta.zip.temp.location" value="${temp.build.dir}/delta_zip" />
<!-- The zip file
@type string
@scope private
-->
<property name="delta.zip.file" value="${delta.zip.location}/${build.id}_delta_zip.zip" />
<!-- Defines the size of the zipped-up release file; the delta zipping code uses this to decide whether a delta zip is required or a full zip should be created. The delta zip is only published if it is less than half the size defined by this property.
@type string
-->
<property name="delta.zip.file.size" value="11000000" />
<!--default value of approx 11GB-->
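<!-- Worked example (assuming the value is in kilobytes, as the 11 GB note above suggests):
with delta.zip.file.size=11000000, a delta zip is only published when it is smaller than
5500000 (about 5.5 GB); larger deltas cause a full zip to be created instead.
-->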
<!-- Special Instructions file for SymDEC
@type string
@scope private
-->
<property name="delta.zip.delete.file" value="${delta.zip.location}/specialInstructions.xml" />
<!-- Temp file
@type string
@scope private
-->
<property name="delta.ant.file" value="${delta.zip.temp.location}/delta_zip.ant.xml" />
<!-- Output location of md5 file
@type string
@scope private
-->
<property name="md5.signature.file" value="${delta.zip.location}/${build.id}.md5" />
<!-- Number of partial md5 files to use during creation
@type string
@scope private
-->
<property name="md5.split" value="100" />
<!-- old.md5.file needs to be specified by the user -->
<!-- Patch evalid tool. @scope private-->
<target name="patch-evalid">
<patch patchfile="${helium.dir}/tools/publish/evalidmd5.patch" originalfile="${build.drive}/epoc32/tools/evalidmd5.pm" />
</target>
<!-- Revert evalid patch. @scope private-->
<target name="unpatch-evalid">
<patch reverse="true" patchfile="${helium.dir}/tools/publish/evalidmd5.patch" originalfile="${build.drive}/epoc32/tools/evalidmd5.pm" />
</target>
<!-- Copy extra tool for evalid. @scope private-->
<target name="pre-build-md5" depends="patch-evalid">
<copy todir="${build.drive}\epoc32\tools" file="${helium.dir}/tools/publish/evalid_multiple.pl" />
</target>
<!-- Revert patches. @scope private-->
<target name="post-build-md5" depends="unpatch-evalid" />
<!-- Generate a list of checksums for all files in the build area using EBS system. @scope private-->
<target name="build-md5-ebs" depends="pre-build-md5">
<mkdir dir="${delta.zip.location}" />
<hlm:python failonerror="true">
import os
import delta_zip
builder = delta_zip.MD5SignatureBuilderEBS("${build.drive}", r"${md5.split}", r"${delta.zip.temp.location}", r"${delta.exclude.commasep}", r"${delta.zip.temp.location}\list_files.txt")
builder.build(r"${md5.signature.file}")
</hlm:python>
<antcall target="post-build-md5" />
</target>
<!-- Generate a list of checksums for all files in the build area using EC system. @scope private-->
<target name="build-md5-ec" depends="pre-build-md5">
<mkdir dir="${delta.zip.location}" />
<echo>delta.zip.temp.location: ${delta.zip.temp.location}</echo>
<hlm:python failonerror="true">
import os
import delta_zip
builder = delta_zip.MD5SignatureBuilderEC("${build.drive}", r"${md5.split}", r"${delta.zip.temp.location}", r"${delta.exclude.commasep}", "${ec.cluster.manager}", "${ec.build.class}", r"${delta.zip.temp.location}\list_files.txt")
builder.build("${md5.signature.file}")
</hlm:python>
<antcall target="post-build-md5" />
</target>
<!-- Generate a list of checksums for all files in the build area -->
<target name="build-md5">
<!-- Comma-separated list of directories and patterns excluded from the zip
@type string
@scope public
-->
<property name="delta.exclude.commasep" value="epoc32_save.zip,output/**/*,delta_zips/**/*,temp/**/*,epoc32/**/*.sym,epoc32/BUILD/**/*" />
<echo>build MD5 started</echo>
<if>
<isset property="build.system.ec-helium" />
<then>
<echo>build EC MD5</echo>
<antcall target="build-md5-ec" />
</then>
<else>
<echo>build EBS MD5</echo>
<antcall target="build-md5-ebs" />
</else>
</if>
</target>
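<!-- Example usage (illustrative; the exclusion value shown is a shortened variant of the default defined above):
<pre>
ant delta-zip -Ddelta.exclude.commasep="epoc32_save.zip,output/**/*,epoc32/**/*.sym"
</pre>
The build-md5 target picks the EC-based generator when build.system.ec-helium is set and falls
back to the EBS-based generator otherwise; the resulting checksums are written to ${md5.signature.file}.
-->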
<!-- Compare builds using md5 files and generate zips -->
<target name="generate-delta" if="old.md5.file.present">
<hlm:python>
import delta_zip
delta = delta_zip.DeltaZipBuilder(r'${build.drive}', r'${delta.zip.temp.location}', r'${old.md5.file}', r'${md5.signature.file}')
delta.create_delta_zip(r'${delta.zip.file}', r'${delta.zip.delete.file}', r'${number.of.threads}', r'${delta.ant.file}')
</hlm:python>
<ant antfile="${delta.ant.file}" />
<hlm:python outputproperty="old.release.label">
import ant
import os
md5file = os.path.basename(ant.get_property(r'${old.md5.file}'))
print md5file.replace('.md5', '')
</hlm:python>
<move file="${delta.zip.delete.file}" todir="${delta.zip.temp.location}" />
<move file="${md5.signature.file}" todir="${delta.zip.temp.location}" />
<exec executable="C:\APPS\symrec\symrec" failonerror="${failonerror}">
<arg value="create" />
<arg value="-ser" />
<arg value="${release.grace.service}" />
<arg value="-pro" />
<arg value="${release.grace.product}" />
<arg value="-rel" />
<arg value="${release.label}" />
<arg value="-relroot" />
<arg value="${delta.zip.location}" />
<arg value="-dep" />
<arg value="${release.grace.service}/${release.grace.product}/${old.release.label}" />
</exec>
<move file="${delta.zip.temp.location}/specialInstructions.xml" tofile="${delta.zip.delete.file}" />
<move file="${delta.zip.temp.location}/${build.id}.md5" tofile="${md5.signature.file}" />
</target>
<!-- Set prop old.md5.file to md5 file from last build in publish dir -->
<target name="delta-use-last-build">
<!-- Location of current build md5 file
@type string
@scope private
-->
<property name="md5.current.build" value="${publish.dir}/build_area/delta_zip/${build.id}.md5" />
<hlm:python outputproperty="old.md5.file">
import re
import ant
import os
previousbuildnumber = 1
buildnum = r"${build.number}"
while(previousbuildnumber > 0 and previousbuildnumber != ''):
previousbuildnumber = ant.get_previous_build_number(buildnum)
try:
md5lastbuild = re.sub(r'\.(${build.number})([\/_.])', r'.' + previousbuildnumber + r'\2', r'${md5.current.build}')
if os.path.exists(md5lastbuild):
print md5lastbuild
break
except ValueError:
print "Old MD5 cannot be found"
buildnum = previousbuildnumber
if not os.path.exists(md5lastbuild):
print "Old MD5 cannot be found"
</hlm:python>
<echo>old.md5.file: ${old.md5.file}</echo>
<available file="${old.md5.file}" property="old.md5.file.present" />
<echo>${old.md5.file.present}</echo>
</target>
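<!-- Worked example (all values hypothetical): with build.number=003, build.id=myproduct.003 and
publish.dir=\\server\builds\myproduct, md5.current.build is
\\server\builds\myproduct/build_area/delta_zip/myproduct.003.md5, and the loop above probes
myproduct.002.md5, then myproduct.001.md5, stopping at the first file that exists and assigning
it to old.md5.file.
-->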
<!-- Creates a delta from the previous build to the current one -->
<target name="delta-zip" depends="delta-use-last-build,build-md5,generate-delta" description="Generate a delta between two build areas" />
<!-- Upload the delta and config into Grace -->
<target name="delta-zip-grace-upload">
<fileset id="grace.delta.zips.id" dir="${delta.zip.location}">
<include name="**/*.zip" />
<include name="**/*.xml" />
</fileset>
<antcall target="grace-upload">
<reference refid="grace.delta.zips.id" torefid="release.zips" />
</antcall>
</target>
<!-- Publishes the Ant build log.
Logging should be stopped before the file is copied, so this target should be called last
by the "final" target.
-->
<target name="publish-build-log" depends="prep-publish" if="internal.publish.enabled">
<copy todir="${publish.dir}/logs" preservelastmodified="true" failonerror="false">
<fileset dir="${build.log.dir}" includes="**/*.log" />
</copy>
</target>
<!-- Copy the debug log to the build area if available-->
<target name="copy-debug-logs">
<echo message="log4j.dir:${log4j.cache.dir}" />
<if>
<available file="${log4j.cache.dir}" />
<then>
<copy todir="${build.log.dir}/debug" failonerror="false">
<fileset dir="${log4j.cache.dir}" includes="hlm_*.log" />
</copy>
</then>
</if>
</target>
<!-- Target to simulate a failed build as a result of processing the
Ant output.
-->
<target name="raise-error">
<echo message="ERROR: Build failed due to exceptions" />
</target>
<!-- Update the build status by processing the ant build output log -->
<target name="build-status">
<hlm:metadatarecord database="${metadata.dbfile}">
<hlm:antmetadatainput>
<fileset casesensitive="false" file="${build.log}" />
<metadatafilterset refid="filterset.ant.output" />
</hlm:antmetadatainput>
</hlm:metadatarecord>
<hlm:signalMacro logfile="${build.log}"
signal.input="exceptionSignalInput" />
</target>
<property name="ExecListener.file" value="${build.cache.log.dir}/${build.id}_executables.csv"/>
<!-- Logs details about the tools used in the environment.
This includes name, path, version. -->
<target name="log-environment">
<hlm:environment output="${build.log.dir}/${build.id}_environment.xml">
<hlm:envdata refid="helium.tools.envdata"/>
</hlm:environment>
</target>
<!-- Exception handler to process the ant output log -->
<target name="hlm-exception-handler">
<if>
<available file="${build.log}"/>
<then>
<runtarget target="do-exception-handler" />
</then>
</if>
</target>
<target name="do-exception-handler" depends="raise-error, build-status, build-log-summary, copy-debug-logs, publish-build-log" />
<!-- Does any wrap-up at the end of the build. This should be the last target for every top-level target. -->
<target name="final" depends="build-status, build-log-summary, copy-debug-logs, publish-build-log, log-environment" />
<!-- This target zips the workarea (WA) based on the ado mapping file -->
<target name="zip-wa" depends="ido-create-ado-mapping" if="internal.archive.wa.enabled">
<tempfile property="zipwa.dynamic.config" suffix=".xml" deleteonexit="false" destdir="${temp.build.dir}"/>
<fmpp sourceFile="${helium.dir}/tools/common/templates/ido/zip-ant-wa-copy.xml.ftl" outputFile="${zipwa.dynamic.config}">
<data expandProperties="yes">
inputfile: antProperty(ado.mapping.file)
ant: antProperties()
data: eval('
java.io.FileInputStream pin = new java.io.FileInputStream(filename);
java.util.Properties props = new java.util.Properties();
props.load(pin);
return props;
', {filename:get(inputfile)})
</data>
</fmpp>
<ant antfile="${zipwa.dynamic.config}"/>
</target>
</project>