--- a/buildframework/helium/tools/publish/publish.ant.xml Thu Mar 25 12:47:55 2010 +0000
+++ b/buildframework/helium/tools/publish/publish.ant.xml Thu Mar 25 14:10:11 2010 +0000
@@ -20,17 +20,41 @@
============================================================================
-->
+<!--* @package publishing -->
<project name="publish" xmlns:hlm="http://www.nokia.com/helium">
<description>
Zipping, delta zipping and copying targets
</description>
+
+ <!--* @property delta.zip.file.size
+ Defines the size of the zipped-up release file. The delta zipping code uses it to decide whether a delta zip is required or a full zip should be created; the delta zip is only published if it is less than half the size given by this property
+ @type string
+ @editable required
+ @scope public
+ -->
- <import file="subcon.ant.xml" />
+ <!--* @property file-comparison.left_side
+ The fileset id used as the left side of the comparison.
+ @type string
+ @editable required
+ @scope public
+ -->
+
+ <!--* @property file-comparison.right_side
+ The fileset id used as the right side of the comparison.
+ @type string
+ @editable required
+ @scope public
+ -->
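+
+ <!-- An illustrative sketch (not part of this patch): both sides of the comparison are
+ plain Ant fileset ids stored in these properties; "my.left.files" is a hypothetical id.
+ <fileset id="my.left.files" dir="${build.output.dir}" includes="**/*.zip" />
+ <property name="file-comparison.left_side" value="my.left.files" />
+ -->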
+
<import file="internal-api/internal-api.ant.xml" />
<import file="nwiki.ant.xml" />
<import file="synergy.ant.xml" />
<import file="test/test2_publishing_to_diamonds.ant.xml" />
+ <!-- Maximum number of threads to use for archiving when it runs as a parallel job.
+ @type integer
+ -->
<property name="archive.max.number.of.threads" value="8" />
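+ <!-- A minimal override sketch (illustrative, not part of this patch): a configuration
+ loaded before this file can lower the cap, e.g.
+ <property name="archive.max.number.of.threads" value="4" />
+ The value 4 is only an example; Ant keeps the first definition of a property.
+ -->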
<fileset id="publish.files" dir="${build.output.dir}">
@@ -71,6 +95,9 @@
</fileset>
<!-- list of filesets -->
+ <!-- Comma-separated list of fileset ids that will be uploaded
+ @type string
+ -->
<property name="publish.fileset.ids" value="publish.files1,publish.files2,publish.files3,publish.files4" />
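+ <!-- An illustrative sketch (not part of this patch): extra content can be published by
+ defining a fileset and listing its id here before this file is imported;
+ "my.extra.files" is a hypothetical id.
+ <fileset id="my.extra.files" dir="${build.output.dir}/extra" />
+ <property name="publish.fileset.ids" value="publish.files1,publish.files2,publish.files3,publish.files4,my.extra.files" />
+ -->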
<fileset id="helium.files" dir="${helium.dir}">
@@ -110,6 +137,10 @@
<!-- Parses the zip configuration file. Expands all the ant properties. -->
<target name="preprocess-zip-config">
+ <!-- Path to the parsed zip specification file
+ @type string
+ @scope private
+ -->
<property name="zip.config.file.parsed" location="${temp.build.dir}/zip.cfg.xml.parsed" />
<copy file="${zip.config.file}" tofile="${zip.config.file.parsed}" overwrite="true">
<filterchain>
@@ -128,30 +159,36 @@
<hlm:mergeMetadataMacro file="${zip.config.file.parsed}" config="${zips.@{type}.spec.name}"/>
</pre>
-->
- <scriptdef name="mergeMetadataMacro" language="jython" uri="http://www.nokia.com/helium">
+ <macrodef name="mergeMetadataMacro" uri="http://www.nokia.com/helium">
<attribute name="file" />
<attribute name="config" />
- <![CDATA[
+ <sequential>
+ <hlm:python>
+ <![CDATA[
import configuration
import symrec
import os
-import traceback
-# Nicer name
-self.setTaskName("merge-metadata")
+import sys
+import logging
+import ant
+
+LOGGER = logging.getLogger("metadatamerger")
+LOGGER.setLevel(level=logging.INFO)
+logging.basicConfig(level=logging.INFO)
def merge_filelist(merger, filelist):
for filename in filelist:
try:
- self.log(str("Merging %s" % filename))
+ LOGGER.info("Merging %s" % filename)
merger.merge(filename)
os.unlink(filename)
except Exception, exc:
- self.log("Warning: %s" % exc)
+ LOGGER.warning("Warning: %s" % exc)
try:
- builder = configuration.NestedConfigurationBuilder(open(str(attributes.get('file')), 'r'))
+ builder = configuration.NestedConfigurationBuilder(open(ant.get_property(r'@{file}'), 'r'))
configSet = builder.getConfiguration()
- configs = configSet.getConfigurations(str(attributes.get('config')))
+ configs = configSet.getConfigurations(ant.get_property(r'@{config}'))
if len(configs) > 0:
filelist = []
@@ -159,7 +196,7 @@
if config.get_boolean("grace.metadata", False):
metadata = os.path.join(config['archives.dir'], config['name']+ ".metadata.xml")
if os.path.exists(metadata):
- self.log(str("Found %s" % metadata))
+ LOGGER.info("Found %s" % metadata)
filelist.append(metadata)
merger = None
@@ -167,22 +204,22 @@
if os.path.exists(metadata_main):
merger = symrec.MetadataMerger(metadata_main)
merge_filelist(merger, filelist)
- self.log(str("Writing %s" % metadata_main))
+ LOGGER.info(str("Writing %s" % metadata_main))
merger.save()
elif len(filelist) > 0:
input = filelist.pop(0)
merger = symrec.MetadataMerger(input)
merge_filelist(merger, filelist)
- self.log(str("Writing %s" % metadata_main))
+ LOGGER.info(str("Writing %s" % metadata_main))
merger.save(metadata_main)
os.unlink(input)
except Exception, e:
- self.log('ERROR: %s' % e)
- traceback.print_exc()
- # Let's propagate at the moment
- raise e
+ LOGGER.error('ERROR: %s' % e)
+ sys.exit(-1)
]]>
- </scriptdef>
+ </hlm:python>
+ </sequential>
+ </macrodef>
<!-- This macro allows you to add or update one archive definition inside the release metadata files.
e.g.:
@@ -190,39 +227,48 @@
<hlm:updateMetadataMacro file="<path>/release_metadata.xml" archive="<path>/archive.zip" />
</pre>
-->
- <scriptdef name="updateMetadataMacro" language="jython" uri="http://www.nokia.com/helium">
+ <macrodef name="updateMetadataMacro" uri="http://www.nokia.com/helium">
<attribute name="file" />
<attribute name="archive" />
- <attribute name="filters" />
+ <attribute name="filters" default=""/>
+ <sequential>
+ <hlm:python>
<![CDATA[
import symrec
import os
import traceback
import fileutils
-self.setTaskName("updateMetadataMacro")
-if attributes.get('file') is None:
+import ant
+import sys
+import logging
+
+LOGGER = logging.getLogger("metadataupdater")
+LOGGER.setLevel(level=logging.INFO)
+logging.basicConfig(level=logging.INFO)
+
+if ant.get_property(r'@{file}') is None:
raise Exception('file attribute is not defined.')
-if attributes.get('archive') is None:
+if ant.get_property(r'@{archive}') is None:
raise Exception('archive attribute is not defined.')
filters = None
-if attributes.get('filters') is not None:
- filters = str(attributes.get('filters')).split(r',')
+if ant.get_property(r'@{filters}') is not None:
+ filters = ant.get_property(r'@{filters}').split(r',')
try:
- filename = str(attributes.get('file'))
- archive = str(attributes.get('archive'))
+ filename = ant.get_property(r'@{file}')
+ archive = ant.get_property(r'@{archive}')
if not os.path.exists(filename):
raise Exception("Could not find file: %s" % filename)
if not os.path.exists(archive):
raise Exception("Could not find file: %s" % archive)
- self.log(str("Opening %s" % filename))
+ LOGGER.info(str("Opening %s" % filename))
md = symrec.ReleaseMetadata(filename)
if os.path.basename(archive) not in md.keys():
- self.log(str("Adding %s to metadata" % os.path.basename(archive)))
+ LOGGER.info(str("Adding %s to metadata" % os.path.basename(archive)))
md.add_package(os.path.basename(archive), md5checksum=fileutils.getmd5(archive), size=os.path.getsize(archive), filters=filters)
else:
- self.log(str("Updating %s to metadata" % os.path.basename(archive)))
+ LOGGER.info(str("Updating %s to metadata" % os.path.basename(archive)))
result = md[os.path.basename(archive)]
result['md5checksum'] = unicode(fileutils.getmd5(archive))
result['size'] = unicode(os.path.getsize(archive))
@@ -231,12 +277,12 @@
md[os.path.basename(archive)] = result
md.save()
except Exception, e:
- self.log('ERROR: %s' % e)
- traceback.print_exc()
- # Let's propagate at the moment
- raise e
+ LOGGER.error('ERROR: %s' % e)
+ sys.exit(-1)
]]>
- </scriptdef>
+ </hlm:python>
+ </sequential>
+ </macrodef>
<!-- This macro update the metadata file generated by the config provided by file.
e.g:
@@ -244,22 +290,28 @@
<hlm:updateMD5Macro file="${zip.config.file.parsed}" config="${zips.@{type}.spec.name}"/>
</pre>
-->
- <scriptdef name="updateMD5Macro" language="jython" uri="http://www.nokia.com/helium">
+ <macrodef name="updateMD5Macro" uri="http://www.nokia.com/helium">
<attribute name="file" />
<attribute name="config" />
+ <sequential>
+ <hlm:python>
<![CDATA[
import configuration
import archive
import os
import symrec
-import traceback
+import ant
+import logging
+import sys
-self.setTaskName("update-md5")
+LOGGER = logging.getLogger("metadataMD5updater")
+LOGGER.setLevel(level=logging.INFO)
+logging.basicConfig(level=logging.INFO)
# Reading the config from Ant
try:
- config_filename = str(attributes.get('file'))
- spec_name = str(attributes.get('config'))
+ config_filename = ant.get_property(r'@{file}')
+ spec_name = ant.get_property(r'@{config}')
# Loading the config file.
builder = configuration.NestedConfigurationBuilder(open(config_filename, 'r'))
configSet = builder.getConfiguration()
@@ -271,16 +323,16 @@
md5update.update()
md5update.save()
else:
- self.log(str('WARNING: Could not find %s.' % os.path.join(configs[0]['archives.dir'], "release_metadata.xml")))
+ LOGGER.warning(str('WARNING: Could not find %s.' % os.path.join(configs[0]['archives.dir'], "release_metadata.xml")))
else:
- self.log('WARNING: No config.')
+ LOGGER.warning('WARNING: No config.')
except Exception, e:
- self.log('ERROR: %s' % e)
- traceback.print_exc()
- # Let's propagate at the moment
- raise e
+ LOGGER.error('ERROR: %s' % e)
+ sys.exit(-1)
]]>
- </scriptdef>
+ </hlm:python>
+ </sequential>
+ </macrodef>
<condition property="archive.using.ec">
<or>
@@ -294,40 +346,53 @@
<attribute name="type" />
<attribute name="file" />
<attribute name="ec" default="${archive.using.ec}"/>
+ <attribute name="failonemptyconfig" default="true"/>
+ <attribute name="phase" default="archive"/>
<sequential>
<if>
<not>
<isset property="zip.@{type}.log.file" />
</not>
<then>
- <property name="zip.@{type}.log.file" location="${build.log.dir}/${build.id}_@{type}_zip.log" />
+ <property name="zip.@{type}.log.file" location="${@{phase}.log.dir}/${build.id}_@{type}_zip.log" />
</then>
</if>
+ <mkdir dir="${@{phase}.log.dir}"/>
+ <mkdir dir="${post.log.dir}"/>
+ <property name="zip.@{type}.nopolicy.log.file" location="${@{phase}.log.dir}/${build.id}_@{type}_archive.nopolicy.log" />
+ <property name="zip.@{type}.policy.log.file" location="${@{phase}.log.dir}/${build.id}_@{type}_archive.policy.log" />
<trycatch property="exception" reference="exception">
<try>
<!-- Don't print 'compressing' on console -->
<hlm:logtoconsole action="stop" />
<!-- Stops writing on ...ant_build.log file-->
- <hlm:startSpecificLogMacro name="${zip.@{type}.log.file}" />
+ <hlm:startSpecificLogMacro name="${zip.@{type}.log.file}" phase="@{phase}"/>
+ <!-- Path to the parsed zip specification file
+ @type string
+ @scope private
+ -->
<property name="zip.config.file.parsed" location="${temp.build.dir}/zip.cfg.xml.parsed" />
<copy file="@{file}" tofile="${zip.config.file.parsed}" overwrite="true">
<filterchain>
<expandproperties />
</filterchain>
</copy>
- <hlm:updateArchiveConfig configtype="${zips.@{type}.spec.name}" configfileparsed="${zip.config.file.parsed}" usingec="@{ec}" />
+ <hlm:updateArchiveConfig configtype="${zips.@{type}.spec.name}" configfileparsed="${zip.config.file.parsed}" usingec="@{ec}" failonemptyconfig="@{failonemptyconfig}"/>
<if>
<istrue value="@{ec}" />
<then>
<echo>emake.root.to.append=${emake.root.to.append}</echo>
- <hlm:emakeMacro name="archive-full-@{type}" makefile="${build.drive}/ZIP_${zips.@{type}.spec.name}.make" target="all" dir="${build.drive}" annodetail="basic,history,file,waiting" root="${emake.root.to.append}" failonerror="false" />
+ <hlm:emakeMacro name="archive-full-@{type}" makefile="${build.drive}/ZIP_${zips.@{type}.spec.name}.make" target="all" dir="${build.drive}/" annodetail="basic,history,file,waiting" root="${emake.root.to.append}" failonerror="false" phase="archive"/>
</then>
<else>
<if>
<isgreaterthan arg1="${number.of.threads}" arg2="${archive.max.number.of.threads}"/>
<then>
<ant antfile="${build.drive}/ZIP_${zips.@{type}.spec.name}.xml">
+ <!-- Number of parallel threads to run, capped by archive.max.number.of.threads
+ @type integer
+ -->
<property name="number.of.threads" value="${archive.max.number.of.threads}"/>
</ant>
</then>
@@ -358,12 +423,12 @@
<finally>
<!-- Todo: metadata: Convert to metadata structure -->
<!-- Stops log back to main log. -->
- <hlm:stopSpecificLogMacro name="${zip.@{type}.log.file}" />
+ <hlm:stopSpecificLogMacro name="${zip.@{type}.log.file}" phase="@{phase}"/>
<hlm:logtoconsole action="resume" />
</finally>
</trycatch>
<hlm:assertFileExists file="${zip.@{type}.log.file}" />
- <copy file="${zip.@{type}.log.file}" tofile="${build.log.dir}/${build.id}_@{type}_archive.nopolicy.log" verbose="true">
+ <copy file="${zip.@{type}.log.file}" tofile="${zip.@{type}.nopolicy.log.file}" verbose="true">
<filterchain>
<linecontainsregexp negate="true">
<regexp pattern="POLICY_(ERROR|WARNING|INFO)" />
@@ -372,15 +437,15 @@
</copy>
<hlm:metadatarecord database="${metadata.dbfile}">
<hlm:textmetadatainput>
- <fileset casesensitive="false" file="${build.log.dir}/${build.id}_@{type}_archive.nopolicy.log" />
+ <fileset casesensitive="false" file="${zip.@{type}.nopolicy.log.file}" />
<metadatafilterset refid="filterset.archive.nopolicy" />
</hlm:textmetadatainput>
</hlm:metadatarecord>
- <hlm:signalMacro logfile="${build.log.dir}/${build.id}_@{type}_archive.nopolicy.log"
+ <hlm:signalMacro logfile="${zip.@{type}.nopolicy.log.file}"
signal.input="archiveErrorSignalInput" />
<!-- Extracting policy errors from archiving. -->
- <copy file="${zip.@{type}.log.file}" tofile="${build.log.dir}/${build.id}_@{type}_archive.policy.log" verbose="true">
+ <copy file="${zip.@{type}.log.file}" tofile="${zip.@{type}.policy.log.file}" verbose="true">
<filterchain>
<linecontainsregexp>
<regexp pattern="POLICY_(ERROR|WARNING|INFO)" />
@@ -390,14 +455,14 @@
</tokenfilter>
</filterchain>
</copy>
- <hlm:assertFileExists file="${build.log.dir}/${build.id}_@{type}_archive.policy.log" />
+ <hlm:assertFileExists file="${zip.@{type}.policy.log.file}" />
<hlm:metadatarecord database="${metadata.dbfile}">
<hlm:textmetadatainput>
- <fileset casesensitive="false" file="${build.log.dir}/${build.id}_@{type}_archive.policy.log" />
+ <fileset casesensitive="false" file="${zip.@{type}.policy.log.file}" />
<metadatafilterset refid="filterset.archive.policy" />
</hlm:textmetadatainput>
</hlm:metadatarecord>
- <hlm:signalMacro logfile="${build.log.dir}/${build.id}_@{type}_archive.policy.log"
+ <hlm:signalMacro logfile="${zip.@{type}.policy.log.file}"
signal.input="archivePolicyErrorSignalInput" />
</sequential>
</macrodef>
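+ <!-- Usage sketch for the new attributes (illustrative only; the "my_roms" type and the
+ "post" phase value are assumptions, not defined by this patch):
+ <hlm:zipContentMacro type="my_roms" file="${zip.config.file}" phase="post" failonemptyconfig="false" />
+ -->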
@@ -410,12 +475,16 @@
<attribute name="configtype" />
<attribute name="configfileparsed" />
<attribute name="usingec" />
+ <attribute name="failonemptyconfig" />
<![CDATA[
import archive
import configuration
import logging
import os
+failonemptyconfig = True
+if attributes.get('failonemptyconfig'):
+ failonemptyconfig = str(attributes.get('failonemptyconfig')).lower() == "true"
config_parsed_filename = str(attributes.get('configfileparsed'))
config_type = str(attributes.get('configtype'))
is_it_ec = str(attributes.get('usingec'))
@@ -430,14 +499,13 @@
outputtype = 'make'
outputext = '.make'
-if len(configs) > 0:
+if len(configs) > 0 or not failonemptyconfig:
prebuilder = archive.ArchivePreBuilder(configuration.ConfigurationSet(configs), config_type, outputtype)
if os.sep == '\\':
toAppendEmakeRoot = prebuilder.checkRootDirValue(builder, config_parsed_filename, project.getProperty('build.drive'), config_type)
if toAppendEmakeRoot is not None:
project.setProperty("emake.root.to.append", str(toAppendEmakeRoot))
prebuilder.writeTopLevel(os.path.join(project.getProperty('build.drive') + os.sep, 'ZIP_' + config_type + outputext), project.getProperty('temp.build.dir'), config_parsed_filename)
-
else:
raise Exception('There are no archive configs to build. Looked for %s' % config_type)
]]>
@@ -472,24 +540,6 @@
<hlm:zipContentMacro type="trace_roms" file="${zip.config.file}" />
</target>
-
- <!-- Zips the trace rom content -->
- <target name="zip-uda-roms" unless="skip-zips">
- <hlm:zipContentMacro type="uda_roms" file="${zip.config.file}" />
- </target>
-
-
- <!-- Zips .loc files into individual packages. -->
- <target name="zip-loc-files">
- <mkdir dir="${temp.loc.dir}" />
- <exec executable="perl" dir="${build.drive}/" failonerror="${failonerror}">
- <arg file="${helium.dir}/tools/publish/get_loc_files.pl" />
- <arg value="${build.drive}" />
- </exec>
- <zip destfile="${loc.output.dir}/${loc.output.filename}" basedir="${temp.loc.dir}" />
- </target>
-
-
<!-- Publishes the content defined by the filesets at the top of this project.
The files are split into 4 filesets to parallelize the publish operation:
@@ -591,51 +641,78 @@
</hlm:python>
</target>
-
<!-- Reports on the status of the build, through other targets. -->
- <target name="report" depends="binary-sizes-log" unless="env.HLM_SUBCON">
- <antcall target="rndsdk-create-api-descr-xml" />
- </target>
-
+ <target name="report" depends="binary-sizes-log"/>
<!-- Delta zips -->
+ <!-- Output location for the delta zip
+ @type string
+ @scope private
+ -->
<property name="delta.zip.location" value="${build.output.dir}/build_area/delta_zip" />
+ <!-- Temporary location used while creating the delta zip
+ @type string
+ @scope private
+ -->
<property name="delta.zip.temp.location" value="${temp.build.dir}/delta_zip" />
+ <!-- Path of the delta zip file to create
+ @type string
+ @scope private
+ -->
<property name="delta.zip.file" value="${delta.zip.location}/${build.id}_delta_zip.zip" />
+ <!-- Defines the size of the zipped-up release file. The delta zipping code uses it to decide whether a delta zip is required or a full zip should be created; the delta zip is only published if it is less than half the size given by this property
+ @type string
+ -->
<property name="delta.zip.file.size" value="11000000" />
<!--default value of approx 11GB-->
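+ <!-- Override sketch (illustrative, not part of this patch): for a larger build area the
+ threshold could be raised before this file is imported, e.g.
+ <property name="delta.zip.file.size" value="22000000" />
+ The value 22000000 is only an example; the unit appears to be kilobytes, judging by the
+ "approx 11GB" note above.
+ -->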
+ <!-- Special Instructions file for SymDEC
+ @type string
+ @scope private
+ -->
<property name="delta.zip.delete.file" value="${delta.zip.location}/specialInstructions.xml" />
+ <!-- Temporary Ant file used for delta zip creation
+ @type string
+ @scope private
+ -->
<property name="delta.ant.file" value="${delta.zip.temp.location}/delta_zip.ant.xml" />
+ <!-- Output location of the MD5 signature file
+ @type string
+ @scope private
+ -->
<property name="md5.signature.file" value="${delta.zip.location}/${build.id}.md5" />
+ <!-- Number of partial MD5 files to use during creation
+ @type integer
+ @scope private
+ -->
<property name="md5.split" value="100" />
<!-- old.md5.file needs to be specified by the user -->
- <!-- Private: Patch evalid tool -->
+ <!-- Patch evalid tool. @scope private-->
<target name="patch-evalid">
<patch patchfile="${helium.dir}/tools/publish/evalidmd5.patch" originalfile="${build.drive}/epoc32/tools/evalidmd5.pm" />
</target>
- <!-- Private: Revert evalid patch -->
+ <!-- Revert evalid patch. @scope private-->
<target name="unpatch-evalid">
<patch reverse="true" patchfile="${helium.dir}/tools/publish/evalidmd5.patch" originalfile="${build.drive}/epoc32/tools/evalidmd5.pm" />
</target>
- <!-- Private: Copy extra tool for evalid -->
+ <!-- Copy extra tool for evalid. @scope private-->
<target name="pre-build-md5" depends="patch-evalid">
<copy todir="${build.drive}\epoc32\tools" file="${helium.dir}/tools/publish/evalid_multiple.pl" />
</target>
- <!-- Private: Revert patches -->
+ <!-- Revert patches. @scope private-->
<target name="post-build-md5" depends="unpatch-evalid" />
- <!-- Private: Generate a list of checksums for all files in the build area using EBS system -->
+ <!-- Generate a list of checksums for all files in the build area using the EBS system. @scope private -->
<target name="build-md5-ebs" depends="pre-build-md5">
<mkdir dir="${delta.zip.location}" />
<hlm:python failonerror="true">
@@ -648,7 +725,7 @@
<antcall target="post-build-md5" />
</target>
- <!-- Private: Generate a list of checksums for all files in the build area using EC system -->
+ <!-- Generate a list of checksums for all files in the build area using the EC system. @scope private -->
<target name="build-md5-ec" depends="pre-build-md5">
<mkdir dir="${delta.zip.location}" />
<echo>delta.zip.temp.location: ${delta.zip.temp.location}</echo>
@@ -665,6 +742,10 @@
<!-- Generate a list of checksums for all files in the build area -->
<target name="build-md5">
+ <!-- Comma-separated list of directories and patterns excluded from the zip
+ @type string
+ @scope public
+ -->
<property name="delta.exclude.commasep" value="epoc32_save.zip,output/**/*,delta_zips/**/*,temp/**/*,epoc32/**/*.sym,epoc32/BUILD/**/*" />
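+ <!-- Override sketch (illustrative): additional patterns can be excluded by setting the
+ property before this target runs, e.g.
+ <property name="delta.exclude.commasep" value="epoc32_save.zip,output/**/*,temp/**/*,smoketest/**/*" />
+ The "smoketest/**/*" pattern is only an example.
+ -->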
<echo>build MD5 started</echo>
<if>
@@ -724,6 +805,10 @@
<!-- Set prop old.md5.file to md5 file from last build in publish dir -->
<target name="delta-use-last-build">
+ <!-- Location of the current build's MD5 file
+ @type string
+ @scope private
+ -->
<property name="md5.current.build" value="${publish.dir}/build_area/delta_zip/${build.id}.md5" />
<hlm:python outputproperty="old.md5.file">
import re
@@ -766,7 +851,7 @@
</fileset>
<antcall target="grace-upload">
- <reference refid="grace.delta.zips.id" torefid="gracezips" />
+ <reference refid="grace.delta.zips.id" torefid="release.zips" />
</antcall>
</target>
@@ -776,16 +861,58 @@
by the "final" target last.
-->
<target name="publish-build-log" depends="prep-publish" if="publish">
- <record name="${build.log}" action="stop" append="true" />
<copy todir="${publish.dir}/logs" preservelastmodified="true" failonerror="false">
- <fileset dir="${build.log.dir}" includes="${build.id}*_ant_build.log" />
+ <fileset dir="${build.log.dir}" includes="**/*.log" />
</copy>
</target>
+ <!-- Copy the debug logs to the build log directory, if available -->
+ <target name="copy-debug-logs">
+ <echo message="log4j.dir:${log4j.cache.dir}" />
+ <if>
+ <available file="${log4j.cache.dir}" />
+ <then>
+ <copy todir="${build.log.dir}/debug" failonerror="false">
+ <fileset dir="${log4j.cache.dir}" includes="hlm_*.log" />
+ </copy>
+ </then>
+ </if>
+ </target>
+
+ <!-- Emits an error message so that processing of the Ant output marks the build
+ as failed
+ -->
+ <target name="raise-error">
+ <echo message="ERROR: Build failed due to exceptions" />
+ </target>
+
+ <!-- Update the build status by processing the ant build output log -->
+ <target name="build-status">
+ <hlm:metadatarecord database="${metadata.dbfile}">
+ <hlm:antmetadatainput>
+ <fileset casesensitive="false" file="${build.log}" />
+ <metadatafilterset refid="filterset.ant.output" />
+ </hlm:antmetadatainput>
+ </hlm:metadatarecord>
+
+ <hlm:signalMacro logfile="${build.log}"
+ signal.input="exceptionSignalInput" />
+ </target>
+
+ <!-- Exception handler to process the ant output log -->
+ <target name="hlm-exception-handler">
+ <if>
+ <available file="${build.log}"/>
+ <then>
+ <runtarget target="do-exception-handler" />
+ </then>
+ </if>
+ </target>
+
+ <target name="do-exception-handler" depends="raise-error, build-status, build-log-summary, copy-debug-logs, publish-build-log" />
<!-- Does any wrap-up at the end of the build. This should be the last target for every top-level target. -->
- <target name="final" depends="publish-build-log">
- </target>
+ <target name="final" depends="build-status, build-log-summary, copy-debug-logs, publish-build-log" />
<!-- This target will zip the WA depending on the ado mapping file -->
<target name="zip-wa" depends="ido-create-ado-mapping" if="zip.wa">