Delivery Blocks to SF
author kelvzhu
Thu, 02 Sep 2010 15:02:14 +0800
changeset 632 934f9131337b
parent 631 9435b9008a58
Delivery Blocks to SF
releasing/blocks/README.txt
releasing/blocks/cclient/README.txt
releasing/blocks/cclient/blocks/bin/blocks.bat
releasing/blocks/cclient/blocks/bin/bundle.bat
releasing/blocks/cclient/blocks/conf/comps_template.xml
releasing/blocks/cclient/blocks/conf/directives/single-bundle/file-directives.xml
releasing/blocks/cclient/blocks/conf/directives/single-bundle/pkg-directives.xml
releasing/blocks/cclient/blocks/python/UserConfiguration/UserConfiguration.py
releasing/blocks/cclient/blocks/python/UserConfiguration/XMLConfig.py
releasing/blocks/cclient/blocks/python/UserConfiguration/XMLConfigFile.py
releasing/blocks/cclient/blocks/python/UserConfiguration/__init__.py
releasing/blocks/cclient/blocks/python/blocks.py
releasing/blocks/cclient/blocks/python/blocks_info.py
releasing/blocks/cclient/blocks/python/blockscommand.py
releasing/blocks/cclient/blocks/python/bundle.py
releasing/blocks/cclient/blocks/python/bundlecommand.py
releasing/blocks/cclient/blocks/python/cmdlineapp.py
releasing/blocks/cclient/blocks/python/cmdlinecommand.py
releasing/blocks/cclient/blocks/python/cmdlineutils.py
releasing/blocks/cclient/blocks/python/comps.py
releasing/blocks/cclient/blocks/python/elementtree.py
releasing/blocks/cclient/blocks/python/generalexceptions.py
releasing/blocks/cclient/blocks/python/localbundle.py
releasing/blocks/cclient/blocks/python/sourceslist.py
releasing/blocks/cclient/blocks/python/uitools.py
releasing/blocks/cclient/blocks/python/utils.py
releasing/blocks/cclient/blocks/python/workspacediff.py
releasing/blocks/cclient/blocks/utils/README.txt
releasing/blocks/cclient/blocks_files
releasing/blocks/cclient/buildbinaries.bat
releasing/blocks/cclient/buildpackage.py
releasing/blocks/cclient/copybinaries.bat
releasing/blocks/cclient/patches/linux/apt-cache-search.patch
releasing/blocks/cclient/patches/linux/apt-debian-system.patch
releasing/blocks/cclient/patches/linux/apt-ftp-archive-ret.patch
releasing/blocks/cclient/patches/linux/dpkg-posix.patch
releasing/blocks/cclient/patches/linux/dpkg-remove-chown.patch
releasing/blocks/cclient/patches/linux/dpkg-remove-dbcheck.patch
releasing/blocks/cclient/patches/linux/dpkg-remove-pathcheck.patch
releasing/blocks/cclient/patches/windows/apt-win.patch
releasing/blocks/cclient/patches/windows/dpkg-win.patch
releasing/blocks/cclient/smoketest.bat
releasing/blocks/framework/README.txt
releasing/blocks/framework/blocks-version
releasing/blocks/framework/setup_blocks.py
releasing/blocks/framework/setup_symbian.py
releasing/blocks/framework/src/Blocks/Packaging/BuildData.py
releasing/blocks/framework/src/Blocks/Packaging/ComponentBuilder.py
releasing/blocks/framework/src/Blocks/Packaging/DataSources/LinkInfoToBuildData.py
releasing/blocks/framework/src/Blocks/Packaging/DataSources/WhatLog.py
releasing/blocks/framework/src/Blocks/Packaging/DataSources/__init__.py
releasing/blocks/framework/src/Blocks/Packaging/DataSources/buildLogToGeneric.xsl
releasing/blocks/framework/src/Blocks/Packaging/DependencyProcessors/DefaultProcessors.py
releasing/blocks/framework/src/Blocks/Packaging/DependencyProcessors/RaptorDependencyProcessor.py
releasing/blocks/framework/src/Blocks/Packaging/DependencyProcessors/RomPatchProcessor.py
releasing/blocks/framework/src/Blocks/Packaging/DependencyProcessors/__init__.py
releasing/blocks/framework/src/Blocks/Packaging/DependencyProcessors/additionalDependencyRules.xml
releasing/blocks/framework/src/Blocks/Packaging/FileMapping.py
releasing/blocks/framework/src/Blocks/Packaging/Logging.py
releasing/blocks/framework/src/Blocks/Packaging/MultiprocessPackager.py
releasing/blocks/framework/src/Blocks/Packaging/PackageModel.py
releasing/blocks/framework/src/Blocks/Packaging/PackageWriter.py
releasing/blocks/framework/src/Blocks/Packaging/Rules/Rules.py
releasing/blocks/framework/src/Blocks/Packaging/Rules/__init__.py
releasing/blocks/framework/src/Blocks/Packaging/Rules/packageDirectives.xml
releasing/blocks/framework/src/Blocks/Packaging/Rules/sourceRules.xml
releasing/blocks/framework/src/Blocks/Packaging/Rules/targetRules.xml
releasing/blocks/framework/src/Blocks/Packaging/Storage.py
releasing/blocks/framework/src/Blocks/Packaging/__init__.py
releasing/blocks/framework/src/Blocks/__init__.py
releasing/blocks/framework/src/Blocks/arfile.py
releasing/blocks/framework/src/Blocks/debfile.py
releasing/blocks/framework/src/Blocks/filelock.py
releasing/blocks/framework/src/Blocks/gpg.py
releasing/blocks/framework/src/Blocks/singleinstance.py
releasing/blocks/framework/src/SymbianUtils/Evalid.py
releasing/blocks/framework/src/SymbianUtils/Readelf.py
releasing/blocks/framework/src/SymbianUtils/__init__.py
releasing/blocks/framework/src/SymbianUtils/bin/README.txt
releasing/blocks/framework/src/plugins/filter_blocks.py
releasing/blocks/framework/symbian-version
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/README.txt	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,20 @@
+Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+All rights reserved.
+This component and the accompanying materials are made available
+under the terms of "Eclipse Public License v1.0"
+which accompanies this distribution, and is available
+at the URL "http://www.eclipse.org/legal/epl-v10.html".
+
+
+General Information
+-------------------
+
+Blocks is a software packaging framework and a set of package management tools.
+
+Content:
+1. Blocks command-line client (a workspace management tool for Windows/Linux), located in the cclient directory
+2. Blocks Packaging Framework (a Python library for Windows/Linux), located in the framework directory
+
+Check the README files in both directories for further information.
+
+NOTE: The Blocks Packaging Framework (PFW) should be installed before using the command-line client, as some of the client's functionality depends on it.
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/README.txt	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,27 @@
+Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+All rights reserved.
+This component and the accompanying materials are made available
+under the terms of "Eclipse Public License v1.0"
+which accompanies this distribution, and is available
+at the URL "http://www.eclipse.org/legal/epl-v10.html".
+
+
+Blocks Command-line Client Development Environment Setup Instructions for Windows
+---------------------------------------------------------------------------------
+
+1. Install Python 2.6 with pywin32 (e.g. ActivePython)
+2. Install the Blocks PFW (instructions in the framework directory)
+3. Install Cygwin to "C:\cygwin-1.7" with C/C++ development tools such as gcc
+4. Put the dpkg sources (versions 1.14.23 and 1.14.29 are known to work) into the dpkg directory; see the layout sketch after these steps
+  * Available at http://security.debian.org/debian-security/pool/updates/main/d/dpkg/dpkg_1.14.29.tar.gz
+5. Put the apt sources (version 0.7.20.2 has been tested to work) into the apt directory
+  * Available at http://ftp.de.debian.org/debian/pool/main/a/apt/apt_0.7.20.2+lenny1.tar.gz
+6. Apply the Windows patches from the patches\windows directory:
+  * Go to the apt dir and run "patch -p1 -E -i ..\patches\windows\apt-win.patch"
+  * Go to the dpkg dir and run "patch -p1 -E -i ..\patches\windows\dpkg-win.patch"
+7. Compile dpkg and apt by running buildbinaries.bat (expect some errors, especially in dselect, which is not needed)
+  * Make sure that the final step, which copies the binaries, succeeds!
+8. Put the needed utilities into the blocks\utils directory (instructions in the utils dir as README.txt)
+9. If the Blocks PFW is installed or on PYTHONPATH, you can run a quick test with smoketest.bat
+10. Create the Windows zip package of Blocks by running "python buildpackage.py blocks"
+  * Make sure the command succeeds without errors
\ No newline at end of file
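
The steps above imply roughly the following layout under cclient (a sketch; only
the items referenced by the steps and the file list are shown):

    cclient\
        apt\                   apt sources (step 5), patched in step 6
        dpkg\                  dpkg sources (step 4), patched in step 6
        patches\windows\       apt-win.patch, dpkg-win.patch
        blocks\utils\          helper utilities (step 8)
        buildbinaries.bat      step 7
        smoketest.bat          step 9
        buildpackage.py        step 10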
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/bin/blocks.bat	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,31 @@
+@REM
+@REM Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+@REM All rights reserved.
+@REM This component and the accompanying materials are made available
+@REM under the terms of "Eclipse Public License v1.0"
+@REM which accompanies this distribution, and is available
+@REM at the URL "http://www.eclipse.org/legal/epl-v10.html".
+@REM
+@REM Initial Contributors:
+@REM Nokia Corporation - initial contribution.
+@REM
+@REM Contributors:
+@REM
+@REM Description:
+@REM Runs blocks main python file
+@REM
+
+@ECHO OFF
+SETLOCAL
+
+SET __PYTHON__=python.exe
+IF DEFINED BLOCKS_PYTHON SET __PYTHON__="%BLOCKS_PYTHON%"
+
+SET CYGWIN=nodosfilewarning
+
+SET __BLOCKS__="%~dp0..\python\blocks.py"
+%__PYTHON__% %__BLOCKS__% %*
+
+IF DEFINED BLOCKS_TESTING exit %errorlevel%
+
+ENDLOCAL
\ No newline at end of file
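
blocks.bat above resolves the interpreter as python.exe from PATH unless
BLOCKS_PYTHON names a specific one. Assuming blocks.bat is on PATH, a typical
override (interpreter path illustrative) might look like:

    set BLOCKS_PYTHON=C:\Python26\python.exe
    blocks workspace-list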
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/bin/bundle.bat	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,31 @@
+@REM
+@REM Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+@REM All rights reserved.
+@REM This component and the accompanying materials are made available
+@REM under the terms of "Eclipse Public License v1.0"
+@REM which accompanies this distribution, and is available
+@REM at the URL "http://www.eclipse.org/legal/epl-v10.html".
+@REM
+@REM Initial Contributors:
+@REM Nokia Corporation - initial contribution.
+@REM
+@REM Contributors:
+@REM
+@REM Description:
+@REM Runs bundle main python file
+@REM
+
+@ECHO OFF
+SETLOCAL
+
+SET __PYTHON__=python.exe
+IF DEFINED BLOCKS_PYTHON SET __PYTHON__="%BLOCKS_PYTHON%"
+
+SET CYGWIN=nodosfilewarning
+
+SET __BUNDLE__="%~dp0..\python\bundle.py"
+%__PYTHON__% %__BUNDLE__% %*
+
+IF DEFINED BLOCKS_TESTING exit %errorlevel%
+
+ENDLOCAL
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/conf/comps_template.xml	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,114 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+All rights reserved.
+This component and the accompanying materials are made available
+under the terms of "Eclipse Public License v1.0"
+which accompanies this distribution, and is available
+at the URL "http://www.eclipse.org/legal/epl-v10.html".
+
+Initial Contributors:
+Nokia Corporation - initial contribution.
+
+Contributors:
+
+Description:
+Comps-file template for generating group information
+
+-->
+
+<comps>
+    <group>
+        <id>tools-win</id>
+        <name>Tools Windows</name>
+        <description>Development tools for Windows</description>
+    </group>
+    <group>
+        <id>tools-linux</id>
+        <name>Tools Linux</name>
+        <description>Development tools for Linux</description>
+    </group>
+    <group>
+        <id>tools-dev</id>
+        <name>Tools Development</name>
+        <description>Files needed for tools development</description>
+    </group>
+    <group>
+        <id>arm-dev</id>
+        <name>ARM Development</name>
+        <description>Files needed for ARM target development</description>
+    </group>
+    <group>
+        <id>emul-dev</id>
+        <name>Emulator Development</name>
+        <description>Files needed for Emulator development</description>
+    </group>
+    <group>
+        <id>emulator</id>
+        <name>Emulator Binaries</name>
+        <description>Symbian OS emulator</description>
+    </group>
+    <group>
+        <id>arm</id>
+        <name>ARM Binaries</name>
+        <description>ARM binaries</description>
+    </group>
+    <group>
+        <id>doc</id>
+        <name>Documentation</name>
+        <description>Documentation</description>
+    </group>
+    <group>
+        <id>legacy</id>
+        <name>Legacy</name>
+        <description>Legacy files</description>
+    </group>
+    <group>
+        <id>images</id>
+        <name>Images</name>
+        <description>Image files</description>
+    </group>
+    <group>
+        <id>src</id>
+        <name>Sources</name>
+        <description>Source files</description>
+    </group>
+
+    <rules>
+        <rule groupid="tools-win">
+            <match key="Package">\.tools(-win)?$</match>
+        </rule>
+        <rule groupid="tools-linux">
+            <match key="Package">\.tools(-linux)?$</match>
+        </rule>
+        <rule groupid="tools-dev">
+            <match key="Package">\.dev(-tools)?$</match>
+        </rule>
+        <rule groupid="arm-dev">
+            <match key="Package">\.dev(-arm)?$</match>
+        </rule>
+        <rule groupid="emul-dev">
+            <match key="Package">\.dev(-emul)?$</match>
+        </rule>
+        <rule groupid="arm">
+            <match key="Package">\.exec-arm$|\.resource(-l10n)?(-arm)?$</match>
+        </rule>
+        <rule groupid="emulator">
+            <match key="Package">\.exec-emul$|\.resource(-l10n)?(-emul)?$</match>
+        </rule>
+        <rule groupid="legacy">
+            <match key="Package">\.legacy$</match>
+        </rule>
+        <rule groupid="doc">
+            <match key="Package">\.doc$</match>
+        </rule>
+        <rule groupid="images">
+            <match key="Package">\.images$</match>
+        </rule>
+        <rule groupid="src">
+            <match key="Package">\.src$</match>
+        </rule>
+    </rules>
+
+</comps>
\ No newline at end of file
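
The rules section above classifies packages into groups by matching the
"Package" key against each regular expression. A minimal sketch of that
matching in Python (the rule data is copied from the template; the package
names and the use of re.search are illustrative, as the real consumer of this
file is comps.py):

    import re

    # (groupid, pattern) pairs copied from the template above
    rules = [
        ("tools-win", r"\.tools(-win)?$"),
        ("arm", r"\.exec-arm$|\.resource(-l10n)?(-arm)?$"),
        ("src", r"\.src$"),
    ]

    def groupsFor(package):
        ''' Return the group ids whose pattern matches the package name '''
        return [gid for gid, pattern in rules if re.search(pattern, package)]

    print groupsFor("foo.tools-win")          # ['tools-win']
    print groupsFor("foo.resource-l10n-arm")  # ['arm']
    print groupsFor("foo.src")                # ['src']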
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/conf/directives/single-bundle/file-directives.xml	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+All rights reserved.
+This component and the accompanying materials are made available
+under the terms of "Eclipse Public License v1.0"
+which accompanies this distribution, and is available
+at the URL "http://www.eclipse.org/legal/epl-v10.html".
+
+Initial Contributors:
+Nokia Corporation - initial contribution.
+
+Contributors:
+
+Description:
+Simple file directives to generate just one bundle
+
+-->
+
+<rules>
+  <rule>
+    <match>
+      <path>.*</path>
+    </match>
+    <type>file</type>
+    <package>default</package>
+  </rule>
+</rules>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/conf/directives/single-bundle/pkg-directives.xml	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+All rights reserved.
+This component and the accompanying materials are made available
+under the terms of "Eclipse Public License v1.0"
+which accompanies this distribution, and is available
+at the URL "http://www.eclipse.org/legal/epl-v10.html".
+
+Initial Contributors:
+Nokia Corporation - initial contribution.
+
+Contributors:
+
+Description:
+Simple package directives to generate just one bundle
+
+-->
+
+<rules>
+  <rule>
+    <match>
+      <package>.*</package>
+    </match>
+    <arch>noarch</arch>
+    <suffix></suffix>
+  </rule>
+</rules>
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/python/UserConfiguration/UserConfiguration.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,380 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Users configuration manager
+#
+
+'''
+The filesystem hierarchy for user metadata and corresponding classes. n is a
+directory for the nth workspace.
+
++.blocks (UserConfiguration)
+  - blocks.conf (GlobalConfiguration)
+  - workspaces (WorkspaceList)
+  + n (Workspace)
+    - available
+    - blocks (BlocksFile)
+    - list
+    - status
+    + info
+    + updates
+
+A Blocks workspace is essentially a deb repository. There may be many
+workspaces per user on a system.
+
+Integrity
+
+There is no guarantee that workspaces and metadata are consistent if either the
+metadata or workspaces are modified simultaneously by multiple clients or
+offline by other means.
+
+Orphaned workspace directories are ignored.
+
+The UserConfiguration.removeCruft() method is provided to clean up metadata.
+
+Usage
+
+Creation:
+u = UserConfiguration()
+u.createWorkspace("foo", "/my/workspaces/foo")
+
+Access:
+v = UserConfiguration()
+v.getWorkspaceByName("foo").path
+v.getWorkspaceByName("foo").sources
+v.getWorkspaceById(1).name
+
+
+'''
+
+# TODO: This module needs some refactoring
+
+import os
+import sys
+import shutil
+import logging
+from xml.dom import minidom
+from collections import namedtuple
+import platform
+
+# TODO: Get rid of XMLConfigFile and use XMLConfig
+from XMLConfigFile import *
+from XMLConfig import XMLConfig, Property, ConfigError
+if not ".." in sys.path:
+    sys.path.append("..")
+import utils
+
+class BadWorkspace(Exception):
+    ''' The workspace is not listed in metadata '''
+
+class MissingWorkspace(Exception):
+    ''' The workspace directory cannot be found '''
+
+WorkspaceConfigError = ConfigError
+
+class WorkspaceList(XMLConfigFile):
+    '''
+    The I{workspace} configuration file found in the user blocks metadata
+    directory.
+
+    int(workspaceID) - str(workspacename) pairs saved as an XML file
+    '''
+    schema = os.path.join(os.path.dirname(__file__), "workspacelist.wsd")
+    WorkspaceInfo = namedtuple("workspace", "id, name, path")
+
+    def __init__(self, configFilePath):
+        self._workspaces = {}
+        self.default_workspace = 0
+        XMLConfigFile.__init__(self, configFilePath)
+
+##        self.name = doc.getElementsByTagName("name")[0].firstChild.nodeValue.strip()
+##        self.path = doc.getElementsByTagName("path")[0].firstChild.nodeValue.strip()
+    def fromXMLString(self, xmlString):
+        doc = minidom.parseString(xmlString)
+
+        default_tag = doc.getElementsByTagName("default")
+        if default_tag:
+            self.default_workspace = int(default_tag[0].firstChild.nodeValue.strip())
+        logging.debug("Default workspace: %s", self.default_workspace)
+
+        for w in doc.getElementsByTagName("workspace"):
+            id = int(w.getElementsByTagName("id")[0].firstChild.nodeValue.strip())
+            name = w.getElementsByTagName("name")[0].firstChild.nodeValue.strip()
+            path = w.getElementsByTagName("path")[0].firstChild.nodeValue.strip()
+            wsInfo = self.WorkspaceInfo(id, name, path)
+            self._workspaces[id] = wsInfo
+            #self._workspaces[name] = wsInfo
+
+    def toXMLString(self):
+        doc = minidom.Document()
+        workspaces = doc.createElement("workspaces")
+
+        default = doc.createElement("default")
+        default.appendChild(doc.createTextNode(str(self.default_workspace)))
+        workspaces.appendChild(default)
+
+        doc.appendChild(workspaces)
+        for id, ws in self._workspaces.iteritems():
+            n = doc.createElement("name")
+            n.appendChild(doc.createTextNode(ws.name))
+            i = doc.createElement("id")
+            i.appendChild(doc.createTextNode(str(id)))
+            p = doc.createElement("path")
+            p.appendChild(doc.createTextNode(ws.path))
+            w = doc.createElement("workspace")
+            w.appendChild(n)
+            w.appendChild(i)
+            w.appendChild(p)
+            workspaces.appendChild(w)
+        return doc.toprettyxml()
+
+    def setDefaultWorkspace(self, id):
+        try:
+            id = int(id)
+        except ValueError:
+            pass
+        if id in self.getWorkspaces() + [0]: # Use zero id to unset
+            self.default_workspace = id
+            self.write()
+        else:
+            raise BadWorkspace("No workspace with id '%s'" % id)
+
+    def getWorkspaceIdByName(self, name):
+        #return self._workspaces.get(name)
+        ids = [v.id for v in self._workspaces.values() if v.name == name]
+        return ids[0] if ids else None
+
+    def getWorkspaceNameById(self, id):
+        ws = self._workspaces.get(id)
+        return ws.name if ws else None
+
+    def getWorkspacePath(self, id):
+        ws = self._workspaces.get(id)
+        return ws.path if ws else None
+
+    def getNextId(self):
+        try:
+            keys = self._workspaces.keys()
+            keys.sort()
+            id = keys[-1] + 1
+        except IndexError:
+            id = 1
+        return id
+
+    def getWorkspaces(self):
+        return self._workspaces.keys()
+
+    def addWorkspaceToList(self, name, path, id=None):
+        if id is None: # pragma: no cover
+            id = self.getNextId()
+        if self.getWorkspaceIdByName(name):
+            raise BadWorkspace("Workspace name is not unique")
+        if len(name) < 1:
+            raise BadWorkspace("Name must be a non-empty string")
+        if id < 1: # pragma: no cover
+            raise BadWorkspace("Id must be a positive integer")
+
+        if not self.isWorkspacePathUnique(path):
+            raise BadWorkspace("Workspace path is not unique. Any of the workspace paths must not be under existing workspace path.")
+
+        self._workspaces[id] = self.WorkspaceInfo(id, name, path)
+        self.write()
+
+    def isWorkspacePathUnique(self, path):
+        for wsid in self.getWorkspaces():
+            if not utils.pathsUnique(path, self.getWorkspacePath(wsid)):
+                return False
+        return True
+
+    def removeWorkspace(self, id):
+        idnum = int(id)
+        if idnum in self._workspaces:
+            del self._workspaces[idnum]
+            # If default workspace -> unset default workspace
+            if self.default_workspace == idnum:
+                logging.debug("Removing workspace which is default. Unsetting default workspace.")
+                self.default_workspace = 0
+            self.write()
+
+class WorkspaceConfig(XMLConfig):
+
+    def __init__(self, path):
+        XMLConfig.__init__(self, path)
+        self.addProperty(Property("cache-bundles", Property.BOOLEAN, False))
+        self.load()
+
+class Workspace(WorkspaceConfig):
+    '''
+    Workspace metadata directory; one is created in the user metadata directory
+    for each workspace.
+    '''
+    def __init__(self, metadataDir, name, path):
+        '''
+        Create a new workspace by specifying metadata directory, workspace name and workspace path.
+
+        OR
+
+        Read an existing Workspace by specifying metadata path only.
+
+        @param metadataDir:       Location of workspace metadata directory
+        @type metadataDir:        String
+        @param name:     Name of the workspace
+        @type name:      String
+        @param path:     Location of the workspace
+        @type path:      String
+        '''
+        self.metadataDir = metadataDir
+        if not os.path.isdir(self.metadataDir):
+            path = os.path.abspath(path)
+            os.mkdir(self.metadataDir)
+            for d in ("info", "updates", "triggers"):
+                os.mkdir(os.path.join(self.metadataDir, d))
+            for f in ("available", "list", "status"):
+                h = open(os.path.join(self.metadataDir, f), "w")
+                h.close()
+            # Apt dirs
+            aptBaseDir = os.path.join(self.metadataDir, "apt")
+            os.makedirs(os.path.join(aptBaseDir, "lists", "partial"))
+            os.makedirs(os.path.join(aptBaseDir, "cache", "archives", "partial"))
+            # TODO: Copy apt config
+
+        self.name = name
+        self.path = os.path.normcase(path)
+
+        WorkspaceConfig.__init__(self, os.path.join(self.metadataDir, "blocks.conf"))
+
+class UserConfiguration(WorkspaceList, WorkspaceConfig):
+    '''
+    The user blocks metadata.
+    @var path: The name of the directory containing the metadata files.
+    '''
+    def __init__(self):
+        self.path = utils.getMetaPath()
+        utils.createFile(os.path.join(self.path, "trusted.gpg"))
+
+        WorkspaceList.__init__(self, os.path.join(self.path, "workspaces"))
+        WorkspaceConfig.__init__(self, os.path.join(self.path, "blocks.conf"))
+
+        self.logger = logging.Logger(self.__class__.__name__)
+
+    def workspaceExists(self, id):
+        return bool(self.getWorkspaceNameById(id))
+
+    def checkWorkspaceExists(self, id):
+        if not self.getWorkspaceNameById(id):
+            raise BadWorkspace("Workspace with id %s does not exist." % id)
+
+    def getWorkspaceMetadataPath(self, id):
+        return os.path.join(self.path, str(id))
+
+    def createWorkspace(self, name, path):
+        '''
+        @param name:    workspace name
+        @type name:     String
+        @param path:    workspace location
+        @type path:     String
+        '''
+        # check name is unique
+        # get next ID
+        # create directory and empty contents
+        # write to workspaces file
+        id = self._getNextDirectoryId(self.path)
+        if self.getNextId() > id:
+            id = self.getNextId()
+        mdpath = self.getWorkspaceMetadataPath(id)
+        while os.path.isfile(mdpath) or os.path.isdir(mdpath): # pragma: no cover
+            id += 1
+            mdpath = self.getWorkspaceMetadataPath(id)
+        self.addWorkspaceToList(name, path, id)
+        Workspace(mdpath, name, path)
+
+    @staticmethod
+    def _getNextDirectoryId(path):
+        '''
+        Get the next ID in sequence.
+
+        I{path} is taken to contain files and/or subdirectories named with a
+        running sequence of integers
+
+        @return: version
+        @rtype: Integer
+        '''
+        try:
+            contents = [int(d) for d in os.listdir(path)]
+            contents.sort()
+            last = int(contents[-1]) + 1
+            return last
+        except Exception:
+            return 1 # pragma: no cover
+
+    def getWorkspaceById(self, id):
+        wsname = self.getWorkspaceNameById(id)
+        if not wsname:
+            raise BadWorkspace("Workspace with id '%s' does not exist" % id)
+        return Workspace(self.getWorkspaceMetadataPath(id), wsname, self.getWorkspacePath(id))
+
+    def getWorkspaceByName(self, name): # pragma: no cover
+        id = self.getWorkspaceIdByName(name)
+        if not id:
+            raise BadWorkspace("Workspace with name '%s' does not exist" % name)
+        return Workspace(self.getWorkspaceMetadataPath(id), self.getWorkspaceNameById(id), self.getWorkspacePath(id))
+
+    def getOrphanedListEntries(self):
+        ''' Workspaces in list that have no metadatadir '''
+        return [wid for wid in self.getWorkspaces() if not os.path.isdir(self.getWorkspaceMetadataPath(wid))]
+
+    def getOrphanedMetadataDirs(self):
+        ''' Metadata directories that have no corresponding list entry '''
+        return [os.path.join(self.path, d)
+                for d in os.listdir(self.path)
+                if os.path.isdir(os.path.join(self.path, d)) and d.isdigit() and
+                int(d) not in self.getWorkspaces()]
+
+    def getOrphanedWorkspaces(self):
+        ''' Workspaces that no longer exist. Ignore errors caused by missing metadata. '''
+        orphaned = []
+        for wid in self.getWorkspaces():
+            try:
+                if not os.path.isdir(self.getWorkspaceById(wid).path):
+                    orphaned.append(wid)
+            except MissingWorkspace:
+                pass
+        return orphaned
+
+    def removeWorkspace(self, id):
+        '''
+        @param id: WS ID
+        '''
+        path = self.getWorkspaceMetadataPath(id)
+        WorkspaceList.removeWorkspace(self, id)
+        if os.path.isdir(path):
+            shutil.rmtree(path)
+
+    def removeCruft(self, checkForBlocks=True): # pragma: no cover
+        ''' Delete workspaces from list that have no corresponding metadatadir '''
+        for gone in self.getOrphanedListEntries():
+            self.logger.info("Removing workspace %s" % gone)
+            self.removeWorkspace(gone)
+
+        for gone in self.getOrphanedMetadataDirs():
+            if checkForBlocks:
+                if not os.path.isfile(os.path.join(gone, "blocks")):
+                    self.logger.warning("No blocks file in metadatadir, not removing %s"%gone)
+                    return
+            self.logger.info("Removing metadata dir %s" % gone)
+            shutil.rmtree(gone)
+
+        for gone in self.getOrphanedWorkspaces():
+            self.logger.info("Removing workspace %s" % gone)
+            self.removeWorkspace(gone)
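
For reference, WorkspaceList.toXMLString() above serializes the workspace list
in roughly this shape (element order follows the code; the name and path values
are illustrative, and minidom's pretty-printer adds the whitespace):

    <?xml version="1.0" ?>
    <workspaces>
        <default>1</default>
        <workspace>
            <name>foo</name>
            <id>1</id>
            <path>/my/workspaces/foo</path>
        </workspace>
    </workspaces>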
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/python/UserConfiguration/XMLConfig.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,146 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Simple XML-configuration file with multiple properties
+#
+
+import os
+import sys
+if ".." not in sys.path:
+    sys.path.append("..")
+
+import elementtree as ET
+import utils
+
+class Property(object):
+    '''A configuration property with a name, a type and a value.
+
+    The value given at construction acts as the default until a new value is set or loaded.
+    '''
+    STRING, BOOLEAN = range(2)
+
+    def __init__(self, name, type, value):
+        assert type == Property.STRING or type == Property.BOOLEAN
+        self.name = name
+        self.type = type
+        self.value = value
+
+    @property
+    def value(self):
+        return self._value
+
+    @value.setter
+    def value(self, value):
+        if self.type == Property.BOOLEAN:
+            origValue = value
+            value = utils.toBoolean(value)
+            if value is None:
+                raise ValueError("Trying to set invalid value '%s' for boolean property '%s'" % (origValue, self.name))
+        self._value = value
+
+    def setValue(self, value):
+        ''' Convenience method to set value and get (possibly converted) value back '''
+        self.value = value
+        return self.value
+
+class ConfigError(Exception):
+    ''' Configuration Error '''
+
+# TODO: Should properties be case-insensitive?
+class XMLConfig(object):
+    ''' Simple XML-configuration file with multiple properties '''
+    def __init__(self, path):
+        '''Opens the XML file, creating it if it does not exist.
+
+        After initialization, add properties with addProperty() and then call load().
+        '''
+        self.configpath = path
+        if not os.path.isfile(path):
+            self._create()
+
+        self.tree = ET.MyElementTree(file=path)
+        self.properties = {}
+
+    def addProperty(self, prop):
+        '''Adds a property to the config so it can be set/get.
+
+        prop must be a Property instance.
+        '''
+        if not isinstance(prop, Property):
+            raise ValueError("prop argument must be Property instance")
+        self.properties[prop.name] = prop
+
+    def removeProperty(self, name):
+        self._checkPropertyName(name)
+        del self.properties[name]
+
+    def load(self):
+        for prop in self.properties.itervalues():
+            value = self.tree.findtext(prop.name)
+            self.properties[prop.name].loaded = False
+            if value is not None:
+                self.properties[prop.name].value = value
+                self.properties[prop.name].loaded = True
+
+    def getPropertyValue(self, name, getDefault=True):
+        self._checkPropertyName(name)
+        if not getDefault and not self.properties[name].loaded:
+            return None
+        return self.properties[name].value
+
+    def setPropertyValue(self, name, value):
+        ''' Set property value '''
+        self._checkPropertyName(name)
+        try:
+            value = str(self.properties[name].setValue(value))
+        except ValueError, ex:
+            raise ConfigError(str(ex))
+        tag = self.tree.find(name)
+        if tag is None:
+            ET.SubElement(self.tree.getroot(), name).text = value
+        else:
+            tag.text = value
+        self._write()
+
+    def getPropertyValues(self):
+        ''' Get all the properties available '''
+        values = {}
+        for prop in self.properties.itervalues():
+            values[prop.name] = prop.value
+        return values
+
+    def _checkPropertyName(self, name):
+        if name not in self.properties:
+            raise ConfigError("Invalid property name '%s'" % name)
+
+    def _create(self):
+        self.tree = ET.MyElementTree(ET.Element("properties"))
+        self._write()
+
+    def _write(self):
+        self.tree.write(self.configpath)
+
+def test(): # pragma: no cover
+    config = XMLConfig(r"c:\temp\test.xml")
+    config.addProperty(Property("settingx", Property.BOOLEAN, False))
+    config.addProperty(Property("settingy", Property.BOOLEAN, False))
+    config.setPropertyValue("settingx", "yes")
+    print "setting x:", config.getPropertyValue("settingx")
+    print "setting y:", config.getPropertyValue("settingy")
+    config.setPropertyValue("settingx", "no")
+    print "setting x:", config.getPropertyValue("settingx")
+    config.setPropertyValue("settingx", "a")
+
+if __name__ == "__main__": # pragma: no cover
+    test()
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/python/UserConfiguration/XMLConfigFile.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,85 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# XML-configuration file base class
+#
+
+import os  # libxml2 import disabled along with the schema validation below
+
+# TODO: What to do about validation
+class XMLConfigFile(object):
+    '''
+    Validate the config against schema, read and write.
+    '''
+##    class Error(Exception):
+##        '''
+##
+##        '''
+
+##    schema = ""
+##    def isValid(cls, xmlString):
+##        '''
+##        Validate XML against schema
+##        '''
+##        ctxt = libxml2.schemaNewParserCtxt(cls.schema)
+##        validationCtxt = ctxt.schemaParse().schemaNewValidCtxt()
+##        try:
+##            doc = libxml2.parseMemory(xmlString, len(xmlString))
+##            if validationCtxt.schemaValidateDoc(doc) == 0:
+##                return True
+##        except:
+##            pass
+##        return False
+##
+##    isValid = classmethod(isValid)
+
+    def __init__(self, XMLConfigurationFilePath):
+        self.XMLConfigurationFilePath = XMLConfigurationFilePath
+        if os.path.isfile(self.XMLConfigurationFilePath):
+            self.read()
+        else:
+            self.write()
+
+    def read(self, file=None):
+        '''
+        Read from file
+        '''
+        if not file:
+            file = self.XMLConfigurationFilePath
+        f = open(file)
+        xmlString = f.read()
+        f.close()
+        # Disabled temporarily to get working on python environment without libxml2
+##        if not self.__class__.isValid(xmlString):
+##            raise ValueError, "Configuration file is not valid. See %s."%self.__class__.schema
+        self.fromXMLString(xmlString)
+
+    def write(self):
+        '''
+        Write to path
+        '''
+        xmlString = self.toXMLString()
+        # Disabled temporarily to get working on python environment without libxml2
+##        if self.__class__.isValid(xmlString):
+        f = open(self.XMLConfigurationFilePath, "w")
+        f.write(xmlString)
+        f.close()
+##        else:
+##            raise __class__.Error, "Failed to create valid XML from inputs."
+
+    def fromXMLString(self, xmlString): # pragma: no cover
+        raise NotImplementedError
+
+    def toXMLString(self): # pragma: no cover
+        raise NotImplementedError
\ No newline at end of file
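
XMLConfigFile leaves the fromXMLString()/toXMLString() pair to subclasses;
WorkspaceList in UserConfiguration.py is the real example. A minimal sketch of
the contract (the NoteFile class and its <note> element are made up for
illustration):

    from xml.dom import minidom

    class NoteFile(XMLConfigFile):
        ''' Stores a single text note as <note>...</note> '''

        def __init__(self, path):
            self.note = ""                     # default used when writing a new file
            XMLConfigFile.__init__(self, path) # reads the file, or writes it if missing

        def fromXMLString(self, xmlString):
            doc = minidom.parseString(xmlString)
            tags = doc.getElementsByTagName("note")
            if tags and tags[0].firstChild:
                self.note = tags[0].firstChild.nodeValue.strip()

        def toXMLString(self):
            doc = minidom.Document()
            note = doc.createElement("note")
            note.appendChild(doc.createTextNode(self.note))
            doc.appendChild(note)
            return doc.toprettyxml()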
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/python/UserConfiguration/__init__.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,17 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# User configuration
+#
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/python/blocks.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,67 @@
+#!/usr/bin/python
+
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Blocks main module
+#
+
+import os
+import logging
+from optparse import OptionGroup
+
+from Blocks.singleinstance import SingleInstance
+
+from cmdlineapp import CmdlineApp
+from blockscommand import BlocksCommand
+import utils
+
+class BlocksApp(CmdlineApp):
+    ''' Blocks app '''
+
+    def initParser(self):
+        CmdlineApp.initParser(self)
+        self.parser.add_option("-f", "--force", action="store_true",
+            help="Force execution of a command and do not ask any confirmations")
+        wsOptions = OptionGroup(self.parser, "Workspace selection")
+        wsOptions.add_option("--wsid", type="int", help="Use workspace id")
+        wsOptions.add_option("--wsname", help="Use workspace name")
+        self.parser.add_option_group(wsOptions)
+
+    def init(self):
+        # Allow only one blocks instance per metadata-directory
+        si = SingleInstance(utils.getMetaPath(), True)
+        if si.alreadyRunning():
+            print "Waiting for another instance of blocks to complete processing..."
+        si.waitRelease()
+
+        # Delete default variables and use BLOCKS variables instead
+        for evar, blocks_evar in {"http_proxy": "BLOCKS_HTTP_PROXY",
+                                  "ftp_proxy": "BLOCKS_FTP_PROXY",
+                                  "no_proxy": "BLOCKS_NO_PROXY"}.iteritems():
+            blocksvar = os.environ.get(blocks_evar)
+            if blocksvar:
+                logging.debug("Blocks variable %s found. Using it in place of %s. Value = %s", blocks_evar, evar, blocksvar)
+                os.environ[evar] = blocksvar
+            else:
+                if evar in os.environ:
+                    del os.environ[evar]
+                    logging.debug("Environment variable %s deleted", evar)
+
+def main():
+    app = BlocksApp(BlocksCommand, "blocks", "blocks_info")
+    app.run()
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
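
Because init() above copies BLOCKS_HTTP_PROXY/BLOCKS_FTP_PROXY/BLOCKS_NO_PROXY
over the standard variables, and deletes the standard ones otherwise, only the
BLOCKS_* variables have any effect on the client. A typical session (proxy
address and bundle name illustrative) might be:

    set BLOCKS_HTTP_PROXY=http://proxy.example.com:8080
    blocks bundle-install somebundle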
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/python/blocks_info.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,23 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Version info for blocks
+#
+
+VERSION_DATE = ""
+VERSION_MAJOR = 0
+VERSION_MINOR = 6
+VERSION_PRE_RELEASE = 0
+VERSION_PRE_RELEASE_ID = "a"
+VERSION_REVISION = 2
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/python/blockscommand.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,677 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# All blocks commands are here
+#
+
+import logging
+import os
+import tarfile
+import collections
+import urlparse
+
+from Blocks.debfile import DebFile, DebError
+
+from UserConfiguration.UserConfiguration import BadWorkspace, UserConfiguration, WorkspaceConfigError
+from sourceslist import SourcesList, RepoError
+from workspacediff import WorkspaceDiff
+from generalexceptions import ExternalProcessError, InvalidFileInput, InvalidUserInput, GenericError
+import utils
+import uitools
+import localbundle
+import cmdlineutils
+from cmdlinecommand import CmdLineCommand, CommandOptionParser
+import comps
+
+class BlocksCommand(CmdLineCommand):
+    ''' Blocks commands '''
+
+    ENV_WSID = "BLOCKS_WSID"
+    WS_NOT_DEFINED_ERROR = ("Workspace not defined.\n"
+                            "Please go to workspace directory, set environment variable '%s', use options or "
+                            "set default workspace with workspace-select." % ENV_WSID)
+    NO_REPOS_DEFINED = "No repositories defined for the current workspace."
+
+    def checkWorkspace(self, idnum=None):
+        idnum = idnum if idnum is not None else self.workspaceId
+        if idnum is None:
+            raise BadWorkspace(self.WS_NOT_DEFINED_ERROR)
+        try:
+            idnum = int(idnum)
+        except ValueError:
+            raise BadWorkspace("Workspace id must be integer")
+        self.config.checkWorkspaceExists(idnum)
+
+    def __init__(self, options):
+        CmdLineCommand.__init__(self, options)
+        # If these are called uninitialized return error string
+        # When workspace is not really needed handle it in the command
+        self.dpkg = utils.getErrorFunction(self.WS_NOT_DEFINED_ERROR)
+        self.apt = utils.getErrorFunction(self.WS_NOT_DEFINED_ERROR)
+
+        self.config = UserConfiguration()
+        self.workspaceId = None
+
+        uitools.force = bool(self.options.force or self.options.verbose <= -2)
+
+        self._setCurrentWorkspace()
+
+    def _setCurrentWorkspace(self):
+        wsid = os.environ.get(BlocksCommand.ENV_WSID)
+        if self.options.wsid is not None:
+            wsid = self.options.wsid
+        elif self.options.wsname is not None:
+            wsid = self.config.getWorkspaceIdByName(self.options.wsname)
+            if wsid is None:
+                raise BadWorkspace("Workspace with name '%s' does not exist" % self.options.wsname)
+        elif wsid:
+            if not wsid.isdigit() or not self.config.workspaceExists(int(wsid)):
+                logging.warning("Invalid workspace id '%s' in environment variable '%s'", wsid, self.ENV_WSID)
+                wsid = None
+        elif self.config.default_workspace > 0:
+            wsid = self.config.default_workspace
+        else:
+            ws = self.config.getWorkspaces()
+            if ws:
+                # If current working dir is on substed drive get the real path
+                cwd = os.path.normcase(utils.getRealPath(os.getcwd())) + os.sep
+                for idnum in ws:
+                    path = os.path.normcase(self.config.getWorkspacePath(idnum))
+                    path = utils.addPathSep(path)
+                    if cwd.startswith(path):
+                        wsid = idnum
+                        logging.info("Workspace '%s' with id %s selected because of current directory.", self.config.getWorkspaceNameById(wsid), wsid)
+                        break
+        if wsid is not None:
+            self.setWorkspace(int(wsid))
+
+    def setWorkspace(self, idnum=None):
+        '''
+        Set the workspace to be used.
+        If no workspace is needed, call without an id.
+        '''
+        if idnum is None:
+            try:
+                # Check first if there is already a valid workspace
+                self.checkWorkspace()
+            except BadWorkspace:
+                self.dpkg = cmdlineutils.Dpkg()
+                self.apt = cmdlineutils.Apt()
+        else:
+            self.workspaceId = idnum
+            wsPath = self.config.getWorkspaceById(idnum).path
+            wsMetaPath = self.config.getWorkspaceMetadataPath(self.workspaceId)
+            self.dpkg = cmdlineutils.Dpkg(wsPath, wsMetaPath, self.options)
+            self.apt = cmdlineutils.Apt(wsPath, wsMetaPath, self.options)
+
+    def cmd_workspace_add(self, path):
+        ''' Add new workspace '''
+        path = os.path.abspath(path)
+        if not os.path.isdir(path):
+            raise BadWorkspace("Directory '%s' does not exist" % path)
+        realpath = utils.getRealPath(path)
+        if realpath != path:
+            logging.info("Path '%s' on substed drive. Adding workspace to real path '%s'.", path, realpath)
+            path = realpath
+
+        name = self.cmd_options.name
+        if not name:
+            name = self.getUniqueWorkspaceName()
+
+        self.config.createWorkspace(name, path)
+        print "Workspace id:", self.config.getWorkspaceIdByName(name)
+
+    cmd_workspace_add._optionparser = CommandOptionParser()
+    cmd_workspace_add._optionparser.add_option("-n", "--name", help="Workspace name [default: Generated]")
+
+    def getUniqueWorkspaceName(self):
+        wsnames = [self.config.getWorkspaceNameById(wsid) for wsid in self.config.getWorkspaces()]
+        return utils.uniqueName("workspace", wsnames)
+
+    def getWorkspaceId(self, id_or_name):
+        if id_or_name.isdigit():
+            idnum = id_or_name
+        else:
+            idnum = self.config.getWorkspaceIdByName(id_or_name)
+            if idnum is None:
+                raise BadWorkspace("Workspace with name '%s' does not exist" % id_or_name)
+        return int(idnum)
+
+    def cmd_workspace_select(self, id_or_name):
+        ''' Select default workspace (deselect with id 0) '''
+        self.config.setDefaultWorkspace(self.getWorkspaceId(id_or_name))
+
+    def cmd_workspace_list(self):
+        ''' List workspaces and show the current one '''
+        ws = self.config.getWorkspaces()
+        if ws:
+            for idnum in ws:
+                print "%s%s%s\n  name: %s\n  path: %s\n" % (idnum,
+                                                            "*" if idnum == self.workspaceId else "",
+                                                            " (default)" if idnum == self.config.default_workspace else "",
+                                                            self.config.getWorkspaceById(idnum).name,
+                                                            self.config.getWorkspaceById(idnum).path)
+        else:
+            print "No workspaces defined."
+
+    def cmd_workspace_remove(self, id_or_name):
+        ''' Remove workspace (removes only metadata) '''
+        idnum = self.getWorkspaceId(id_or_name)
+        self.checkWorkspace(idnum)
+
+        text = """Removing workspace (only metadata).
+Workspace info:
+  Id: %d
+  Name: %s
+  Path: %s
+"""
+        workspace = self.config.getWorkspaceById(idnum)
+        if uitools.askConfirmation(text % (idnum, workspace.name, workspace.path)):
+            self.config.removeWorkspace(int(idnum))
+
+    def cmd_advanced_metadata_check(self): # pragma: no cover
+        ''' Check inconsistencies in metadata '''
+        oL = self.config.getOrphanedListEntries()
+        if oL:
+            print "Found workspace entries without matching metadata directories:"
+            for o in oL:
+                print "\tID: %s\tMissing directory: %s" % (o, self.config.getWorkspaceMetadataPath(o))
+            print
+
+        oM = self.config.getOrphanedMetadataDirs()
+        if oM:
+            print "Found unlisted metadata directories:"
+            for o in oM:
+                print "\t%s" % (o)
+            print
+
+        oW = self.config.getOrphanedWorkspaces()
+        if oW:
+            print "Found metadata for non-existent workspaces:"
+            for o in oW:
+                print "\tID: %s\tName: '%s'\tPath: %s" % (o, self.config.getWorkspaceNameById(o), self.config.getWorkspaceById(o).path)
+
+    def aptUpdate(self):
+        self.checkWorkspace()
+        try:
+            slist = SourcesList(self.getSourcesListPath())
+            if not slist.repos:
+                raise GenericError(self.NO_REPOS_DEFINED)
+            logging.debug("\n" + self.apt("update", quiet=True))
+            # get comps.xmls
+            utils.createDir(self.getCompsPath())
+            logging.debug("Starting to retrieve comps")
+            for name, uri in slist.repos:
+                uri = utils.addSuffix(uri, "/")
+                uri = urlparse.urljoin(uri, "comps.xml")
+                localpath = self.getRepoCompsPath(name)
+                logging.debug("Retrieving '%s' to '%s'", uri, localpath)
+                try:
+                    utils.urlretrieve(uri, localpath)
+                except IOError, ex:
+                    if ex.errno == 404:
+                        logging.debug(str(ex))
+                    else:
+                        logging.warning(str(ex))
+        except ExternalProcessError, ex:
+            logging.warning("Bundle list update failed: %s\nRun apt-get update for more information.", ex)
+
+    def getCompsPath(self):
+        wsMetaPath = self.config.getWorkspaceMetadataPath(self.workspaceId)
+        return os.path.join(wsMetaPath, "comps")
+
+    def getRepoCompsPath(self, reponame):
+        compsDir = self.getCompsPath()
+        return os.path.join(compsDir, reponame + ".xml")
+
+    def getCurrentCompsPaths(self):
+        slist = SourcesList(self.getSourcesListPath())
+        return [self.getRepoCompsPath(name) for name, _ in slist.repos]
+
+    @staticmethod
+    def _separateDebs(files):
+        ''' Separate names which are deb files '''
+        bundles = []
+        debs = []
+        for name in files:
+            if name.endswith(".deb"):
+                debs.append(name)
+            else:
+                bundles.append(name)
+        return bundles, debs
+
+    def cmd_bundle_install(self, bundle1, *bundle):
+        ''' Install/update bundle(s) from repo/file/http/ftp '''
+        self.checkWorkspace()
+        bundles, debs = self._separateDebs(utils.listify(bundle1, bundle))
+        if bundles:
+            self.aptUpdate()
+            self.apt("install %s" % " ".join(bundles))
+            # NOTE: If apt raises an exception we won't clean up the cache
+            self.cleanCache()
+        if debs:
+            lb = localbundle.LocalBundle(self.dpkg)
+            try:
+                lb.install(debs)
+            except ValueError, ex:
+                raise IOError("None of the deb files found. %s." % ex)
+
+    def getGroupComp(self):
+        self.aptUpdate()
+        comp = comps.Comps()
+        for path in self.getCurrentCompsPaths():
+            try:
+                comp.add(path)
+            except SyntaxError, ex:
+                raise InvalidFileInput("Error in comps.xml: %s" % ex)
+            except IOError, ex: # comps.xml not available on repo
+                logging.debug("Cannot get comps.xml: %s", ex)
+        comp.resolveInstalledGroups(self.getInstalledPackages())
+        return comp
+
+    def groupProcess(self, groups, processor):
+        comp = self.getGroupComp()
+        for name in groups:
+            group = comp.getGroup(name)
+            if group:
+                processor(group)
+            else:
+                raise InvalidUserInput("Group '%s' not found" % name)
+
+    def cmd_group_list(self):
+        ''' List available/installed bundle groups '''
+        comp = self.getGroupComp()
+        installedGroups = ["  %s" % g.name for g in comp.groups.Installed]
+        availableGroups = ["  %s" % g.name for g in comp.groups.Available]
+        result = []
+        if installedGroups:
+            result += ["Installed Groups:"] + sorted(installedGroups)
+        if availableGroups:
+            if installedGroups:
+                result += [""]
+            result += ["Available Groups:"] + sorted(availableGroups)
+        if not result:
+            result = ["No groups available on current repositories"]
+        print "\n".join(result)
+
+    @staticmethod
+    def _askGroupTypeInstall(group, typeText):
+        packages = group.packages[typeText]
+        packageCount = len(packages)
+        if packageCount > 0:
+            result = "s"
+            while result == "s":
+                result = uitools.ask("""Group contains %d %s bundles.
+Do you want to install those (answer 's' to show bundles)""" % (packageCount, typeText), ["y", "n", "s"])
+                if result == "s":
+                    print "\n%s\n" % "\n".join(packages)
+            return result == "y"
+        else:
+            return False
+
+    def cmd_group_install(self, group1, *group):
+        ''' Install bundle group(s) '''
+        def processor(group):
+            if not group.installed:
+                installPackages = group.mandatoryPackages
+                defaultValues = self.cmd_group_install._optionparser.get_default_values()
+                def processGroupType(gtype):
+                    install = getattr(self.cmd_options, gtype) == "y"
+                    ask = getattr(defaultValues, gtype) == "n" and not install
+                    if ask:
+                        install = self._askGroupTypeInstall(group, gtype)
+                    if install:
+                        return list(group.packages[gtype])
+                    else:
+                        return []
+
+                installPackages += processGroupType("default")
+                installPackages += processGroupType("optional")
+
+                self.apt("install " + utils.argsToStr(installPackages))
+                self.cleanCache()
+            else:
+                print "Group '%s' is already installed." % group.name
+
+        self.groupProcess(utils.listify(group1, group), processor)
+
+    cmd_group_install._optionparser = CommandOptionParser()
+    cmd_group_install._optionparser.add_option("-d", "--default", choices=["y", "n"], metavar="y/n",
+                                               help="Install default bundles [default: %default]")
+    cmd_group_install._optionparser.add_option("-o", "--optional", choices=["y", "n"], metavar="y/n",
+                                               help="Install optional bundles [default: %default]")
+    cmd_group_install._optionparser.set_defaults(default="y", optional="n")
+
+    def cmd_group_remove(self, group1, *group):
+        ''' Remove bundle group(s) '''
+        def processor(group):
+            if group.installed:
+                self.apt("remove " + utils.argsToStr(group.allPackages))
+            else:
+                print "Group '%s' is not installed." % group.name
+
+        self.groupProcess(utils.listify(group1, group), processor)
+
+    # TODO: Add option to show package installation status?
+    def cmd_group_info(self, group1, *group):
+        ''' Get information about group(s) '''
+        def processor(group):
+            formatString = "Group: %s\n  Description: %s\n  Installed: %s"
+            print formatString % (group.name,
+                                  group.description,
+                                  "yes" if group.installed else "no")
+            for packageType in ((group.mandatoryPackages, "Mandatory Bundles"),
+                                (group.defaultPackages, "Default Bundles"),
+                                (group.optionalPackages, "Optional Bundles")):
+                if packageType[0]:
+                    print "\n  %s:" % packageType[1]
+                    print "\n".join("    %s" % p for p in packageType[0])
+            print
+
+        self.groupProcess(utils.listify(group1, group), processor)
+
+    # TODO: Refactor this with workspacediff
+    class Output(object):
+
+        def __init__(self, differ):
+            self.differ = differ
+            self.bundlestatus = collections.defaultdict(set)
+
+        def __call__(self, status, bundle, name):
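+            # Collect per-bundle statuses; a truthy return presumably lets the
+            # differ stop scanning a bundle once it is known to be modified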
+            self.bundlestatus[bundle].add(status)
+            if ("M" in self.bundlestatus[bundle] or
+                "R" in self.bundlestatus[bundle]):
+                return True
+
+    def getModifiedPackages(self, packages=None):
+        wsPath = self.config.getWorkspaceById(self.workspaceId).path
+        wsMetaPath = self.config.getWorkspaceMetadataPath(self.workspaceId)
+        wsdiff = WorkspaceDiff(wsPath, wsMetaPath, self.Output)
+        wsdiff.start(packages, bundlediff=True)
+        return [k for k, v in wsdiff.output.bundlestatus.iteritems() if "M" in v or "R" in v]
+
+    def cmd_workspace_restore(self, *bundle):
+        ''' Reinstall all bundles or just the specified ones '''
+        self.aptUpdate()
+        if self.options.force:
+            if bundle:
+                packages = bundle
+            else:
+                packages = self.getInstalledPackages()
+        else:
+            packages = self.getModifiedPackages(bundle)
+        if packages:
+            self.apt("--reinstall install " + utils.argsToStr(packages))
+            self.cleanCache()
+        else:
+            print "Modified bundles not found."
+
+    def cmd_update(self):
+        ''' Update all bundles and dependencies '''
+        self.aptUpdate()
+        self.apt("dist-upgrade")
+        self.cleanCache()
+
+    def cmd_bundle_remove(self, bundle1, *bundle):
+        ''' Remove bundle(s) '''
+        self.apt("remove %s" % utils.argsToStr(bundle1, bundle))
+
+    def cmd_bundle_info(self, bundle1, *bundle):
+        ''' Show information about bundles and .deb files '''
+        bundles, debs = self._separateDebs(utils.listify(bundle1, bundle))
+        for name in debs:
+            try:
+                deb = DebFile(name)
+            except DebError, ex:
+                logging.warning("Could not open '%s': %s", name, ex)
+            else:
+                metadata = deb.metadata
+                names = [
+                    "Package", "Version", "Architecture", "Installed-Size", "Depends",
+                    "Replaces", "Provides", "Conflicts", "Priority", "Description"]
+                names += sorted(name for name in metadata if name.startswith("X-"))
+                for name in names:
+                    print "%s: %s" % (name, metadata.get(name, "n/a"))
+            print
+        if bundles:
+            self.aptUpdate()
+            for bundle in bundles:
+                # apt-cache adds an extra blank line after the metadata, so strip it
+                print self.apt("show %s" % bundle, "apt-cache", True).strip()
+                print
+
+    def cmd_search(self, pattern):
+        ''' Search for bundles from repositories '''
+        self.aptUpdate()
+        self.apt("search " + pattern, "apt-cache")
+
+    def cmd_repo_add(self, uri):
+        ''' Add repository to current workspace '''
+        self.checkWorkspace()
+        slist = SourcesList(self.getSourcesListPath())
+        uri = uri.replace("\\", "/")
+        name = self.cmd_options.name
+        if not name:
+            name = utils.uniqueName("repo", [repo.name for repo in slist.repos])
+        try:
+            slist.add(name, uri)
+        except RepoError, ex:
+            raise InvalidUserInput(str(ex))
+        uri = utils.addSuffix(uri, "/")
+        try:
+            utils.openUrl(urlparse.urljoin(uri, "Packages")).close()
+        except IOError, ex:
+            problem = "Repository you are about to add has a problem:\nCannot get the index file: %s" % ex
+            if not uitools.askConfirmation(problem):
+                slist.removeByName(name)
+        # Test updating
+        self.aptUpdate()
+
+    cmd_repo_add._optionparser = CommandOptionParser()
+    cmd_repo_add._optionparser.add_option("-n", "--name", help="Repository name [default: Generated]")
+
+##    def aptUpdateTest(self):
+##        try:
+##            output = self.apt("update", quiet=True)
+##            return (not any(line.startswith("Err") for line in output.splitlines()), output)
+##        except ExternalProcessError, ex:
+##            return (False, ex.output)
+
+    def cmd_repo_remove(self, id_or_name):
+        ''' Remove repository from current workspace '''
+        self.checkWorkspace()
+        slist = SourcesList(self.getSourcesListPath())
+        try:
+            if id_or_name.isdigit():
+                idnum = id_or_name
+            else:
+                idnum = slist.getRepoIdByName(id_or_name)
+            repo = slist.getRepo(idnum)
+        except RepoError, ex:
+            raise InvalidUserInput(str(ex))
+        # Remove possibly downloaded comps file
+        try:
+            os.remove(self.getRepoCompsPath(repo.name))
+        except OSError:
+            pass # comps.xml not found
+        slist.remove(idnum)
+
+    def cmd_repo_list(self):
+        ''' List repositories in current workspace '''
+        self.checkWorkspace()
+        slist = SourcesList(self.getSourcesListPath())
+        if slist.repos:
+            print "\n\n".join("%d\n  Name: %s\n  URI: %s" % (i+1, name, uri)
+                              for i, (name, uri) in enumerate(slist.repos))
+        else:
+            print self.NO_REPOS_DEFINED
+
+    def cmd_advanced_bundle_depends(self, bundle1, *bundle):
+        ''' List bundle dependencies '''
+        self.apt("depends %s" % utils.argsToStr(bundle1, bundle), "apt-cache")
+
+    def cmd_advanced_bundle_rdepends(self, bundle1, *bundle):
+        ''' List bundle reverse dependencies '''
+        self.apt("rdepends %s" % utils.argsToStr(bundle1, bundle), "apt-cache")
+
+    def cmd_find_owner(self, path):
+        ''' Find owner bundle of a file '''
+        self.checkWorkspace()
+        isPath = "\\" in path or "/" in path
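+        # A bare file name is passed to dpkg -S as-is; anything containing a path
+        # separator is made workspace-relative first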
+        if isPath:
+            path = os.path.normpath(os.path.abspath(path))
+            wsPath = self.config.getWorkspacePath(self.workspaceId)
+            if utils.pathInside(wsPath, path, False):
+                path = utils.removeStart(path, utils.addPathSep(os.path.normpath(wsPath)))
+            else:
+                raise InvalidUserInput("Path not inside current workspace '%s'" % wsPath)
+        path = path.replace("\\", "/")
+        self.dpkg("-S " + path)
+
+    def getInstalledPackages(self, arg=None):
+        if arg is None:
+            arg = []
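+        # dpkg --get-selections prints "<package> <status>" lines; keep only
+        # packages whose status is "install"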
+        output = self.dpkg("--get-selections " + utils.argsToStr(arg), True)
+        return [line.split()[0] for line in output.splitlines() if line.split()[1] == "install"]
+
+    def cmd_bundle_list(self, *pattern):
+        ''' List installed bundles (optional wildcard search) '''
+        print "\n".join(self.getInstalledPackages(pattern))
+        #self.dpkg("--list " + utils.argsToStr(arg)) # This gives more detailed stuff
+
+    def cmd_bundle_list_files(self, bundle):
+        ''' List files of an installed bundle or a .deb file '''
+        if utils.isFile(bundle, ".deb"):
+            self.setWorkspace() # no workspace needed
+            self.dpkg("--contents " + '"' + bundle + '"')
+        else:
+            self.dpkg("--listfiles " + bundle)
+
+    def cmd_advanced_apt_get(self, *arg):
+        ''' Pass commands/options to apt-get '''
+        self.setWorkspace() # no workspace needed
+        self.apt(utils.argsToStr(arg))
+
+    def cmd_advanced_apt_cache(self, *arg):
+        ''' Pass commands/options to apt-cache '''
+        self.setWorkspace() # no workspace needed
+        self.apt(utils.argsToStr(arg), "apt-cache")
+
+    def cmd_advanced_dpkg(self, *arg):
+        ''' Pass commands/options to dpkg '''
+        self.setWorkspace() # no workspace needed
+        self.dpkg(utils.argsToStr(arg))
+
+    def cmd_advanced_workspace_export(self, outputfile):
+        '''Export workspace metadata to a file
+
+        Workspace is selected by normal selection procedure.
+        '''
+        self.checkWorkspace()
+        tar = tarfile.open(outputfile, "w:gz")
+        tar.add(self.config.getWorkspaceMetadataPath(self.workspaceId), arcname="",
+                exclude=lambda f: f.lower().endswith(".deb"))
+        tar.close()
+
+    def cmd_advanced_workspace_import(self, inputfile, path):
+        ''' Import workspace metadata from a file '''
+        try:
+            tar = tarfile.open(inputfile, "r")
+        except IOError:
+            raise IOError("File '%s' does not exist." % inputfile)
+        name = self.cmd_options.name
+        if not name:
+            name = self.getUniqueWorkspaceName()
+        # This is for add workspace command
+        self.cmd_options.name = name
+        self.cmd_workspace_add(path)
+        tar.extractall(self.config.getWorkspaceMetadataPath(self.config.getWorkspaceIdByName(name)))
+        tar.close()
+
+    cmd_advanced_workspace_import._optionparser = CommandOptionParser()
+    cmd_advanced_workspace_import._optionparser.add_option("-n", "--name", help="Workspace name [default: Generated]")
+
+    def cmd_advanced_workspace_property_list(self):
+        ''' List workspace properties '''
+        self.checkWorkspace()
+        properties = self.config.getWorkspaceById(self.workspaceId).getPropertyValues()
+        for k, v in properties.iteritems():
+            print "%s = %s" % (k, v)
+
+    def cmd_advanced_workspace_property_set(self, prop, value):
+        ''' Set workspace property '''
+        self.checkWorkspace()
+        try:
+            self.config.getWorkspaceById(self.workspaceId).setPropertyValue(prop, value)
+        except WorkspaceConfigError, ex:
+            raise GenericError(str(ex))
+
+    def cmd_advanced_workspace_clean_cache(self):
+        ''' Clean workspace bundle cache '''
+        self.apt("clean")
+
+    def cmd_workspace_list_changes(self, *bundle):
+        ''' List changes to a workspace or specific bundle(s) '''
+        self.checkWorkspace()
+        if bundle and self.cmd_options.new:
+            self.cmd_workspace_list_changes._optionparser.error("Option new and bundle argument are mutually exclusive")
+        if self.cmd_options.bundlediff and self.cmd_options.new:
+            self.cmd_workspace_list_changes._optionparser.error("Options new and bundlediff are mutually exclusive")
+
+        wsPath = self.config.getWorkspaceById(self.workspaceId).path
+        wsMetaPath = self.config.getWorkspaceMetadataPath(self.workspaceId)
+        wsdiff = WorkspaceDiff(wsPath, wsMetaPath)
+        optiondict = self.cmd_options.__dict__
+        # If no mode options given, revert to defaults
+        if (not self.cmd_options.new and
+            not self.cmd_options.modified and
+            not self.cmd_options.removed and
+            not self.cmd_options.unmodified):
+            optiondict["new"] = False
+            optiondict["modified"] = True
+            optiondict["removed"] = True
+            optiondict["unmodified"] = False
+
+        wsdiff.start(bundle, **optiondict)
+
+    cmd_workspace_list_changes._optionparser = CommandOptionParser()
+    cmd_workspace_list_changes._optionparser.add_option("-n", "--new", action="store_true",
+                                                       help="List new files [default: False]")
+    cmd_workspace_list_changes._optionparser.add_option("-m", "--modified", action="store_true",
+                                                       help="List modified files [default: True]")
+    cmd_workspace_list_changes._optionparser.add_option("-r", "--removed", action="store_true",
+                                                       help="List removed files [default: True]")
+    cmd_workspace_list_changes._optionparser.add_option("-u", "--unmodified", action="store_true",
+                                                       help="List unmodified files [default: False]")
+    cmd_workspace_list_changes._optionparser.add_option("-d", "--dir", action="append", dest="dirs", metavar="DIR",
+                                                       help="Add dir which is used as a inclusion filter (you can set many)")
+    cmd_workspace_list_changes._optionparser.add_option("-b", "--bundlediff", action="store_true",
+                                                       help="List only bundles that have changed [default: %default]")
+    cmd_workspace_list_changes._optionparser.set_defaults(new=False, modified=False, removed=False, unmodified=False, bundlediff=False)
+
+    def getWorkspaceProperty(self, name):
+        ''' Falls back to the global configuration when the workspace does not set the property '''
+        workspace = self.config.getWorkspaceById(self.workspaceId)
+        value = workspace.getPropertyValue(name, getDefault=False)
+        if value is None:
+            value = self.config.getPropertyValue(name)
+        return value
+
+    def cleanCache(self):
+        if not self.getWorkspaceProperty("cache-bundles"):
+            self.apt("clean")
+
+    def getSourcesListPath(self):
+        wsMetaPath = self.config.getWorkspaceMetadataPath(self.workspaceId)
+        return os.path.join(wsMetaPath, "apt", "sources.list")
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/python/bundle.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,28 @@
+#!/usr/bin/python
+
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Bundle main module
+#
+
+from cmdlineapp import CmdlineApp
+from bundlecommand import BundleCommand
+
+def main():
+    app = CmdlineApp(BundleCommand, "bundle", "blocks_info")
+    app.run()
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/python/bundlecommand.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,464 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# All bundle commands are here
+#
+
+import os
+import logging
+import tempfile
+import shutil
+import glob
+
+import Blocks.Packaging as BP
+from Blocks.Packaging.DependencyProcessors.RomPatchProcessor import RomPatchDependencyProcessor
+from Blocks.Packaging.DependencyProcessors.RaptorDependencyProcessor import DotDeeDependencyProcessor
+from Blocks.Packaging.DependencyProcessors.DefaultProcessors import BuildDataDependencyProcessor
+from Blocks.Packaging.DataSources.LinkInfoToBuildData import LinkInfo
+from Blocks.debfile import DebFile, DebError
+import Blocks.gpg as gpg
+
+from cmdlinecommand import CmdLineCommand, CommandOptionParser
+import utils
+import elementtree as ET
+from generalexceptions import InvalidFileInput, InvalidUserInput, GenericError, ExternalProcessError
+from UserConfiguration.UserConfiguration import UserConfiguration
+from cmdlineutils import Dpkg, CmdLineUtil
+import comps
+
+GPG_PASSPHRASEFILE_ENVVAR = "BLOCKS_GPG_PASSPHRASEFILE"
+
+class BlocksDependency(object):
+    ''' Blocks workspace dependency resolver '''
+    def __init__(self, root):
+        self.dpkg = None
+        paths = self._getWorkspacePaths(root)
+        if paths:
+            self.dpkg = Dpkg(*paths)
+
+    def getPackageName(self, path):
+        if self.dpkg:
+            try:
+                output = self.dpkg("-S %s" % path, True)
+            except ExternalProcessError:
+                pass
+            else:
+                packagename = output.split(":", 1)[0]
+                return packagename
+
+    @staticmethod
+    def _getWorkspacePaths(path):
+        config = UserConfiguration()
+        workspacePaths = [(config.getWorkspacePath(wsid), config.getWorkspaceMetadataPath(wsid))
+                          for wsid in config.getWorkspaces()]
+        for wsroot, metapath in workspacePaths:
+            if utils.pathInside(wsroot, path):
+                return (wsroot, metapath)
+
+class BundleCommand(CmdLineCommand):
+    ''' Bundle commands '''
+    def cmd_create(self, xmlfile):
+        ''' Create bundle(s) '''
+        logFormat, logTimeFormat = utils.getConsoleLogFormat()
+        BP.Logging.setupLogHandling(utils.loglevel, logFormat=logFormat, logTimeFormat=logTimeFormat)
+        targetroot = os.path.abspath(self.cmd_options.workspaceroot)
+        sourceroot = os.path.abspath(self.cmd_options.sourceroot or targetroot)
+
+        file_directives_path, pkg_directives_path = self.getDirectivePaths(self.cmd_options.directives)
+        storage = BP.OneoffStorage(self.cmd_options.metadir)
+        utils.createDir(self.cmd_options.outputdir)
+        packager = BP.Packager(storage=storage, packageOutputDirectory=self.cmd_options.outputdir,
+                               targetRules=file_directives_path, sourceRules=file_directives_path,
+                               directives=pkg_directives_path, keepGoing=False)
+
+        # Dependency processors
+        packager.globalFileMap.registerExternalSource(BlocksDependency(sourceroot))
+        packager.addProcessor(RomPatchDependencyProcessor)
+        packager.addProcessor(BuildDataDependencyProcessor, None)
+        if self.cmd_options.sbs_dep_log:
+            packager.addProcessor(DotDeeDependencyProcessor, {"directivesPath": pkg_directives_path})
+
+        try:
+            tree = ET.MyElementTree(file=xmlfile)
+        except SyntaxError, ex:
+            raise GenericError("Bundle XML-file %s" % ex)
+
+        for bundle in tree.getiterator("bundle"):
+            component = BP.PlainBuildData()
+            try:
+                component.setTargetRoot(targetroot)
+                component.setSourceRoot(sourceroot)
+                component.setComponentName(bundle.findtext("name"))
+                component.setComponentVersion(bundle.findtext("version"))
+                for attribute in bundle.findall("meta"):
+                    attrName = attribute.get("name")
+                    if not attrName:
+                        raise ValueError("Name for attribute not defined")
+                    component.attributes[attrName] = attribute.text
+            except ValueError, ex:
+                raise InvalidFileInput("XML-file error: %s" % ex)
+            targets = bundle.find("targets")
+            # Note: an Element with no children is falsy, so test against None explicitly
+            if targets is not None:
+                targetFiles = [utils.relativePath(t.text, targetroot) for t in targets.findall("target")]
+                logging.log(logging.DEBUG2, "Target files: %s", targetFiles)
+                component.addTargetFiles(targetFiles)
+            sources = bundle.find("sources")
+            if sources is not None:
+                sourceFiles = [utils.relativePath(t.text, sourceroot) for t in sources.findall("source")]
+                logging.log(logging.DEBUG2, "Source files: %s", sourceFiles)
+                component.addSourceFiles(sourceFiles)
+
+            # Dependency processing
+            component.dependencyData["DotDeeDependencyProcessor"] = {}
+            if self.cmd_options.sbs_build_dir:
+                component.dependencyData["DotDeeDependencyProcessor"]["buildDir"] = self.cmd_options.sbs_build_dir
+            bldinfPaths = [e.text for e in bundle.findall("bldinf")]
+            component.dependencyData["DotDeeDependencyProcessor"]["infs"] = bldinfPaths
+            logging.debug("Bldinfs: %s", bldinfPaths)
+            if self.cmd_options.sbs_dep_log:
+                linkinfo = LinkInfo(self.cmd_options.sbs_dep_log)
+                if bldinfPaths:
+                    for infpath in bldinfPaths:
+                        linkinfo.addDependencies(component, os.path.abspath(infpath))
+                else:
+                    linkinfo.addDependencies(component)
+                component.addNonstandardInterfaces(linkinfo.getNonstandardInterfaces())
+            packager.addComponent(component)
+        try:
+            packager.wait()
+        except BP.PackagingError, ex:
+            raise GenericError("Packaging error: %s" % ex)
+
+    @staticmethod
+    def getDirectivePaths(name):
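+        # Search conf/directives/<name>/ for file- and pkg-directives.xml, first
+        # under the metadata path and then under the install directory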
+        paths = [None, None]
+        if not name:
+            return paths
+        for path in (utils.getMetaPath(), utils.getInstallDir()):
+            directive_path = os.path.join(path, "conf", "directives", name)
+            if os.path.isdir(directive_path):
+                file_directives_path = os.path.join(directive_path, "file-directives.xml")
+                pkg_directives_path = os.path.join(directive_path, "pkg-directives.xml")
+                if os.path.isfile(file_directives_path):
+                    paths[0] = file_directives_path
+                if os.path.isfile(pkg_directives_path):
+                    paths[1] = pkg_directives_path
+            if not paths[0] and not paths[1]:
+                logging.debug("Directive '%s' not found from '%s'", name, directive_path)
+            else:
+                break
+        if not paths[0] and not paths[1]:
+            raise InvalidUserInput("Directive '%s' not found" % name)
+        logging.debug("File directive: %s, Package directive: %s", paths[0], paths[1])
+        return paths
+
+    cmd_create._optionparser = CommandOptionParser()
+    cmd_create._optionparser.add_option("-w", "--workspaceroot", metavar="DIR", help="Target files root [Default: Current directory]")
+    cmd_create._optionparser.add_option("-s", "--sourceroot", metavar="DIR", help="Source files root [Default: Same as workspace root]")
+    cmd_create._optionparser.add_option("-m", "--metadir", metavar="DIR", help="Directory for meta-data storage [Default: Temp directory]")
+    cmd_create._optionparser.add_option("-o", "--outputdir", metavar="DIR", help="Output directory [Default: '%default']")
+    cmd_create._optionparser.add_option("-l", "--sbs-dep-log", metavar="FILE", help="Blocks SBS-plugin log file")
+    cmd_create._optionparser.add_option("-b", "--sbs-build-dir", metavar="DIR", help="SBS build directory")
+    cmd_create._optionparser.add_option("-d", "--directives", metavar="name", help="Override default directives (e.g. single-bundle)")
+    cmd_create._optionparser.set_defaults(workspaceroot=".", outputdir="bundles")
+
+    def cmd_create_xml(self, xmlfile):
+        ''' Create XML-file for use with create command '''
+        if self.cmd_options.append:
+            tree = ET.MyElementTree(file=xmlfile)
+        else:
+            tree = ET.MyElementTree(ET.Element("bundles"))
+
+        bundle = ET.SubElement(tree.getroot(), "bundle")
+        ET.SubElement(bundle, "name").text = self.cmd_options.name
+        ET.SubElement(bundle, "version").text = self.cmd_options.version
+        for attr in self.cmd_options.attribute:
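+            # e.g. --attribute vendor=Nokia becomes <meta name="vendor">Nokia</meta>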
+            key, _, value = attr.partition("=")
+            if value:
+                meta = ET.SubElement(bundle, "meta")
+                meta.text = value
+                meta.set("name", key)
+
+        targets = ET.SubElement(bundle, "targets")
+        sources = ET.SubElement(bundle, "sources")
+        for path in utils.getFileLines(self.cmd_options.targetfiles_file):
+            ET.SubElement(targets, "target").text = path
+            utils.warnIfFileNotFound(path)
+        for path in utils.getFileLines(self.cmd_options.sourcefiles_file):
+            ET.SubElement(sources, "source").text = path
+            utils.warnIfFileNotFound(path)
+
+        for name in self.getFiles(self.cmd_options.target):
+            ET.SubElement(targets, "target").text = name
+        for name in self.getFiles(self.cmd_options.source):
+            ET.SubElement(sources, "source").text = name
+
+        tree.write(xmlfile)
+
+    cmd_create_xml._optionparser = CommandOptionParser()
+    cmd_create_xml._optionparser.add_option("-n", "--name", help="Component name [Default: %default]")
+    cmd_create_xml._optionparser.add_option("-v", "--version", help="Component version [Default: %default]")
+    cmd_create_xml._optionparser.add_option("-t", "--target", action="append", metavar="FILE/DIR", help="Add target file/dir")
+    cmd_create_xml._optionparser.add_option("-s", "--source", action="append", metavar="FILE/DIR", help="Add source file/dir")
+    cmd_create_xml._optionparser.add_option("-T", "--targetfiles-file", metavar="FILE", help="Specify file which contains target files")
+    cmd_create_xml._optionparser.add_option("-S", "--sourcefiles-file", metavar="FILE", help="Specify file which contains source files")
+    cmd_create_xml._optionparser.add_option("-a", "--attribute", action="append", metavar="NAME=VALUE", help="Add bundle attribute")
+    cmd_create_xml._optionparser.add_option("--append", action="store_true",
+                                            help="Append component to XML-file instead of overwriting it [Default: %default]")
+    cmd_create_xml._optionparser.set_defaults(name="bundle", version="1.0.0", target=[], source=[], attribute=[], append=False)
+
+    @staticmethod
+    def getFiles(pathlist):
+        '''
+        Get a list of files from pathlist.
+
+        Plain files are added as-is; directories are walked recursively.
+        '''
+        files = []
+        for path in pathlist:
+            path = os.path.abspath(path)
+            if os.path.isdir(path):
+                files.extend(utils.getFilesRecursive(path))
+            else:
+                files.append(path)
+                utils.warnIfFileNotFound(path)
+        return files
+
+    def cmd_compare(self, bundle1, bundle2):
+        ''' Compare two bundles for differences '''
+        for name in (bundle1, bundle2):
+            if not os.path.isfile(name):
+                raise IOError("Bundle file '%s' not found" % name)
+
+##        if filecmp.cmp(bundle1, bundle2, False):
+##            print "Files are identical"
+##            return
+
+        try:
+            deb1 = DebFile(bundle1)
+            deb2 = DebFile(bundle2)
+        except DebError, ex:
+            raise IOError(str(ex))
+        metadiff, filediff = deb1.compare(deb2)
+        if filediff.changed:
+            tempdir = tempfile.mkdtemp(prefix="bundle-compare")
+            try:
+                bundledir1 = os.path.join(tempdir, "bundle1")
+                bundledir2 = os.path.join(tempdir, "bundle2")
+                os.mkdir(bundledir1)
+                os.mkdir(bundledir2)
+                for filename in filediff.changed:
+                    deb1.extractData(filename, bundledir1)
+                    deb2.extractData(filename, bundledir2)
+                changedfiles = self._getEffectivelyChangedFiles(bundledir1, bundledir2, filediff.changed)
+                filediff = filediff._replace(changed=changedfiles)
+            finally:
+                shutil.rmtree(tempdir)
+
+        metadataDifferent = any(metadiff)
+        filesDifferent = any(filediff)
+        if metadataDifferent or filesDifferent:
+            if metadataDifferent:
+                print "Metadata differences:"
+                for name in sorted(metadiff.new):
+                    print "N %s: %s" % (name, deb2.metadata[name])
+                for name in sorted(metadiff.removed):
+                    print "R %s: %s" % (name, deb1.metadata[name])
+                for name in sorted(metadiff.changed):
+                    print "M %s: %s -> %s" % (name, deb1.metadata[name], deb2.metadata[name])
+                if filesDifferent:
+                    print
+            if filesDifferent:
+                print "File differences:"
+                for name in sorted(filediff.new):
+                    print "N %s" % name
+                for name in sorted(filediff.removed):
+                    print "R %s" % name
+                for name in sorted(filediff.changed):
+                    print "M %s" % name
+        else:
+            print "Bundle content identical"
+
+    @staticmethod
+    def _getEffectivelyChangedFiles(path1, path2, files):
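+        # Repackage both extracted file trees with identical build data and diff
+        # the resulting components, so only real content changes are reported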
+        diffFiles = []
+
+        bd1 = BP.PlainBuildData()
+        bd1.setTargetRoot(path1)
+        bd1.addTargetFiles(files)
+        bd1.setComponentName("temp")
+
+        bd2 = BP.PlainBuildData()
+        bd2.setTargetRoot(path2)
+        bd2.addTargetFiles(files)
+        bd2.setComponentName("temp")
+
+        builder = BP.ComponentBuilder.ComponentBuilder()
+        c1 = builder.createComponent(BP.OneoffStorage(), bd1)
+        c2 = builder.createComponent(BP.OneoffStorage(), bd2)
+        differentPackages = c1.diff(c2)[3]
+        for package in differentPackages:
+            p1 = c1.getPackage(package)
+            p2 = c2.getPackage(package)
+            diffFiles.extend(p1.diff(p2)[3])
+
+        return diffFiles
+
+    @staticmethod
+    def _validateSignType(signtype):
+        if len(signtype) > 10:
+            raise InvalidUserInput("Signature type length must be between 1 and 10 characters")
+        if not signtype.isalpha():
+            raise InvalidUserInput("Signature type must contain only alphabets")
+
+    def cmd_sign(self, bundle1, *bundle):
+        ''' Sign bundle(s) '''
+        self._validateSignType(self.cmd_options.type)
+        for bundle in [bundle1] + list(bundle):
+            deb = None
+            try:
+                deb = DebFile(bundle, "a")
+                if self.cmd_options.clear:
+                    try:
+                        deb.removeSignature()
+                    except DebError:
+                        logging.warning("No signatures to remove")
+                elif deb.signatureExists(self.cmd_options.type):
+                    deb.removeSignature(self.cmd_options.type)
+                    logging.warning("Overwriting existing signature type '%s' on bundle '%s'", self.cmd_options.type, bundle)
+                passphraseFile = os.environ.get(GPG_PASSPHRASEFILE_ENVVAR)
+                deb.addSignature(self.cmd_options.type, gpgPassfile=passphraseFile)
+            except DebError, ex:
+                raise GenericError("Signing failed on bundle '%s': %s" % (bundle, ex.error))
+            if deb:
+                deb.close()
+
+    cmd_sign._optionparser = CommandOptionParser()
+    cmd_sign._optionparser.add_option("-t", "--type", help="Signature type [Default: %default]")
+    cmd_sign._optionparser.add_option("-c", "--clear", action="store_true", help="Clear all existing signatures [Default: %default]")
+    cmd_sign._optionparser.set_defaults(type="origin", clear=False)
+
+    def cmd_sign_remove(self, bundle1, *bundle):
+        ''' Remove signatures from bundle(s) '''
+        self._validateSignType(self.cmd_options.type)
+        if self.cmd_options.type == "all":
+            self.cmd_options.type = None
+        for bundle in [bundle1] + list(bundle):
+            deb = None
+            try:
+                deb = DebFile(bundle, "a")
+                deb.removeSignature(self.cmd_options.type)
+            except DebError, ex:
+                raise GenericError("Signature removal failed on bundle '%s': %s" % (bundle, ex.error))
+            if deb:
+                deb.close()
+
+    cmd_sign_remove._optionparser = CommandOptionParser()
+    cmd_sign_remove._optionparser.add_option("-t", "--type", help="Signature type [Default: %default]")
+    cmd_sign_remove._optionparser.set_defaults(type="all")
+
+    def cmd_verify(self, bundle1, *bundle):
+        ''' Verify signatures in bundle(s) '''
+        self._validateSignType(self.cmd_options.type)
+        if self.cmd_options.type == "all":
+            self.cmd_options.type = None
+        for bundle in [bundle1] + list(bundle):
+            status = []
+            deb = None
+            try:
+                deb = DebFile(bundle, "r")
+                status = deb.verifySignature(self.cmd_options.type)
+            except DebError, ex:
+                raise GenericError("Signature verification failed on bundle '%s': %s" % (bundle, ex.error))
+            if deb:
+                deb.close()
+            for signTypeStatus in status:
+                if signTypeStatus.status.code == gpg.GpgStatusCode.VERIFIED:
+                    print "Verified signature type '%s' from '%s' in bundle '%s'." % (signTypeStatus.type, signTypeStatus.status.info.name, bundle)
+                elif signTypeStatus.status.code == gpg.GpgStatusCode.BADSIG:
+                    raise GenericError("Bad signature with type '%s' in bundle '%s'!" % (signTypeStatus.type, bundle))
+                elif signTypeStatus.status.code == gpg.GpgStatusCode.NO_PUBKEY:
+                    raise GenericError("Signature with type '%s' in bundle '%s' can't be verified because public key is not available." % (signTypeStatus.type, bundle))
+                elif signTypeStatus.status.code == gpg.GpgStatusCode.KEYEXPIRED:
+                    raise GenericError("Public key for bundle '%s' with signature type '%s' has been expired." % (bundle, signTypeStatus.type))
+                elif signTypeStatus.status.code == gpg.GpgStatusCode.REVKEYSIG:
+                    raise GenericError("Public key for bundle '%s' with signature type '%s' has been revoked." % (bundle, signTypeStatus.type))
+                else:
+                    raise GenericError("Unknown verification error with signature type '%s' for bundle '%s'" % (signTypeStatus.type, bundle))
+
+    cmd_verify._optionparser = CommandOptionParser()
+    cmd_verify._optionparser.add_option("-t", "--type", help="Signature type [Default: %default]")
+    cmd_verify._optionparser.set_defaults(type="all")
+
+    def cmd_sign_list(self, bundle):
+        ''' List signature types in a bundle '''
+        try:
+            deb = DebFile(bundle, "r")
+            signs = deb.getSignatures()
+            if signs:
+                print "Signature types:"
+                print "\n".join(signs)
+            else:
+                print "No signatures found"
+        except DebError, ex:
+            raise IOError(ex.error)
+
+    TEMPLATE_FILE = os.path.join(utils.getInstallDir(), "conf", "comps_template.xml")
+    def cmd_create_repo(self, path):
+        ''' Create/update repository index '''
+        oldDir = os.getcwd()
+        try:
+            os.chdir(path)
+        except OSError:
+            raise InvalidUserInput("Path '%s' not found" % path)
+        if not any(glob.iglob("*.deb")):
+            logging.warning("Path '%s' does not contain any bundles(*.deb).\nCreating empty repository.", path)
+
+        ftparchive = CmdLineUtil("apt-ftparchive")
+        ftparchive("packages . > Packages")
+        if self.cmd_options.sign:
+            ftparchive("release . > Release")
+            passphraseFile = os.environ.get(GPG_PASSPHRASEFILE_ENVVAR)
+            try:
+                gpg.sign("Release", "Release.gpg", passfile=passphraseFile)
+            except gpg.GpgError, ex:
+                raise InvalidUserInput(str(ex.output))
+
+        os.chdir(oldDir)
+        if not self.cmd_options.no_comps:
+            self._createComps(path, self.cmd_options.comps_template)
+
+    cmd_create_repo._optionparser = CommandOptionParser()
+    cmd_create_repo._optionparser.add_option("-s", "--sign", action="store_true", help="Sign repository [Default: %default]")
+    cmd_create_repo._optionparser.add_option("-t", "--comps-template", metavar="FILE", help="comps.xml template [Default: %default]")
+    cmd_create_repo._optionparser.add_option("--no-comps", action="store_true", help="Do not generate group information (comps.xml) [Default: %default]")
+    cmd_create_repo._optionparser.set_defaults(sign=False, comps_template=TEMPLATE_FILE, no_comps=False)
+
+    @staticmethod
+    def _createComps(path, template):
+        try:
+            compsTemplate = comps.CompsTemplate(template)
+        except SyntaxError, ex:
+            raise InvalidFileInput("Error in comps template:\n%s" % ex)
+        except IOError, ex:
+            raise IOError("Comps template: %s" % ex)
+
+        for bundle in glob.glob(os.path.join(path, "*.deb")):
+            try:
+                deb = DebFile(bundle)
+            except DebError, ex:
+                raise InvalidFileInput("Error on bundle '%s': %s" % (bundle, ex))
+            compsTemplate.applyRule(deb.metadata)
+            deb.close()
+        absPath = os.path.join(os.path.abspath(path), "comps.xml")
+        compsTemplate.create(absPath)
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/python/cmdlineapp.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,117 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Blocks style command line application class
+#
+
+from optparse import OptionParser, SUPPRESS_HELP
+import logging
+import pprint
+import sys
+
+import utils
+from cmdlinecommand import CmdLineCommand
+from generalexceptions import InvalidUserInput, InvalidFileInput, ExternalProcessError, GenericError
+from UserConfiguration.UserConfiguration import BadWorkspace
+
+class CmdlineApp(object):
+    ''' Command line application '''
+
+    def __init__(self, cmdlineCommand, prog, versionInfo):
+        ''' cmdlineCommand is a CmdLineCommand subclass '''
+        self.cmdlineCommand = cmdlineCommand
+        self.prog = prog
+        self.versionInfo = versionInfo
+        self.parser = None
+
+    def initParser(self):
+        self.parser = OptionParser(prog=self.prog, add_help_option=False)
+        self.parser.disable_interspersed_args()
+        self.parser.version = utils.getVersion(self.versionInfo)
+        self.parser.add_option("--version", action="version", help="Show version information and exit")
+        self.parser.add_option("--help", "-h", action="help", help="Show list of all commands and exit")
+        self.parser.add_option("--verbose", "-v", action="count", help="Be verbose (add more for debug output)")
+        self.parser.add_option("--vmax", action="store_true", help=SUPPRESS_HELP)
+        self.parser.add_option("--psyco", action="store_true", help=SUPPRESS_HELP)
+        self.parser.add_option("--quiet", "-q", action="count", help="Be quiet (use -qq for very quiet operation)")
+        self.parser.set_defaults(verbose=0, quiet=0)
+
+    def parseCmdline(self):
+        self.initParser()
+        # Show advanced help if help requested
+        # If parse_args does not exit, help was not requested
+        self.parser.set_usage(self.cmdlineCommand.getUsage(True))
+        (options, args) = self.parser.parse_args()
+        self.parser.set_usage(self.cmdlineCommand.getUsage(False))
+        options.verbose = 100 if options.vmax else options.verbose
+        return (options, args)
+
+    def init(self):
+        '''Override this for initialization.
+
+        Logging is initialized before this method
+        '''
+        pass
+
+    def run(self):
+        options, args = self.parseCmdline()
+        options.verbose = options.verbose - options.quiet
+        try:
+            utils.setupLogging(self.parser.get_version(), options.verbose, self.prog + "_log.txt")
+        except GenericError, ex:
+            sys.exit(str(ex))
+        logging.debug("Command-line args: %s", " ".join(sys.argv[1:]))
+        logging.debug("Options:\n %s", pprint.pformat(options.__dict__))
+
+        if options.psyco:
+            try:
+                import psyco
+                psyco.full()
+            except ImportError:
+                logging.warning("Psyco extension not available. Not using psyco optimization.")
+
+        self.init()
+
+        if len(args) == 0:
+            self.parser.print_help()
+        else:
+            try:
+                command = self.cmdlineCommand(options)
+                command(args)
+            except ExternalProcessError, ex:
+                # If the exception carries the captured output, the external process
+                # ran quietly and printed nothing to stdout, so report the error here
+                utils.error("Error: %s" % ex, bool(ex.output), ex.blocksRetcode)
+            except CmdLineCommand.CommandNotAvailableError, ex:
+                self.parser.print_help()
+                print
+                utils.error("Error: %s" % ex, retCode=2)
+            except (CmdLineCommand.ArgumentError, InvalidUserInput), ex:
+                print self.cmdlineCommand.getCommandUsage(args[0])
+                print
+                utils.error("Error: %s" % ex, retCode=2)
+            except BadWorkspace, ex:
+                utils.error("Workspace error: %s" % ex)
+            except IOError, ex:
+                utils.error("I/O Error: %s" % ex)
+            except InvalidFileInput, ex:
+                utils.error("Input file error: %s" % ex)
+            except GenericError, ex:
+                utils.error("Error: %s" % ex)
+            except KeyboardInterrupt, ex: # pragma: no cover
+                utils.error("User interrupt. Exiting...")
+            except AssertionError, ex: # pragma: no cover
+                utils.error("Assertion error: %s" % ex, True)
+            except Exception, ex: # pragma: no cover
+                utils.error("Unexpected error: %s" % ex, True)
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/python/cmdlinecommand.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,186 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Command line command dispatcher base class
+#
+
+import inspect
+import logging
+from optparse import OptionParser
+import pprint
+from collections import namedtuple
+
+import utils
+
+ParameterInfo = namedtuple("ParameterInfo", "args, varargs, defaults, minArgCount, maxArgCount, cmd")
+
+class CommandOptionParser(OptionParser):
+    ''' Option parser for commands '''
+
+    def __init__(self, *args, **kwargs):
+        OptionParser.__init__(self, usage="", add_help_option=False, *args, **kwargs)
+
+    def error(self, msg):
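+        # Raise instead of exiting (the OptionParser default) so the command
+        # dispatcher can print command-specific usage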
+        raise CmdLineCommand.ArgumentError(msg)
+
+class CmdLineCommand(object):
+    ''' Run commands '''
+
+    COMMAND_PREFIX = "cmd_"
+    ADVANCED_COMMAND_PREFIX = COMMAND_PREFIX + "advanced_"
+
+    INFINITE = 99999
+
+    class ArgumentError(Exception):
+        """ Wrong argument count for command """
+
+    class CommandNotAvailableError(Exception):
+        """ Command not available """
+
+    def __init__(self, options):
+        utils.initSearchPath()
+        self.options = options
+        self.cmd_options = {}
+
+    @classmethod
+    def getUsage(cls, advanced=False):
+        usage = "%prog [OPTIONS] COMMAND [COMMAND-OPTIONS] [ARGS]\n\n"
+        commands = cls.getCommands()
+        cmdMaxLen = max(len(cmd) for cmd in commands) - len(cls.COMMAND_PREFIX)
+
+        advCommands = cls.getCommands(True)
+        if advanced:
+            if advCommands:
+                advCmdMaxLen = max(len(cmd) for cmd in advCommands) - len(cls.ADVANCED_COMMAND_PREFIX)
+                cmdMaxLen = max(cmdMaxLen, advCmdMaxLen)
+
+        if advCommands:
+            usage += "Commonly used commands:"
+        else:
+            usage += "Commands:"
+        usage += cls.formatCmdHelp(commands, cmdMaxLen)
+
+        if advanced and advCommands:
+            usage += "\n\nAdvanced commands:"
+            usage += cls.formatCmdHelp(advCommands, cmdMaxLen)
+
+        return usage
+
+    def cmd_help(self, command):
+        ''' Show help for specific command '''
+        print self.getCommandUsage(command)
+
+    def __call__(self, args):
+        info = self._getCmdParameterInfo(args[0])
+        if info:
+            arguments = args[1:]
+            cmd = info.cmd
+            if hasattr(cmd, "_optionparser"):
+                (self.cmd_options, arguments) = cmd._optionparser.parse_args(arguments)
+                logging.debug("Command options:\n%s", pprint.pformat(self.cmd_options.__dict__))
+
+            argumentCount = len(arguments)
+            if argumentCount < info.minArgCount:
+                raise self.ArgumentError("Not enough arguments for command '%s'" % args[0])
+            elif argumentCount > info.maxArgCount:
+                raise self.ArgumentError("Too many arguments for command '%s'" % args[0])
+            else:
+                cmd(self, *arguments)
+        else:
+            raise self.CommandNotAvailableError("Command '%s' not available" % args[0])
+
+    @classmethod
+    def _getCmdParameterInfo(cls, command):
+        cmd = getattr(cls, cls._cmdNameToInternalName(command), None)
+        if cmd and callable(cmd):
+            # Check function parameters
+            argspec = inspect.getargspec(cmd)
+            assert argspec.keywords is None, "no keyword arguments allowed"
+            # self is not a "real" parameter, so subtract 1
+            posArgCount = len(argspec.args) - 1
+            defArgCount = len(argspec.defaults) if argspec.defaults else 0
+            minArgCount = posArgCount - defArgCount
+            # Infinite for variable parameter functions
+            maxArgCount = posArgCount if not argspec.varargs else cls.INFINITE
+            return ParameterInfo(argspec.args[1:], argspec.varargs, argspec.defaults, minArgCount, maxArgCount, cmd)
+        else:
+            return False
+
+    #TODO: Add automatic alias generation?
+    @classmethod
+    def _cmdNameToInternalName(cls, command):
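+        # e.g. "repo-add" -> "cmd_repo_add", "apt-get" -> "cmd_advanced_apt_get"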
+        command = command.replace("-", "_")
+        cmdName = cls.COMMAND_PREFIX + command
+        advCmdName = cls.ADVANCED_COMMAND_PREFIX + command
+        if hasattr(cls, cmdName):
+            command = cmdName
+        elif hasattr(cls, advCmdName):
+            command = advCmdName
+        return command
+
+    @classmethod
+    def getCommandUsage(cls, command):
+        info = cls._getCmdParameterInfo(command)
+        if info:
+            doc = inspect.getdoc(info.cmd)
+            # If doc string is multi-line string add new-line after it
+            if "\n" in doc:
+                doc += "\n"
+            # Get full doc string from method without extra line break after summary
+            doc = doc.replace("\n", "", 1).strip()
+            usage = "Purpose: %s\nUsage: %s" % (doc, command)
+
+            cmd = getattr(cls, cls._cmdNameToInternalName(command))
+            optionParser = getattr(cmd, "_optionparser", None)
+            if optionParser:
+                usage += " [options]"
+
+            for arg in info.args:
+                usage += " <%s>" % arg
+            if info.varargs:
+                usage += " [%s%d ...]" % (info.varargs, 2 if info.args else 1)
+            if optionParser:
+                usage += "\n\n" + optionParser.format_help().strip()
+            return usage
+        else:
+            return "Command '%s' not available" % command
+
+    @classmethod
+    def getCommands(cls, advanced=False):
+        cmd_list = [n for n in dir(cls) if n.startswith(cls.COMMAND_PREFIX)]
+        if advanced:
+            return [n for n in cmd_list if n.startswith(cls.ADVANCED_COMMAND_PREFIX)]
+        elif advanced is False:
+            return [n for n in cmd_list if not n.startswith(cls.ADVANCED_COMMAND_PREFIX)]
+        else:
+            return cmd_list
+
+    @classmethod
+    def getCommandNames(cls):
+        names = []
+        for cmd in cls.getCommands(None):
+            cmd = cmd.replace("advanced_", "")
+            names.append(cmd[4:].replace("_", "-"))
+        return names
+
+    @classmethod
+    def formatCmdHelp(cls, commands, cmdMaxLen):
+        usage = ""
+        for cmd in commands:
+            usage += "\n"
+            cmdHelp = inspect.getdoc(getattr(cls, cmd))
+            # Get the summary line from doc string
+            cmdHelp = cmdHelp.splitlines()[0].strip() if cmdHelp else ""
+            cmd = cmd.replace("advanced_", "")
+            usage += "  %-*s  " % (cmdMaxLen, cmd[4:].replace("_", "-"))
+            usage += cmdHelp
+        return usage
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/python/cmdlineutils.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,163 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Command line utilities dpkg and apt
+#
+
+import os
+import sys
+import utils
+import logging
+from subprocess import Popen, PIPE, STDOUT
+from generalexceptions import ExternalProcessError
+from string import Template
+
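+# Cap the amount of captured output written to the debug log (roughly 20 lines
+# of 60 columns)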
+MAX_OUTPUT_LOGGING = 60 * 20
+
+class CmdLineUtil(object):
+
+    def __init__(self, binary):
+        self.binary = binary
+        self.retcodeMap = {}
+
+    def __call__(self, cmdstr, quiet=False):
+        cmdstr = "%s %s" % (self.binary, cmdstr)
+        logging.debug("Running: %s", cmdstr)
+
+        # Force real-time output when the log level is more verbose than DEBUG
+        if utils.loglevel < logging.DEBUG:
+            quiet = False
+
+        p = Popen(cmdstr, shell=True,
+            stdin=PIPE if quiet else sys.stdin,
+            stdout=PIPE if quiet else sys.stdout, stderr=STDOUT)
+        stdoutput = p.communicate()[0]
+        stdoutput = stdoutput or ""
+        if len(stdoutput) <= MAX_OUTPUT_LOGGING:
+            logging.debug("\n" + stdoutput)
+        if p.returncode != 0:
+            raise ExternalProcessError(self.binary, p.returncode, stdoutput, self.retcodeMap.get(p.returncode, 1))
+
+        return stdoutput
+
+class PackageManagerUtil(CmdLineUtil):
+
+    def __init__(self, rootDir="", adminDir="", binary=""):
+        CmdLineUtil.__init__(self, binary)
+        # Make sure that paths are compatible with dpkg
+        self.rootDir = rootDir.replace("\\", "/")
+        self.adminDir = adminDir.replace("\\", "/")
+
+class Dpkg(PackageManagerUtil):
+
+    def __init__(self, rootDir="", adminDir="", options=None):
+        PackageManagerUtil.__init__(self, rootDir, adminDir, "dpkg")
+        self.retcodeMap = {2: 2}
+        self.options = options
+
+    def __call__(self, args, quiet=False):
+        if self.options:
+            if self.options.force:
+                args = "--force-depends --force-overwrite " + args
+            else:
+                args = "--no-force-downgrade " + args
+            args = getDpkgDebugOption(self.options.verbose) + " " + args
+
+        extraDpkgOptions = os.environ.get("BLOCKS_DPKG_OPTIONS")
+        if extraDpkgOptions:
+            args += " " + extraDpkgOptions
+
+        cmdstr = '--admindir="%s" --instdir="%s" %s' % (self.adminDir, self.rootDir, args)
+        return PackageManagerUtil.__call__(self, cmdstr, quiet)
+
+class Apt(PackageManagerUtil):
+
+    def __init__(self, rootDir="", adminDir="", options=None):
+        PackageManagerUtil.__init__(self, rootDir, adminDir)
+        self.options = options
+
+        # Always set APT_CONFIG: use BLOCKS_APT_CONFIG when it is defined, otherwise
+        # point APT_CONFIG at the workspace's own apt.conf
+        aptconf = os.environ.get("BLOCKS_APT_CONFIG")
+        if aptconf:
+            logging.debug("BLOCKS_APT_CONFIG defined. Using it in place of APT_CONFIG. Value = %s", aptconf)
+            os.environ["APT_CONFIG"] = aptconf
+        else:
+            os.environ["APT_CONFIG"] = os.path.join(self.adminDir, "apt", "apt.conf")
+
+    def __call__(self, args, binary="apt-get", quiet=False):
+        self.binary = binary
+
+        dpkgOptions = []
+        aptOptions = []
+
+        if binary == "apt-get":
+            aptOptions.append("--allow-unauthenticated") # Temporarily activated (when to remove?)
+
+        extraDpkgOptions = ""
+        if self.options:
+            if self.options.verbose == -1:
+                aptOptions.append("-q")
+            elif self.options.verbose <= -2:
+                aptOptions.append("-qq")
+
+            if self.options.force:
+                if binary == "apt-get":
+                    aptOptions.append("--force-yes")
+                    aptOptions.append("-y")
+                    #aptOptions.append("--allow-unauthenticated")
+                dpkgOptions.append('-o DPkg::Options::="--force-depends" -o DPkg::Options::="--force-overwrite"')
+
+            extraDpkgOptions = getDpkgDebugOption(self.options.verbose)
+            if extraDpkgOptions:
+                extraDpkgOptions += " "
+
+        extraDpkgOptions += os.environ.get("BLOCKS_DPKG_OPTIONS") or ""
+        if extraDpkgOptions:
+            dpkgOptions += ['-o DPkg::Options::="%s"' % o for o in extraDpkgOptions.split()]
+
+        dpkgOptions = " ".join(dpkgOptions)
+        aptOptions = " ".join(aptOptions)
+
+        if binary == "apt-get":
+            extraAptOptions = os.environ.get("BLOCKS_APTGET_OPTIONS")
+        elif binary == "apt-cache":
+            extraAptOptions = os.environ.get("BLOCKS_APTCACHE_OPTIONS")
+        if extraAptOptions:
+            aptOptions += " " + extraAptOptions
+
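+        # Build a self-contained apt invocation: every path (state, cache, logs,
+        # helper binaries, keyring) is redirected into the workspace so the host
+        # system's apt configuration is never touched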
+        t = Template('-o DPkg::Options::="--instdir=$ROOTDIR" -o DPkg::Options::="--admindir=$ADMINDIR" '
+                     '-o DPkg::Options::="--force-bad-verify" '
+                     '-o Dir::State="$APTDIR" -o Dir::State::status="$ADMINDIR/status" -o Dir::Cache="$APTDIR/cache" '
+                     '-o Dir::Log="$APTDIR" -o Dir::Bin::gzip="$BINDIR/gzip" -o Dir::Bin::dpkg="$BINDIR/dpkg" '
+                     '-o Dir::Etc="$APTDIR" -o Dir::Bin::methods="$BINDIR/methods" '
+                     '-o Dir::Bin::gpg="$BINDIR/gpgv" '
+                     '-o APT::GPGV::TrustedKeyring="$KEYRING" '
+                     )
+        blocks_gpg = os.environ.get("BLOCKS_GPGTRUSTED")
+        if blocks_gpg:
+            keyring_path = blocks_gpg
+        else:
+            keyring_path = os.path.normpath(os.path.join(self.adminDir, "..", "trusted.gpg"))
+        cmdstr = t.substitute(ROOTDIR=self.rootDir,
+                              ADMINDIR=self.adminDir,
+                              APTDIR=self.adminDir + "/apt",
+                              BINDIR=utils.getUtilDir(),
+                              KEYRING=keyring_path) + dpkgOptions + " " + aptOptions + " " + args
+
+        return PackageManagerUtil.__call__(self, cmdstr, quiet)
+
+def getDpkgDebugOption(level):
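+    # Map verbosity to dpkg debug switches: 2 -> "-D1", 3 -> "-D11", ...,
+    # capped at "-D1111"; levels below 2 produce no switch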
+    level = min(level, 5)
+    return "" if level < 2 else "-D" + "1" * (level - 1)
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/python/comps.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,197 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# comps.xml manager
+#
+
+from collections import defaultdict, namedtuple
+import logging
+import re
+
+import elementtree as ET
+
+Groups = namedtuple("Groups", "Installed, Available")
+
+#TODO: XML validation?
+
+class Comps(object):
+    ''' Manages comps.xml files '''
+
+    _GROUP_TAG = "group"
+
+    def __init__(self):
+        self._tagProcessors = [self._processGroupTag]
+        self._groups = {}
+
+    def create(self, filename_or_obj):
+        ''' Write the comps XML to the given filename or file object '''
+        tree = ET.MyElementTree(ET.Element("comps"))
+        for group in (g for g in self._groups.itervalues() if len(g.allPackages) > 0):
+            groupElem = ET.SubElement(tree.getroot(), "group")
+            ET.SubElement(groupElem, "id").text = group.id
+            ET.SubElement(groupElem, "name").text = group.name
+            ET.SubElement(groupElem, "description").text = group.description
+            packagesElem = ET.SubElement(groupElem, "packagelist")
+            for packages, packageType in (
+                    (group.mandatoryPackages, "mandatory"),
+                    (group.defaultPackages, "default"),
+                    (group.optionalPackages, "optional")):
+                # "mandatory" is the implicit default type, so omit the attribute
+                attrib = {} if packageType == "mandatory" else {"type": packageType}
+                for package in packages:
+                    ET.SubElement(packagesElem, "packagereq", attrib).text = package
+        tree.write(filename_or_obj)
+
+    @property
+    def groups(self):
+        ''' Namedtuple with Installed and Available attributes '''
+        return Groups([g for g in self._groups.itervalues() if g.installed],
+                      [g for g in self._groups.itervalues() if not g.installed])
+
+    def addGroup(self, group):
+        self._groups[group.name] = group
+
+    def getGroup(self, name):
+        return self._groups.get(name)
+
+    def getGroupById(self, idname):
+        try:
+            return [g for g in self._groups.itervalues() if g.id == idname][0]
+        except IndexError:
+            return None
+
+    def _processGroupTag(self, elem):
+        if elem.tag == self._GROUP_TAG:
+            group = Group(elem=elem)
+            self._groups[group.name] = group
+            elem.clear()
+
+    def add(self, srcfile):
+        ''' Add groups from comps.xml '''
+        try:
+            for _, elem in ET.iterparse(srcfile):
+                for processor in self._tagProcessors:
+                    processor(elem)
+        except SyntaxError, ex:
+            raise SyntaxError("Parsing of comps file '%s' failed: %s" % (srcfile, ex))
+        except IOError, ex:
+            raise IOError("Problem on opening comps file: %s" % ex)
+
+    def resolveInstalledGroups(self, installedPackages):
+        ''' Resolve installation status for all groups '''
+        if installedPackages:
+            for group in self._groups.itervalues():
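+                # A group counts as installed when all of its mandatory
+                # packages are installed or, if it has none, when any of its
+                # default/optional packages is installed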
+                mandatoryPackages = group.packages[Group.MANDATORY_TYPE]
+                if mandatoryPackages:
+                    group.installed = mandatoryPackages.issubset(installedPackages)
+                else:
+                    nonMandatoryPackages = group.packages[Group.DEFAULT_TYPE] | group.packages[Group.OPTIONAL_TYPE]
+                    group.installed = not nonMandatoryPackages.isdisjoint(installedPackages)
+
+class Group(object):
+    ''' Package group '''
+
+    MANDATORY_TYPE = "mandatory"
+    DEFAULT_TYPE = "default"
+    OPTIONAL_TYPE = "optional"
+
+    def __init__(self, id="", name="", description="", elem=None):
+        self.id = id
+        self.name = name
+        self.description = description
+        self.installed = False
+        self.packages = defaultdict(set)
+
+        if elem:
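+            # Populate id/name/description and packages from a <group> element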
+            for child in elem:
+                if child.tag in ("id", "name", "description"):
+                    self.__dict__[child.tag] = child.text
+                elif child.tag == "packagelist":
+                    for package in child:
+                        reqtype = package.get("type")
+                        if not reqtype:
+                            reqtype = self.MANDATORY_TYPE
+                        self.packages[reqtype].add(package.text)
+
+    def addPackages(self, packages, ptype=None):
+        '''
+        Accepts one package name or list of names
+        type is by default mandatory
+        '''
+        ptype = ptype or self.MANDATORY_TYPE
+        assert ptype in (self.MANDATORY_TYPE, self.DEFAULT_TYPE, self.OPTIONAL_TYPE), "Wrong type"
+        if not isinstance(packages, list):
+            packages = [packages]
+        self.packages[ptype].update(packages)
+
+    @property
+    def mandatoryPackages(self):
+        return list(self.packages[self.MANDATORY_TYPE])
+
+    @property
+    def defaultPackages(self):
+        return list(self.packages[self.DEFAULT_TYPE])
+
+    @property
+    def optionalPackages(self):
+        return list(self.packages[self.OPTIONAL_TYPE])
+
+    @property
+    def allPackages(self):
+        return (self.mandatoryPackages +
+                self.defaultPackages +
+                self.optionalPackages)
+
+Rule = namedtuple("Rule", "groupid, type, key, regexp")
+class CompsTemplate(Comps):
+    ''' Also parses rules from comps.xml and applies them to bundle metadata '''
+
+    _RULES_TAG = "rules"
+
+    def __init__(self, filename):
+        self.rules = []
+        Comps.__init__(self)
+        self._tagProcessors.append(self.rulesTagProcessor)
+        self.add(filename)
+
+    def applyRule(self, metadata):
+        for rule in self.rules:
+            value = metadata.get(rule.key)
+            if value:
+                if re.search(rule.regexp, value):
+                    group = self.getGroupById(rule.groupid)
+                    if group:
+                        packageName = metadata.get("Package")
+                        if packageName:
+                            group.addPackages(packageName, rule.type)
+                        else:
+                            raise SyntaxError("Corrupted metadata. No package information available.")
+                    else:
+                        raise SyntaxError("Comps template: Invalid group id '%s' in rule" % rule.groupid)
+            else:
+                logging.warning("Comps template: Key '%s' not found from bundle metadata", rule.key)
+
+    def rulesTagProcessor(self, elem):
+        if elem.tag == self._RULES_TAG:
+            for rule in elem:
+                groupId = rule.get("groupid")
+                if groupId:
+                    packageType = rule.get("type", "mandatory")
+                    for match in rule:
+                        key = match.get("key", "Package")
+                        regexp = match.text
+                        self.rules.append(Rule(groupId, packageType, key, regexp))
+            elem.clear()
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/python/elementtree.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,40 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Custom ElementTree
+#
+
+from xml.etree.cElementTree import *
+
+class MyElementTree(ElementTree):
+    ''' Enhanced ElementTree '''
+
+    def indent(self, elem, level=0):
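+        ''' Indent the tree in place for pretty printing (a variant of the
+        well-known ElementTree indent recipe) '''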
+        i = "\n" + level*"  "
+        if len(elem):
+            if not elem.text or not elem.text.strip():
+                elem.text = i + "  "
+            if not elem.tail or not elem.tail.strip():
+                elem.tail = i
+            for child in elem:
+                self.indent(child, level+1)
+            if not child.tail or not child.tail.strip():
+                child.tail = i
+        else:
+            if level and (not elem.tail or not elem.tail.strip()):
+                elem.tail = i
+
+    def write(self, *args, **kwargs):
+        self.indent(self.getroot())
+        ElementTree.write(self, *args, **kwargs)
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/python/generalexceptions.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,39 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Exceptions for general use
+#
+
+''' General exceptions '''
+
+class GenericError(Exception):
+    ''' Generic Error '''
+
+class InvalidUserInput(Exception):
+    ''' Invalid user input '''
+
+class InvalidFileInput(Exception):
+    ''' Invalid file input '''
+
+class ExternalProcessError(Exception):
+    """ External process exited with error status """
+    def __init__(self, binary, errorcode, output=None, blocksRetcode=None):
+        if blocksRetcode == 2:
+            Exception.__init__(self, "%s argument error" % binary)
+        else:
+            Exception.__init__(self, "%s failed with error code %s" % (binary, errorcode))
+        self.blocksRetcode = blocksRetcode
+        self.errorcode = errorcode
+        self.binary = binary
+        self.output = output
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/python/localbundle.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,60 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# LocalBundle class capable of bundle installation
+#
+
+import logging
+import urlparse
+import utils
+import tempfile
+import shutil
+
+class LocalBundle(object):
+    ''' Install bundles from URIs '''
+    def __init__(self, dpkg):
+        self.dpkg = dpkg
+
+    def install(self, uris):
+        '''
+        Install the bundle(s) in the uris list
+        The list must not be empty
+        Raises ValueError if nothing could be installed
+        '''
+        assert uris, "uri installation list empty!"
+        files = []
+        tempdir = tempfile.mkdtemp(prefix="local-bundle")
+        try:
+            for uri in uris:
+                # uri can be local filename without file scheme
+                filename = uri
+                if urlparse.urlparse(uri).scheme in ("http", "https", "ftp", "ftps", "file"):
+                    downloadPath = tempfile.mktemp(dir=tempdir)
+                    try:
+                        utils.urlretrieve(uri, downloadPath)
+                        filename = downloadPath
+                    except IOError, ex:
+                        if len(uris) == 1:
+                            # Re-raise to preserve errno and traceback
+                            raise
+                        else:
+                            logging.warning(str(ex))
+                        filename = None
+                if filename:
+                    files.append(filename)
+            if files:
+                self.dpkg("--install %s" % utils.argsToStr(files))
+            else:
+                raise ValueError("Nothing to install")
+        finally:
+            shutil.rmtree(tempdir)
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/python/sourceslist.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,91 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# SourcesList class that manages sources.list
+#
+
+from collections import namedtuple
+import utils
+
+class RepoError(Exception):
+    ''' Repository error '''
+
+Repo = namedtuple("Repo", "name, uri")
+
+class SourcesList(object):
+    ''' Manipulate sources.list '''
+    def __init__(self, path):
+        self.fpath = path
+        with open(self.fpath, "a+") as f:
+            # parse sources.list to a list of repos
+            lines = f.readlines()
+            repotuples = zip(lines[::2], lines[1::2])
+            self._repos = [Repo(name[1:].strip(), uri.split()[1]) for name, uri in repotuples]
+
+    @property
+    def repos(self):
+        return self._repos[:]
+
+    def _validateId(self, idnum):
+        try:
+            idnum = int(idnum)
+            if idnum < 1:
+                raise ValueError
+        except ValueError:
+            raise RepoError("Repository id must be positive integer")
+        if idnum > len(self._repos):
+            raise RepoError("Repository with id %d not found" % idnum)
+        return idnum
+
+    def getRepo(self, idnum):
+        idnum = self._validateId(idnum)
+        return self._repos[idnum - 1]
+
+    def getRepoIdByName(self, name):
+        for i, repo in enumerate(self._repos):
+            if repo.name == name:
+                return i + 1
+        raise RepoError("Repository with name '%s' does not exist" % name)
+
+    def add(self, name, uri):
+        if name in (repo.name for repo in self._repos):
+            raise RepoError("Repository with name '%s' already exists" % name)
+        if uri in (repo.uri for repo in self._repos):
+            raise RepoError("Repository with URI '%s' already exists" % uri)
+        try:
+            utils.validateURL(uri)
+        except ValueError, ex:
+            raise RepoError(str(ex))
+        self._repos.append(Repo(name, uri))
+        self._write()
+
+    def remove(self, idnum):
+        idnum = self._validateId(idnum)
+        del self._repos[idnum - 1]
+        self._write()
+
+    def removeByName(self, name):
+        for i, repo in enumerate(self._repos):
+            if repo.name == name:
+                del self._repos[i]
+                break
+        else:
+            raise RepoError("Repository with name '%s' does not exist" % name)
+        self._write()
+
+    def _write(self):
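+        # Each repository is written as a name comment line ("#<name>")
+        # followed by a "deb <uri> /" line; __init__ parses this same format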
+        with open(self.fpath, "w") as f:
+            for name, uri in self._repos:
+                f.write("#" + name + "\n")
+                f.write("deb " + uri + " /\n")
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/python/uitools.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,48 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Various utilities for command line UI
+#
+
+''' UI utils '''
+
+force = False
+
+def askConfirmation(info, default_yes=False):
+    ''' Return true for confirmation else false '''
+    if force:
+        return True
+    result = ask("%s\nDo you want to continue" % info, ["y", "n"] if default_yes else None)
+    yes = result == "y"
+    if not yes:
+        print "Operation aborted."
+    return yes
+
+def ask(question, choices=None):
+    '''Ask the user a question and return the chosen answer
+
+    The first choice in choices is the default
+    If force is in use, "y" is returned when available, otherwise the default
+    '''
+    choices = choices or ["n", "y"]
+    choices = [c.lower() for c in choices]
+    default = choices[0]
+    if force:
+        return "y" if "y" in choices else default
+    choicesText = "/".join(choices)
+    choicesText = choicesText[0].upper() + choicesText[1:]
+    answer = None
+    while answer not in choices + [""]:
+        answer = raw_input("%s [%s]? " % (question, choicesText)).lower()
+    return default if answer == "" else answer
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/python/utils.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,424 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Various utility functions
+#
+
+import timeit
+import platform
+import logging
+import logging.handlers
+import os
+import sys
+import stat
+import shutil
+import atexit
+import urllib2
+import urlparse
+import fnmatch
+import tempfile
+import itertools
+
+from generalexceptions import GenericError
+
+URL_SCHEMES = ("http", "https", "ftp", "ftps", "file")
+
+loglevel = logging.DEBUG
+
+def platformIsWindows():
+    return platform.system() == "Windows"
+
+class StopWatch(object): # pragma: no cover
+    ''' Measure elapsed time '''
+    def __init__(self, start=True):
+        self.start_time = None
+        self.stopped = False
+        if start:
+            self.start()
+
+    def start(self):
+        self.stopped = False
+        self.start_time = timeit.default_timer()
+
+    def stop(self):
+        if not self.stopped:
+            self.stopped = self.elapsed()
+        return self.stopped
+
+    def elapsed(self):
+        if self.stopped:
+            return self.stopped
+
+        if self.start_time:
+            elapsed = timeit.default_timer() - self.start_time
+        else:
+            elapsed = None
+        return elapsed
+
+def getErrorFunction(errortext):
+    ''' Return a function that exits with the given error message '''
+    def errorFunction(*args, **kwargs):
+        sys.exit("Error: %s" % errortext)
+    return errorFunction
+
+def getRealPath(origpath):
+    realpath = origpath
+    if platformIsWindows():
+        try:
+            import win32file
+        except ImportError: # pragma: no cover
+            logging.warning("Pywin32 extension not available. Subst support disabled.")
+            return realpath
+
+        (drive, relpath) = os.path.splitdrive(origpath)
+        dev = win32file.QueryDosDevice(drive)
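+        # A subst'd drive maps to "\??\<target>"; strip that prefix and the
+        # trailing NUL characters to get the real target path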
+        if dev.startswith("\\??\\"):
+            dev = dev[4:-2]
+            realpath = os.path.join(dev, relpath[1:])
+            logging.debug("Directory '%s' is on substed drive %s => %s. Real path is %s", origpath, drive, dev, realpath)
+    return realpath
+
+def getMetaPath():
+    altPath = os.environ.get("BLOCKS_METADATA")
+    if altPath:
+        blocksPath = altPath
+    else:
+        homeEnv = "APPDATA" if platformIsWindows() else "HOME"
+        home = os.environ.get(homeEnv)
+        if home is None:
+            raise GenericError("Could not get home directory from environment variable %s.\n"
+                               "Please define BLOCKS_METADATA environment variable to set blocks home dir." % homeEnv)
+        blocksPath = os.path.join(home, "Blocks" if platformIsWindows() else ".blocks")
+    blocksPath = os.path.normcase(blocksPath)
+    if not os.path.isdir(blocksPath):
+        os.mkdir(blocksPath) # pragma: no cover
+    return blocksPath
+
+def addSearchPath(path):
+    envPath = os.environ.get("PATH", "")
+    if path not in envPath:
+        os.environ["PATH"] = os.pathsep.join([path, envPath])
+
+def pathsUnique(path1, path2):
+    path1 = addPathSep(os.path.normcase(path1))
+    path2 = addPathSep(os.path.normcase(path2))
+    return not (path1.startswith(path2) or path2.startswith(path1))
+
+def pathInside(root, path, equalInside=True):
+    root = addPathSep(os.path.normcase(root))
+    path = addPathSep(os.path.normcase(path))
+    if not equalInside and path == root:
+        return False
+    return path.startswith(root)
+
+def removeStart(text, remove):
+    ''' Case-insensitively remove a prefix from the start of a string '''
+    if text.lower().startswith(remove.lower()):
+        return text[len(remove):]
+    return text
+
+def setReadOnly(path, read_only=True): # pragma: no cover
+    os.chmod(path, stat.S_IREAD if read_only else stat.S_IWRITE)
+
+def forceDelete(path): # pragma: no cover
+    ''' Deletes read-only files '''
+    os.chmod(path, stat.S_IWRITE)
+    os.remove(path)
+
+def __readOnlyDelete(func, path, exc): # pragma: no cover
+    os.chmod(path, stat.S_IWRITE)
+    func(path)
+
+def superDelete(path): # pragma: no cover
+    ''' Deletes both files and directories even if read-only '''
+    if os.path.isfile(path):
+        forceDelete(path)
+    elif os.path.isdir(path):
+        shutil.rmtree(path, onerror=__readOnlyDelete)
+
+DETAILED_LOG_FORMAT = '%(asctime)s.%(msecs)d - %(levelname)s: %(message)s'
+DETAILED_LOG_TIMEFORMAT = "%d.%m.%Y %H:%M:%S"
+DEFAULT_LOG_FORMAT = '%(levelname)s: %(message)s'
+
+def getConsoleLogFormat():
+    if loglevel <= logging.DEBUG:
+        return (DETAILED_LOG_FORMAT, DETAILED_LOG_TIMEFORMAT)
+    else:
+        return (DEFAULT_LOG_FORMAT, None)
+
+def setupLogging(version, verbose, path):
+    global loglevel
+
+    if not os.path.isabs(path):
+        path = os.path.join(getMetaPath(), path)
+
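+    # Register a custom DEBUG2 level, one step more verbose than DEBUG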
+    logging.DEBUG2 = logging.DEBUG - 1
+    logging.addLevelName(logging.DEBUG2, "DEBUG2")
+    verbosityToLoglevel = {-2: logging.CRITICAL,
+                           -1: logging.ERROR,
+                            0: logging.WARNING,
+                            1: logging.INFO,
+                            2: logging.DEBUG,
+                            3: logging.DEBUG2}
+    minVerbosity = min(verbosityToLoglevel.keys())
+    maxVerbosity = max(verbosityToLoglevel.keys())
+    verbose = min(max(verbose, minVerbosity), maxVerbosity)
+    loglevel = verbosityToLoglevel[verbose]
+
+    logger = logging.getLogger()
+    logger.setLevel(logging.NOTSET)
+
+    console = logging.StreamHandler()
+    console.setLevel(loglevel)
+    formatter = logging.Formatter(*getConsoleLogFormat())
+    console.setFormatter(formatter)
+    logger.addHandler(console)
+
+    fileHandler = logging.handlers.RotatingFileHandler(path, maxBytes=500000, backupCount=1)
+    if __debug__:
+        fileHandler.setLevel(loglevel if loglevel < logging.DEBUG else logging.DEBUG)
+    else:
+        fileHandler.setLevel(loglevel)
+    formatter = logging.Formatter(DETAILED_LOG_FORMAT, DETAILED_LOG_TIMEFORMAT)
+    fileHandler.setFormatter(formatter)
+    logger.addHandler(fileHandler)
+
+    cpu_count = "Unknown"
+    try:
+        import multiprocessing
+        cpu_count = multiprocessing.cpu_count()
+    except ImportError: # pragma: no cover
+        pass
+
+    logging.debug("%s started (PID %s) [OS: %s | Python: %s | CPU: %s | CPU Count: %s]",
+        version,
+        os.getpid(),
+        platform.platform(),
+        platform.python_version(),
+        platform.processor(),
+        cpu_count)
+    stopwatch = StopWatch()
+
+    @atexit.register
+    def runatexit():
+        # logging may already be torn down at interpreter exit (this also
+        # keeps coverage runs from crashing here)
+        if logging: # pragma: no cover
+            logging.info("Stopped. Run time: %.3fs", stopwatch.stop())
+
+    return loglevel
+
+def addPathSep(path):
+    return addSuffix(path, os.sep)
+
+def addSuffix(text, suffix):
+    return text if text.endswith(suffix) else text + suffix
+
+def relativePath(path, root):
+    '''
+    Return the path relative to root if path is absolute and inside root
+    Keeps the casing of the path, but on Windows the matching of path and
+    root is done in lower case
+    '''
+    if os.path.isabs(path):
+        root = os.path.abspath(root)
+        if pathInside(root, path):
+            path = os.path.normpath(path)
+            root = os.path.normpath(root)
+            root = addPathSep(root)
+            # On windows we don't care about path case
+            if platformIsWindows():
+                path = removeStart(path, root)
+            else:
+                path = path.replace(root, "", 1)
+    return path
+
+def removeFilesRecursive(path, glob):
+    for name in getFilesRecursive(path, glob):
+        os.remove(name)
+
+def getFilesRecursive(path, glob=None):
+    ''' Get list of all files recursively from a path '''
+    paths = [os.path.join(root, name) for root, _, names in os.walk(path) for name in names]
+    if glob:
+        paths = fnmatch.filter(paths, glob)
+    return paths
+
+def getFileLines(path):
+    lines = []
+    if path:
+        with open(path) as f:
+            lines = [line.strip() for line in f.readlines()]
+    return lines
+
+def warnIfFileNotFound(path):
+    if not os.path.isfile(path):
+        if os.path.isdir(path):
+            logging.warning("No such file: %s. Directory found instead.", path)
+        else:
+            logging.warning("No such file: %s", path)
+
+def createFile(path):
+    open(path, "a+").close()
+
+def createDir(path):
+    '''
+    Create directory if it doesn't exist.
+    Ignores errors.
+    '''
+    try:
+        os.makedirs(path)
+    except OSError:
+        pass
+
+def getInstallDir():
+    return os.path.normpath(os.path.join(os.path.dirname(__file__), ".."))
+
+def copyFileData(src, dst, size=None, blocksize=32*1024):
+    ''' Copy up to size bytes (or everything if size is None) from source file to destination file '''
+    bytesread = 0
+    while size is None or bytesread < size:
+        if size is not None and (bytesread + blocksize) > size:
+            blocksize = size - bytesread
+        buf = src.read(blocksize)
+        if not buf:
+            break
+        # Count the bytes actually read; read() may return less than requested
+        bytesread += len(buf)
+        dst.write(buf)
+
+def fileobjsEqual(fobj1, fobj2):
+    ''' Compare two file objects line by line '''
+    for line in fobj1:
+        if line != fobj2.readline():
+            return False
+    # fobj2 must not have extra trailing lines either
+    return fobj2.readline() == ""
+
+def getUtilDir():
+    binDir = os.path.join(getInstallDir(), "utils")
+    binDir = binDir.replace("\\", "/")
+    return binDir
+
+def initSearchPath():
+    addSearchPath(getUtilDir())
+
+def urlretrieve(url, path):
+    urlFile = openUrl(url)
+    try:
+        with open(path, "wb") as f:
+            copyFileData(urlFile, f)
+    finally:
+        urlFile.close()
+
+def openUrl(url, timeout=10):
+    ''' If URL cannot be opened raises IOError '''
+    errorText = "Problem on fetching '%s'" % url
+    try:
+        scheme = validateURL(url)
+    except ValueError, ex:
+        raise IOError("%s: %s" % (errorText, ex))
+
+    try:
+        urlFile = urllib2.urlopen(url, timeout=timeout)
+    except IOError, ex:
+        if hasattr(ex, "reason"):
+            problem = "Local file cannot be found" if scheme == "file" else "Server cannot be reached"
+            exc = IOError("%s: %s.\nReason: %s" % (errorText, problem, ex.reason))
+            if scheme == "file":
+                exc.errno = 404
+        elif hasattr(ex, "code"):
+            exc = IOError("%s: %s" % (errorText, ex))
+            exc.errno = ex.code
+        raise exc
+
+    return urlFile
+
+def validateURL(url):
+    ''' Raises ValueError exception if invalid URL '''
+    scheme = urlparse.urlparse(url).scheme
+    if scheme not in URL_SCHEMES:
+        raise ValueError("Invalid URL '%s' with scheme '%s': Supported URL schemes: %s" % (url, scheme, ", ".join(URL_SCHEMES)))
+    return scheme
+
+def error(text, critical=False, noOutput=False, retCode=1):
+    if critical:
+        logging.error(text, exc_info=True) # pragma: no cover
+    else:
+        logging.debug(text, exc_info=True)
+    # On debug log level or critical errors the message is already in the
+    # log output; otherwise print it to stderr before exiting
+    if loglevel != logging.DEBUG and not critical and not noOutput:
+        print >> sys.stderr, text
+    sys.exit(retCode)
+
+def getVersion(infoModule):
+    info = __import__(infoModule)
+    if info.VERSION_PRE_RELEASE > 0:
+        pre_release = info.VERSION_PRE_RELEASE_ID + str(info.VERSION_PRE_RELEASE)
+    else:
+        pre_release = "" # pragma: no cover
+    return "%%prog %d.%d.%d%s (%s)" % (info.VERSION_MAJOR,
+                                       info.VERSION_MINOR,
+                                       info.VERSION_REVISION,
+                                       pre_release,
+                                       info.VERSION_DATE or "dev")
+
+def atomicFileCreate(path, data):
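+    ''' Write data to a temp file and rename it over path. The rename is
+    atomic on POSIX only when both files are on the same filesystem, and
+    on Windows it fails if path already exists. '''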
+    tmpfile = tempfile.NamedTemporaryFile(bufsize=0, delete=False)
+    try:
+        tmpfile.write(data)
+        tmpfile.flush()
+        os.fsync(tmpfile.fileno())
+        tmpfile.close()
+        os.rename(tmpfile.name, path)
+    except Exception: # cleanup
+        tmpfile.close()
+        os.remove(tmpfile.name)
+        raise
+
+def uniqueName(prefix, usedNames):
+    for idnum in itertools.count(1):
+        name = "%s%d" % (prefix, idnum)
+        if name not in usedNames:
+            break
+    return name
+
+def isFile(name, ext):
+    ''' Checks if there is a file having extension ext with given name '''
+    return name.lower().endswith(ext) and os.path.isfile(name)
+
+def argsToStr(*args):
+    ''' Quote any argument containing spaces and join all arguments into a single string '''
+    argList = listify(*args)
+    for i, arg in enumerate(argList):
+        if " " in arg:
+            argList[i] = '"%s"' % arg
+    return " ".join(argList)
+
+def listify(*args):
+    retList = []
+    for arg in args:
+        if hasattr(arg, "__iter__"):
+            retList.extend(list(arg))
+        else:
+            retList.append(arg)
+    return retList
+
+def toBoolean(var):
+    if isinstance(var, bool):
+        return var
+    return {"yes": True, "true": True, "1": True, "enable": True,
+            "no": False, "false": False, "0": False, "disable": False}.get(var.lower() if var else None)
+
+def test():
+    print relativePath(r"c:\users\work\vc\blocks\blocks\python\data.py", ".")
+
+if __name__ == "__main__":
+    test()
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/python/workspacediff.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,181 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Workspace diffing
+#
+
+import os
+from hashlib import md5
+
+from generalexceptions import InvalidUserInput
+import utils
+
+class DefaultOutput(object):
+    ''' Default output class for WorkspaceDiff '''
+
+    def __init__(self, differ):
+        self.differ = differ
+        self.currentBundle = ""
+        self._clear()
+
+    def _clear(self):
+        self.removed_files = []
+        self.modified_files = []
+        self.unmodified_files = []
+
+    def _bundlePrint(self, bundle):
+        print "%s%s%s %s" % ("M" if self.modified_files else " ",
+                             "R" if self.removed_files else " ",
+                             "U" if self.unmodified_files else " ",
+                             bundle)
+
+    def _filePrint(self, bundle):
+        print "%s:" % bundle
+        if self.modified_files:
+            print "\n".join("  M " + n for n in self.modified_files)
+        if self.removed_files:
+            print "\n".join("  R " + n for n in self.removed_files)
+        if self.unmodified_files:
+            print "\n".join("  U " + n for n in self.unmodified_files)
+
+    def _print(self, bundle):
+        if self.modified_files or self.removed_files or self.unmodified_files:
+            if self.differ.bundlediff:
+                self._bundlePrint(bundle)
+            else:
+                self._filePrint(bundle)
+
+    def __call__(self, status, bundle, name):
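+        # status is "M", "R", "U", "N" or "END"; a True return value tells
+        # the differ to stop scanning the current bundle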
+        if status == "END":
+            self._print(self.currentBundle)
+            if self.differ.new:
+                print
+            return True
+
+        if not self.currentBundle:
+            self.currentBundle = bundle
+        if bundle != self.currentBundle and bundle:
+            self._print(self.currentBundle)
+            self.currentBundle = bundle
+            self._clear()
+
+        if self.differ.bundlediff:
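+            # In bundle mode the *_files attributes are plain boolean flags
+            # rather than lists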
+            if status == "M":
+                self.modified_files = True
+            elif status == "R":
+                self.removed_files = True
+            elif status == "U":
+                self.unmodified_files = True
+            # Stop scanning this bundle if all needed flags found
+            if (self.modified_files == self.differ.modified and
+                self.removed_files == self.differ.removed and
+                self.unmodified_files == self.differ.unmodified):
+                return True
+        else:
+            if status == "N":
+                print "N", name
+            else:
+                {"M": self.modified_files, "R": self.removed_files, "U": self.unmodified_files}[status].append(name)
+
+class WorkspaceDiff(object):
+    ''' Find changes in workspace '''
+
+    def __init__(self, wsPath, wsMetaDir, output=DefaultOutput):
+        self.wsPath = wsPath
+        self.wsMetaDir = wsMetaDir
+        self.output = output(self)
+        self.new = self.modified = self.removed = self.unmodified = self.bundlediff = None
+
+    def start(self, bundles=None,
+              new=False, modified=True, removed=True, unmodified=False,
+              dirs=None, bundlediff=False):
+        if not bundles:
+            bundles = []
+        else:
+            new = False
+        if dirs is None:
+            dirs = [self.wsPath]
+
+        assert not (new and bundlediff), "mutually exclusive"
+
+        self.new, self.modified, self.removed, self.unmodified = new, modified, removed, unmodified
+        self.bundlediff = bundlediff
+
+        self._validateDirs(dirs)
+
+        root = os.path.join(self.wsMetaDir, "info")
+        workspace_files = set()
+        md5_files = [os.path.splitext(f)[0] for f in os.listdir(root) if f.endswith(".md5sum")]
+        # If we are filtering with bundles check that all exist
+        for bundle in bundles:
+            if bundle not in md5_files:
+                raise InvalidUserInput("Invalid bundle name '%s'" % bundle)
+        if bundles:
+            md5_files = bundles
+        # Go through .md5sum files
+        for md5name in md5_files:
+            with open(os.path.join(root, md5name + ".md5sum")) as f:
+                # Check all files named in md5sum file
+                for line in f:
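+                    # Each line has the format "<md5sum>  <relative path>"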
+                    (pkg_md5, _, pkg_file) = line.partition("  ")
+                    pkg_file = os.path.normcase(pkg_file.rstrip("\n"))
+                    if new:
+                        # Use md5 to save memory
+                        workspace_files.add(md5(pkg_file).digest())
+                    if modified or removed or unmodified:
+                        skip = False
+                        pkg_file = os.path.join(self.wsPath, pkg_file)
+                        if pkg_file.startswith(tuple(dirs)):
+                            # TODO: Could be optimized by checking modification
+                            # times or by calculating md5 sums in worker processes
+                            try:
+                                with open(pkg_file, "rb") as pf:
+                                    if pkg_md5 != md5(pf.read()).hexdigest():
+                                        if modified:
+                                            skip = self.output("M", md5name, pkg_file)
+                                    elif unmodified:
+                                        skip = self.output("U", md5name, pkg_file)
+                            except IOError:
+                                if removed:
+                                    skip = self.output("R", md5name, pkg_file)
+                        if skip:
+                            break
+        self.output("END", None, None)
+        if new:
+            self._findNewFiles(dirs, workspace_files)
+
+    def _validateDirs(self, dirs):
+        for i, d in enumerate(dirs):
+            dirs[i] = os.path.normcase(os.path.abspath(d))
+            if not dirs[i].startswith(self.wsPath) or not os.path.isdir(dirs[i]):
+                raise InvalidUserInput("Search path '%s' not found from under current workspace" % dirs[i])
+
+        # Check that dirs do not include other dirs
+        for i, d in enumerate(dirs):
+            for d2 in dirs[i+1:]:
+                if not utils.pathsUnique(d, d2):
+                    raise InvalidUserInput("Search paths '%s' and '%s' are in conflict. Please remove either one." % (d, d2))
+
+    def _findNewFiles(self, dirs, existing_files):
+        for d in dirs:
+            for root, _, files in os.walk(d):
+                for name in files:
+                    rel_path = os.path.normcase(os.path.join(root[len(self.wsPath):], name))
+                    if rel_path[0] == os.sep:
+                        rel_path = rel_path[1:]
+                    rel_path = md5(rel_path).digest()
+                    if rel_path not in existing_files:
+                        self.output("N", None, os.path.normcase(os.path.join(root, name)))
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks/utils/README.txt	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,50 @@
+Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+All rights reserved.
+This component and the accompanying materials are made available
+under the terms of "Eclipse Public License v1.0"
+which accompanies this distribution, and is available
+at the URL "http://www.eclipse.org/legal/epl-v10.html".
+
+
+Where to Get Needed Utilities
+-----------------------------
+
+Copy utilities to this directory from these sources:
+
+gpg.exe and gpgv.exe:
+http://www.gnupg.org -> install windows version
+
+gzip.exe, rm.exe and tar.exe:
+http://sourceforge.net/projects/unxutils/files/unxutils/current/UnxUtils.zip/download
+
+You can obtain the Cygwin binaries by copying them from your Cygwin installation or by downloading these packages:
+
+cygwin1.dll: cygwin-1.7.1-1
+ftp://ftp.cygwin.com/pub/cygwin/release/cygwin/cygwin-1.7.1-1.tar.bz2
+
+cygz.dll: zlib0-1.2.3-10
+ftp://ftp.cygwin.com/pub/cygwin/release/zlib/zlib-1.2.3-10.tar.bz2
+
+cygbz2-1.dll: libbz2_1-1.0.5-10
+ftp://ftp.cygwin.com/pub/cygwin/release/bzip2/libbz2_1/libbz2_1-1.0.5-10.tar.bz2
+
+cygcurl-4.dll: libcurl4-7.19.6-1
+ftp://ftp.cygwin.com/pub/cygwin/release/curl/libcurl4/libcurl4-7.19.6-1.tar.bz2
+
+cygdb-4.2.dll: libdb4.2-4.2.52.5-2
+ftp://ftp.cygwin.com/pub/cygwin/release/db/db4.2/libdb4.2/libdb4.2-4.2.52.5-2.tar.bz2
+
+cyggcc_s-1.dll: libgcc1-4.3.4-3
+ftp://ftp.cygwin.com/pub/cygwin/release/gcc4/libgcc1/libgcc1-4.3.4-3.tar.bz2
+
+cygiconv-2.dll: libiconv2-1.13.1-1
+ftp://ftp.cygwin.com/pub/cygwin/release/libiconv/libiconv2/libiconv2-1.13.1-1.tar.bz2
+
+cygintl-8.dll: libintl8-0.17-11
+ftp://ftp.cygwin.com/pub/cygwin/release/gettext/libintl8/libintl8-0.17-11.tar.bz2
+
+cygstdc++-6.dll: libstdc++6-4.3.4-3
+ftp://ftp.cygwin.com/pub/cygwin/release/gcc4/libstdc++6/libstdc++6-4.3.4-3.tar.bz2
+
+bzip2.exe: bzip2-1.0.5-10
+ftp://ftp.cygwin.com/pub/cygwin/release/bzip2/bzip2-1.0.5-10.tar.bz2
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/blocks_files	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,62 @@
+[COMMON]
+blocks\python\cmdlineapp.py
+blocks\python\blocks.py
+blocks\python\blockscommand.py
+blocks\python\cmdlineutils.py
+blocks\python\comps.py
+blocks\python\generalexceptions.py
+blocks\python\blocks_info.py
+blocks\python\localbundle.py
+blocks\python\sourceslist.py
+blocks\python\uitools.py
+blocks\python\utils.py
+blocks\python\workspacediff.py
+blocks\conf\directives\single-bundle\file-directives.xml
+blocks\conf\directives\single-bundle\pkg-directives.xml
+blocks\conf\comps_template.xml
+blocks\python\UserConfiguration\UserConfiguration.py
+blocks\python\UserConfiguration\XMLConfig.py
+blocks\python\UserConfiguration\XMLConfigFile.py
+blocks\python\UserConfiguration\__init__.py
+blocks\python\bundle.py
+blocks\python\bundlecommand.py
+blocks\python\cmdlinecommand.py
+blocks\python\elementtree.py
+
+[WINDOWS]
+blocks\bin\blocks.bat
+blocks\bin\bundle.bat
+blocks\utils\apt-cache.exe
+blocks\utils\apt-get.exe
+blocks\utils\cygbz2-1.dll
+blocks\utils\cygiconv-2.dll
+blocks\utils\cygintl-8.dll
+blocks\utils\cygwin1.dll
+blocks\utils\cygz.dll
+blocks\utils\dpkg-deb.exe
+blocks\utils\dpkg-query.exe
+blocks\utils\dpkg-split.exe
+blocks\utils\dpkg-trigger.exe
+blocks\utils\dpkg.exe
+blocks\utils\gzip.exe
+blocks\utils\methods\copy.exe
+blocks\utils\methods\file.exe
+blocks\utils\methods\ftp.exe
+blocks\utils\methods\gzip.exe
+blocks\utils\methods\http.exe
+blocks\utils\methods\https.exe
+blocks\utils\rm.exe
+blocks\utils\tar.exe
+blocks\utils\gpgv.exe
+blocks\utils\methods\gpgv.exe
+blocks\utils\cyggcc_s-1.dll
+blocks\utils\cygstdc++-6.dll
+blocks\utils\apt-ftparchive.exe
+blocks\utils\cygdb-4.2.dll
+blocks\utils\gpg.exe
+blocks\utils\methods\bzip2.exe
+blocks\utils\bzip2.exe
+blocks\utils\cygcurl-4.dll
+
+[IGNORE]
+blocks\utils\README.txt
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/buildbinaries.bat	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,34 @@
+@REM
+@REM Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+@REM All rights reserved.
+@REM This component and the accompanying materials are made available
+@REM under the terms of "Eclipse Public License v1.0"
+@REM which accompanies this distribution, and is available
+@REM at the URL "http://www.eclipse.org/legal/epl-v10.html".
+@REM
+@REM Initial Contributors:
+@REM Nokia Corporation - initial contribution.
+@REM
+@REM Contributors:
+@REM
+@REM Description:
+@REM Builds apt and dpkg binaries for blocks client
+@REM
+
+
+@echo off
+setlocal
+set DPKG_DIR=%~dp0/dpkg
+set APT_DIR=%~dp0/apt
+set CYGWIN_DIR=C:\cygwin-1.7
+set path=%DPKG_DIR%\src;%path%
+
+if not exist %DPKG_DIR%/Makefile %CYGWIN_DIR%\bin\bash --login -c "cd $DPKG_DIR && ./configure-cygwin"
+%CYGWIN_DIR%\bin\bash --login -c "cd $DPKG_DIR && make -i"
+
+if not exist %APT_DIR%/makefile %CYGWIN_DIR%\bin\bash --login -c "cd $APT_DIR && ./configure-cygwin"
+%CYGWIN_DIR%\bin\bash --login -c "cd $APT_DIR && make -i"
+
+endlocal
+
+copybinaries.bat
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/buildpackage.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,173 @@
+#!/usr/bin/python
+
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Builds installation packages
+#
+
+import zipfile
+import tarfile
+import os
+import sys
+import compileall
+import datetime
+from collections import defaultdict
+import itertools
+from optparse import OptionParser
+
+sys.path.append("blocks/python")
+import utils
+
+class FileList(object):
+    ''' Per-OS file lists parsed from an INI-style file with [common], [windows], [linux] and [ignore] sections '''
+    def __init__(self, listname):
+        self.files = defaultdict(set)
+        with open(listname) as f:
+            fileType = None
+            for line in (l.strip() for l in f):
+                if line != "":
+                    if line.startswith("[") and line.endswith("]"):
+                        fileType = line[1:-1].lower()
+                    elif fileType:
+                        line = os.path.normcase(line)
+                        self.files[fileType].add(line)
+
+    def popFile(self, path):
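+        ''' Remove path from its section and return the section name; "ignore" entries are reported but never removed '''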
+        for k, paths in self.files.items():
+            if path in paths:
+                if k != "ignore":
+                    self.files[k].remove(path)
+                return k
+
+    def getAll(self):
+        ''' Return (type, path) pairs for all files that were not popped, excluding ignored ones '''
+        remaining = []
+        for k, paths in self.files.iteritems():
+            if k != "ignore":
+                remaining.extend((k, path) for path in paths)
+        return remaining
+
+def main():
+    parser = OptionParser(prog="build-package", usage="%prog [OPTIONS] package")
+    parser.add_option("-d", "--dryrun", action="store_true", help="just show what would happen")
+    parser.add_option("-n", "--newversion", action="store_true", help="increase version number for next release")
+    parser.add_option("-i", "--ignore-errors", action="store_true", help="ignore errors")
+    parser.set_defaults(dryrun=False, newversion=False)
+    (options, args) = parser.parse_args()
+    if args:
+        packageName = args[0]
+    else:
+        parser.print_help()
+        print
+        sys.exit("Package to build required as an argument")
+
+    error = ""
+    DIRECTORY = "blocks"
+    infoPath = "%s/python/%s_info.py" % (DIRECTORY, packageName)
+    version_info = {}
+
+    def writeInfo():
+        with open(infoPath, "w") as f:
+            for k, v in sorted(version_info.iteritems()):
+                if k != "__builtins__":
+                    f.write("%s = %s\n" % (k, v if isinstance(v, int) else '"%s"' % v))
+
+    execfile(infoPath, version_info)
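+    # The *_info.py module is exec'd into a dict so that the version fields
+    # can be updated and written back by writeInfo()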
+    version_info["VERSION_DATE"] = datetime.date.today().isoformat()
+    if not options.dryrun:
+        writeInfo()
+
+    if version_info["VERSION_PRE_RELEASE"] > 0:
+        version_string = packageName + "-%(VERSION_MAJOR)d.%(VERSION_MINOR)d.%(VERSION_REVISION)d%(VERSION_PRE_RELEASE_ID)s%(VERSION_PRE_RELEASE)d" % version_info
+    else:
+        version_string = packageName + "-%(VERSION_MAJOR)d.%(VERSION_MINOR)d.%(VERSION_REVISION)d" % version_info
+
+    print "Byte compiling..."
+    compileall.compile_dir(DIRECTORY, force=1)
+    print
+
+    filelist = FileList("%s_files" % packageName)
+    skipped_files = set()
+
+    print "Archiving..."
+    zipName = version_string + ".zip"
+    tarName = version_string + ".tar.gz"
+    if not options.dryrun:
+        zipArchive = zipfile.ZipFile(zipName, "w", zipfile.ZIP_DEFLATED)
+        tarArchive = tarfile.open(tarName, "w:gz")
+    for root, _, files in os.walk(DIRECTORY):
+        for name in (f for f in files if not f.endswith((".pyc", ".pyo", ".~py", ".bak"))):
+            path = os.path.join(root, name)
+            normpath = os.path.normcase(path)
+            fileType = filelist.popFile(normpath)
+            if fileType is None:
+                skipped_files.add(path)
+            elif fileType != "ignore":
+                if not options.dryrun:
+                    archName = path.replace(DIRECTORY, version_string, 1)
+                    if fileType == "windows" or fileType == "common":
+                        zipArchive.write(path, archName)
+                    if fileType == "linux" or fileType == "common":
+                        tarArchive.add(path, archName)
+                print path
+
+    if not options.dryrun:
+        zipArchive.close()
+        tarArchive.close()
+
+    leftovers = filelist.getAll()
+    if leftovers:
+        print
+        print "ERROR: Files that should have been packaged but not found:"
+        for ftype, name in sorted(leftovers):
+            print "%s: %s" % (ftype, name)
+
+        if not options.ignore_errors:
+            error += "All files were not packaged."
+            if not options.dryrun:
+                os.remove(tarName)
+                os.remove(zipName)
+
+    if skipped_files:
+        print
+        print "WARNING: Files found but not included in any package:"
+        for f in sorted(skipped_files):
+            print f
+
+    if error and options.newversion:
+        options.newversion = False
+        error += "\nSkipped version increase because of an error."
+
+    if options.newversion:
+        if version_info["VERSION_PRE_RELEASE"] != 0:
+            version_info["VERSION_PRE_RELEASE"] += 1
+        else:
+            version_info["VERSION_REVISION"] += 1
+
+    version_info["VERSION_DATE"] = ""
+    if not options.dryrun:
+        writeInfo()
+
+    utils.removeFilesRecursive(DIRECTORY, "*.pyc")
+
+    print
+    if error:
+        print "ERROR:", error
+    else:
+        print "Successfully created packages for %s:" % version_string
+        print zipName
+        print tarName
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/copybinaries.bat	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,37 @@
+@REM
+@REM Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+@REM All rights reserved.
+@REM This component and the accompanying materials are made available
+@REM under the terms of "Eclipse Public License v1.0"
+@REM which accompanies this distribution, and is available
+@REM at the URL "http://www.eclipse.org/legal/epl-v10.html".
+@REM
+@REM Initial Contributors:
+@REM Nokia Corporation - initial contribution.
+@REM
+@REM Contributors:
+@REM
+@REM Description:
+@REM Copy needed dpkg and apt binaries to blocks client utils dir
+@REM
+
+@echo off
+cd dpkg
+xcopy /D src\*.exe ..\blocks\utils
+xcopy /D dpkg-deb\dpkg-deb.exe ..\blocks\utils
+xcopy /D dpkg-split\dpkg-split.exe ..\blocks\utils
+cd ..
+cd apt\bin
+xcopy /D apt-get.exe ..\..\blocks\utils
+xcopy /D apt-cache.exe ..\..\blocks\utils
+xcopy /D apt-ftparchive.exe ..\..\blocks\utils
+cd methods
+xcopy /D copy.exe ..\..\..\blocks\utils\methods\
+xcopy /D file.exe ..\..\..\blocks\utils\methods\
+xcopy /D ftp.exe ..\..\..\blocks\utils\methods\
+xcopy /D gpgv.exe ..\..\..\blocks\utils\methods\
+xcopy /D gzip.exe ..\..\..\blocks\utils\methods\
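+@REM NOTE: the bzip2 method binary is just a renamed copy of gzip.exe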
+copy gzip.exe ..\..\..\blocks\utils\methods\bzip2.exe
+xcopy /D http.exe ..\..\..\blocks\utils\methods\
+xcopy /D https.exe ..\..\..\blocks\utils\methods\
+cd ..\..\..
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/patches/linux/apt-cache-search.patch	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,130 @@
+*** apt-0.7.20.2/cmdline/apt-cache.cc	2009-02-07 17:09:35.000000000 +0200
+--- apt-0.7.20.2-rhel/cmdline/apt-cache.cc	2010-01-08 20:21:29.000000000 +0200
+***************
+*** 1208,1214 ****
+     pkgCache::VerFileIterator Vf = V.FileList();
+     for (; Vf.end() == false; Vf++)
+        if ((Vf.File()->Flags & pkgCache::Flag::NotSource) == 0)
+! 	 break;
+     if (Vf.end() == true)
+        Vf = V.FileList();
+        
+--- 1208,1214 ----
+     pkgCache::VerFileIterator Vf = V.FileList();
+     for (; Vf.end() == false; Vf++)
+        if ((Vf.File()->Flags & pkgCache::Flag::NotSource) == 0)
+! 	 			break;
+     if (Vf.end() == true)
+        Vf = V.FileList();
+        
+***************
+*** 1233,1238 ****
+--- 1233,1245 ----
+  
+     // Get a pointer to start of Description field
+     const unsigned char *DescP = (unsigned char*)strstr((char*)Buffer, "Description:");
++    // HH: Bug fix. If descriptions are not found quit now to prevent crash
++    if (DescP == NULL)
++    {
++ 	cout << "E: Malformed packages inserted into cache. Description field missing!";
++ 	delete [] Buffer;
++ 	return false;
++    }
+  
+     // Write all but Description
+     if (fwrite(Buffer,1,DescP - Buffer,stdout) < (size_t)(DescP - Buffer))
+***************
+*** 1282,1287 ****
+--- 1289,1300 ----
+  bool Search(CommandLine &CmdL)
+  {
+     pkgCache &Cache = *GCache;
++    // HH: Bug fix. No need to do anything if no packages
++    if (Cache.HeaderP->PackageCount == 0)
++    {
++    		return true;
++    }
++    	
+     bool ShowFull = _config->FindB("APT::Cache::ShowFull",false);
+     bool NamesOnly = _config->FindB("APT::Cache::NamesOnly",false);
+     unsigned NumPatterns = CmdL.FileSize() -1;
+***************
+*** 1316,1322 ****
+     }
+     
+     ExDescFile *DFList = new ExDescFile[Cache.HeaderP->PackageCount+1];
+!    memset(DFList,0,sizeof(*DFList)*Cache.HeaderP->PackageCount+1);
+  
+     // Map versions that we want to write out onto the VerList array.
+     for (pkgCache::PkgIterator P = Cache.PkgBegin(); P.end() == false; P++)
+--- 1329,1336 ----
+     }
+     
+     ExDescFile *DFList = new ExDescFile[Cache.HeaderP->PackageCount+1];
+!    // HH: Bug fix. Memset all the memory
+!    memset(DFList, 0, sizeof(*DFList) * (Cache.HeaderP->PackageCount + 1));
+  
+     // Map versions that we want to write out onto the VerList array.
+     for (pkgCache::PkgIterator P = Cache.PkgBegin(); P.end() == false; P++)
+***************
+*** 1416,1423 ****
+        pkgCache::PkgIterator Pkg = Cache.FindPkg(*I);
+        if (Pkg.end() == true)
+        {
+! 	 _error->Warning(_("Unable to locate package %s"),*I);
+! 	 continue;
+        }
+  
+        ++found;
+--- 1430,1437 ----
+        pkgCache::PkgIterator Pkg = Cache.FindPkg(*I);
+        if (Pkg.end() == true)
+        {
+! 	 			_error->Warning(_("Unable to locate package %s"),*I);
+! 	 			continue;
+        }
+  
+        ++found;
+***************
+*** 1425,1444 ****
+        // Find the proper version to use.
+        if (_config->FindB("APT::Cache::AllVersions","true") == true)
+        {
+! 	 pkgCache::VerIterator V;
+! 	 for (V = Pkg.VersionList(); V.end() == false; V++)
+! 	 {
+! 	    if (DisplayRecord(V) == false)
+! 	       return false;
+! 	 }
+        }
+        else
+        {
+! 	 pkgCache::VerIterator V = Plcy.GetCandidateVer(Pkg);
+! 	 if (V.end() == true || V.FileList().end() == true)
+! 	    continue;
+! 	 if (DisplayRecord(V) == false)
+! 	    return false;
+        }      
+     }
+  
+--- 1439,1458 ----
+        // Find the proper version to use.
+        if (_config->FindB("APT::Cache::AllVersions","true") == true)
+        {
+! 				 pkgCache::VerIterator V;
+! 				 for (V = Pkg.VersionList(); V.end() == false; V++)
+! 				 {
+! 				    if (DisplayRecord(V) == false)
+! 				       return false;
+! 				 }
+        }
+        else
+        {
+! 				 pkgCache::VerIterator V = Plcy.GetCandidateVer(Pkg);
+! 				 if (V.end() == true || V.FileList().end() == true)
+! 				    continue;
+! 				 if (DisplayRecord(V) == false)
+! 				    return false;
+        }      
+     }
+  
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/patches/linux/apt-debian-system.patch	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,20 @@
+diff -crB apt-0.7.20.2/apt-pkg/init.cc apt-0.7.20.2-rhel/apt-pkg/init.cc
+*** apt-0.7.20.2/apt-pkg/init.cc	2009-02-07 17:09:35.000000000 +0200
+--- apt-0.7.20.2-rhel/apt-pkg/init.cc	2009-07-07 16:42:07.000000000 +0300
+***************
+*** 116,122 ****
+  bool pkgInitSystem(Configuration &Cnf,pkgSystem *&Sys)
+  {
+     Sys = 0;
+!    string Label = Cnf.Find("Apt::System","");
+     if (Label.empty() == false)
+     {
+        Sys = pkgSystem::GetSystem(Label.c_str());
+--- 116,122 ----
+  bool pkgInitSystem(Configuration &Cnf,pkgSystem *&Sys)
+  {
+     Sys = 0;
+!    string Label = Cnf.Find("Apt::System","Debian dpkg interface");
+     if (Label.empty() == false)
+     {
+        Sys = pkgSystem::GetSystem(Label.c_str());
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/patches/linux/apt-ftp-archive-ret.patch	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,245 @@
+*** apt-0.7.20.2/apt-pkg/contrib/error.cc	2009-02-07 17:09:35.000000000 +0200
+--- apt-0.7.20.2-rhel/apt-pkg/contrib/error.cc	2010-01-08 20:16:34.000000000 +0200
+***************
+*** 205,210 ****
+--- 205,226 ----
+  	 cerr << "W: " << Err << endl;
+     }
+  }
++ 
++ string GlobalError::GetErrorDump()
++ {
++    string err;
++    Item *item = List;
++    while (item)
++    {
++       if (item->Error)
++          err += "E: ";
++       else
++ 	 err += "W: ";
++       err += item->Text + "\n";
++       item = item->Next;
++    }
++    return err;
++ }
+  									/*}}}*/
+  // GlobalError::Discard - Discard									/*{{{*/
+  // ---------------------------------------------------------------------
+*** apt-0.7.20.2/apt-pkg/contrib/error.h	2009-02-07 17:09:35.000000000 +0200
+--- apt-0.7.20.2-rhel/apt-pkg/contrib/error.h	2010-01-08 20:16:35.000000000 +0200
+***************
+*** 87,92 ****
+--- 87,93 ----
+  
+     // Usefull routine to dump to cerr
+     void DumpErrors();
++    string GetErrorDump();
+     
+     GlobalError();
+  };
+*** apt-0.7.20.2/ftparchive/apt-ftparchive.cc	2009-02-07 17:09:35.000000000 +0200
+--- apt-0.7.20.2-rhel/ftparchive/apt-ftparchive.cc	2010-01-08 20:16:59.000000000 +0200
+***************
+*** 729,739 ****
+     if (CmdL.FileSize() <= 2)
+     {
+        for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+! 	 if (I->GenPackages(Setup,Stats) == false)
+! 	    _error->DumpErrors();
+        for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+! 	 if (I->GenSources(Setup,SrcStats) == false)
+! 	    _error->DumpErrors();
+     }
+     else
+     {
+--- 729,737 ----
+     if (CmdL.FileSize() <= 2)
+     {
+        for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+! 	 			I->GenPackages(Setup,Stats);
+        for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+! 	 			I->GenSources(Setup,SrcStats);
+     }
+     else
+     {
+***************
+*** 758,764 ****
+  	 delete [] List;
+  	 return _error->Error(_("No selections matched"));
+        }
+-       _error->DumpErrors();
+        
+        // Do the generation for Packages
+        for (End = List; End->Str != 0; End++)
+--- 756,761 ----
+***************
+*** 769,776 ****
+  	 PackageMap *I = (PackageMap *)End->UserData;
+  	 if (I->PkgDone == true)
+  	    continue;
+! 	 if (I->GenPackages(Setup,Stats) == false)
+! 	    _error->DumpErrors();
+        }
+        
+        // Do the generation for Sources
+--- 766,772 ----
+  	 PackageMap *I = (PackageMap *)End->UserData;
+  	 if (I->PkgDone == true)
+  	    continue;
+! 	 		I->GenPackages(Setup,Stats);
+        }
+        
+        // Do the generation for Sources
+***************
+*** 782,789 ****
+  	 PackageMap *I = (PackageMap *)End->UserData;
+  	 if (I->SrcDone == true)
+  	    continue;
+! 	 if (I->GenSources(Setup,SrcStats) == false)
+! 	    _error->DumpErrors();
+        }
+        
+        delete [] List;
+--- 778,784 ----
+  	 PackageMap *I = (PackageMap *)End->UserData;
+  	 if (I->SrcDone == true)
+  	    continue;
+! 	 		I->GenSources(Setup,SrcStats);
+        }
+        
+        delete [] List;
+***************
+*** 837,845 ****
+  	    continue;
+        }
+        
+!       if (I->GenContents(Setup,PkgList.begin(),PkgList.end(),
+! 			 MaxContentsChange) == false)
+! 	 _error->DumpErrors();
+        
+        // Hit the limit?
+        if (MaxContentsChange == 0)
+--- 832,838 ----
+  	    continue;
+        }
+        
+!       I->GenContents(Setup,PkgList.begin(),PkgList.end(), MaxContentsChange);
+        
+        // Hit the limit?
+        if (MaxContentsChange == 0)
+***************
+*** 885,892 ****
+     {
+        c0out << I->BinCacheDB << endl;
+        CacheDB DB(flCombine(CacheDir,I->BinCacheDB));
+!       if (DB.Clean() == false)
+! 	 _error->DumpErrors();
+        
+        string CacheDB = I->BinCacheDB;
+        for (; I != PkgList.end() && I->BinCacheDB == CacheDB; I++);
+--- 878,884 ----
+     {
+        c0out << I->BinCacheDB << endl;
+        CacheDB DB(flCombine(CacheDir,I->BinCacheDB));
+!       DB.Clean();
+        
+        string CacheDB = I->BinCacheDB;
+        for (; I != PkgList.end() && I->BinCacheDB == CacheDB; I++);
+***************
+*** 955,961 ****
+     {
+        bool Errors = _error->PendingError();
+        _error->DumpErrors();
+!       return Errors == true?100:0;
+     }
+     return 0;
+  }
+--- 947,953 ----
+     {
+        bool Errors = _error->PendingError();
+        _error->DumpErrors();
+!       return Errors == true ? 100 : 0;
+     }
+     return 0;
+  }
+*** apt-0.7.20.2/ftparchive/multicompress.cc	2009-02-07 17:09:35.000000000 +0200
+--- apt-0.7.20.2-rhel/ftparchive/multicompress.cc	2010-01-08 20:16:59.000000000 +0200
+***************
+*** 178,184 ****
+        Child(Pipe[0]);
+        if (_error->PendingError() == true)
+        {
+- 	 _error->DumpErrors();
+  	 _exit(100);
+        }      
+        _exit(0);
+--- 178,183 ----
+*** apt-0.7.20.2/ftparchive/writer.cc	2009-02-07 17:09:35.000000000 +0200
+--- apt-0.7.20.2-rhel/ftparchive/writer.cc	2010-01-08 20:17:00.000000000 +0200
+***************
+*** 118,145 ****
+     else
+        Owner->DoPackage(File);
+     
+!    if (_error->empty() == false)
+     {
+!       // Print any errors or warnings found
+!       string Err;
+!       bool SeenPath = false;
+!       while (_error->empty() == false)
+!       {
+! 	 Owner->NewLine(1);
+! 	 
+! 	 bool Type = _error->PopMessage(Err);
+! 	 if (Type == true)
+! 	    cerr << _("E: ") << Err << endl;
+! 	 else
+! 	    cerr << _("W: ") << Err << endl;
+! 	 
+! 	 if (Err.find(File) != string::npos)
+! 	    SeenPath = true;
+!       }      
+!       
+!       if (SeenPath == false)
+! 	 cerr << _("E: Errors apply to file ") << "'" << File << "'" << endl;
+!       return 0;
+     }
+     
+     return 0;
+--- 118,132 ----
+     else
+        Owner->DoPackage(File);
+     
+!    if (!_error->empty())
+     {
+!       Owner->NewLine(1); // is this needed?
+! 	
+!       // HH: Show which file the errors apply to if it is not already
+!       // mentioned in the previous errors
+!       string errdump = _error->GetErrorDump();
+!       if (errdump.find(File) == string::npos)
+! 	 _error->Error("Errors apply to file '%s'", File);
+     }
+     
+     return 0;
+***************
+*** 320,327 ****
+  
+     if (ExtOverrides.empty() == false)
+        Over.ReadExtraOverride(ExtOverrides);
+- 
+-    _error->DumpErrors();
+  }
+                                                                          /*}}}*/
+  // FTWScanner::SetExts - Set extensions to support                      /*{{{*/
+--- 307,312 ----
+***************
+*** 794,800 ****
+        if (_error->empty() == false)
+        {
+  	 _error->Error("Errors apply to file '%s'",File.c_str());
+- 	 _error->DumpErrors();
+        }
+     }
+     
+--- 779,784 ----
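
Note on apt-ftp-archive-ret.patch: the patch removes the scattered
_error->DumpErrors() calls, which print and discard the error list as a side
effect, and adds GlobalError::GetErrorDump() so callers can read the pending
errors without clearing them; errors therefore stay pending until the end of
main(), where apt-ftparchive now exits with status 100 instead of silently
returning 0. A minimal self-contained sketch of the idea, not APT's real
GlobalError:

#include <iostream>
#include <string>
#include <vector>

class ErrorList
{
   struct Item { bool Error; std::string Text; };
   std::vector<Item> Items;

   public:

   void Warning(const std::string &T) { Item I = {false, T}; Items.push_back(I); }
   void Error(const std::string &T) { Item I = {true, T}; Items.push_back(I); }

   bool PendingError() const
   {
      for (size_t i = 0; i != Items.size(); ++i)
         if (Items[i].Error == true)
            return true;
      return false;
   }

   // Like the patch: serialize to a string, leaving the list untouched,
   // where DumpErrors() would have printed and discarded it.
   std::string GetErrorDump() const
   {
      std::string err;
      for (size_t i = 0; i != Items.size(); ++i)
         err += std::string(Items[i].Error ? "E: " : "W: ") + Items[i].Text + "\n";
      return err;
   }
};

int main()
{
   ErrorList E;
   E.Warning("override entry missing");
   E.Error("could not open Packages file");
   std::cout << E.GetErrorDump();
   return E.PendingError() == true ? 100 : 0; // apt-ftparchive's exit code
}
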
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/patches/linux/dpkg-posix.patch	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,21 @@
+
+--- archives.c.orig	2009-01-25 17:47:54.539050000 +0100
++++ archives.c	2009-01-25 17:49:48.270450000 +0100
+@@ -1177,9 +1177,14 @@
+   varbufinit(&fnametmpvb);
+   varbufinit(&fnamenewvb);
+ 
+-  varbufaddstr(&fnamevb,instdir); varbufaddc(&fnamevb,'/');
+-  varbufaddstr(&fnametmpvb,instdir); varbufaddc(&fnametmpvb,'/');
+-  varbufaddstr(&fnamenewvb,instdir); varbufaddc(&fnamenewvb,'/');
++  // Workaround for POSIX. POSIX says paths that begin with
++  // '//' have implementation-defined behaviour.
++  // See also:
++  // http://www.opengroup.org/onlinepubs/9699919799/ \
++  //   basedefs/V1_chap03.html#tag_03_266
++  varbufaddstr(&fnamevb,instdir); varbufaddstr(&fnamevb,"///");
++  varbufaddstr(&fnametmpvb,instdir); varbufaddstr(&fnametmpvb,"///");
++  varbufaddstr(&fnamenewvb,instdir); varbufaddstr(&fnamenewvb,"///");
+   fnameidlu= fnamevb.used;
+ 
+   ensure_diversions();
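
Note on dpkg-posix.patch: POSIX treats a pathname that begins with exactly two
slashes as implementation-defined, while one slash, or three and more, are all
equivalent to a single slash. Appending "///" instead of "/" therefore
guarantees the concatenated name can never start with exactly "//", whatever
instdir happens to be. A small self-checking sketch of the trick (the helper
name is illustrative):

#include <cassert>
#include <string>

// dpkg appends the per-file path after this prefix.
static std::string JoinRoot(const std::string &instdir)
{
   return instdir + "///";
}

int main()
{
   // With a plain "/" separator, instdir "/" would produce a name with
   // exactly two leading slashes -- the implementation-defined case.
   assert((std::string("/") + "/").substr(0, 2) == "//");

   // With "///" the prefix has at least three slashes, which POSIX treats
   // the same as one.
   assert(JoinRoot("/") == "////");
   assert(JoinRoot("/opt/blocks") == "/opt/blocks///");
   return 0;
}
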
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/patches/linux/dpkg-remove-chown.patch	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,62 @@
+diff -Naur dpkg-1.14.23.orig/src/archives.c dpkg-1.14.23/src/archives.c
+--- dpkg-1.14.23.orig/src/archives.c	2008-11-18 12:57:34.000000000 +0200
++++ dpkg-1.14.23/src/archives.c	2008-12-16 20:18:28.000000000 +0200
+@@ -267,10 +267,11 @@
+ }
+ 
+ static void newtarobject_allmodes(const char *path, struct TarInfo *ti, struct filestatoverride* statoverride) {
++  /* Blocks does not need this
+   if (chown(path,
+ 	    statoverride ? statoverride->uid : ti->UserID,
+ 	    statoverride ? statoverride->gid : ti->GroupID))
+-    ohshite(_("error setting ownership of `%.255s'"),ti->Name);
++    ohshite(_("error setting ownership of `%.255s'"),ti->Name);*/
+   if (chmod(path,(statoverride ? statoverride->mode : ti->Mode) & ~S_IFMT))
+     ohshite(_("error setting permissions of `%.255s'"),ti->Name);
+   newtarobject_utime(path,ti);
+@@ -664,10 +665,11 @@
+ 			  nifd->namenode->statoverride->uid,
+ 			  nifd->namenode->statoverride->gid,
+ 			  nifd->namenode->statoverride->mode);
++    /* Blocks does not need this
+     if (fchown(fd,
+ 	    nifd->namenode->statoverride ? nifd->namenode->statoverride->uid : ti->UserID,
+ 	    nifd->namenode->statoverride ? nifd->namenode->statoverride->gid : ti->GroupID))
+-      ohshite(_("error setting ownership of `%.255s'"),ti->Name);
++      ohshite(_("error setting ownership of `%.255s'"),ti->Name);*/
+     am=(nifd->namenode->statoverride ? nifd->namenode->statoverride->mode : ti->Mode) & ~S_IFMT;
+     if (fchmod(fd,am))
+       ohshite(_("error setting permissions of `%.255s'"),ti->Name);
+@@ -708,6 +710,7 @@
+     if (symlink(ti->LinkName,fnamenewvb.buf))
+       ohshite(_("error creating symbolic link `%.255s'"),ti->Name);
+     debug(dbg_eachfiledetail,"tarobject SymbolicLink creating");
++    /* Blocks does not need this
+ #ifdef HAVE_LCHOWN
+     if (lchown(fnamenewvb.buf,
+ 	    nifd->namenode->statoverride ? nifd->namenode->statoverride->uid : ti->UserID,
+@@ -718,7 +721,7 @@
+ 	    nifd->namenode->statoverride ? nifd->namenode->statoverride->uid : ti->UserID,
+ 	    nifd->namenode->statoverride ? nifd->namenode->statoverride->gid : ti->GroupID))
+       ohshite(_("error setting ownership of symlink `%.255s'"),ti->Name);
+-#endif
++#endif*/
+     break;
+   case Directory:
+     /* We've already checked for an existing directory. */
+@@ -772,13 +775,14 @@
+       symlinkfn.used= r; varbufaddc(&symlinkfn,0);
+       if (symlink(symlinkfn.buf,fnametmpvb.buf))
+         ohshite(_("unable to make backup symlink for `%.255s'"),ti->Name);
++    /* Blocks does not need this
+ #ifdef HAVE_LCHOWN
+       if (lchown(fnametmpvb.buf,stab.st_uid,stab.st_gid))
+         ohshite(_("unable to chown backup symlink for `%.255s'"),ti->Name);
+ #else
+       if (chown(fnametmpvb.buf,stab.st_uid,stab.st_gid))
+         ohshite(_("unable to chown backup symlink for `%.255s'"),ti->Name);
+-#endif
++#endif*/
+     } else {
+       debug(dbg_eachfiledetail,"tarobject nondirectory, `link' backup");
+       if (link(fnamevb.buf,fnametmpvb.buf))
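
Note on dpkg-remove-chown.patch: every chown/fchown/lchown is commented out
because Blocks extracts packages into a user-owned workspace as an ordinary
user, where changing ownership would fail with EPERM; the chmod calls are
kept, so file modes are still applied. A hedged sketch of a softer variant
that makes ownership conditional instead of deleting it outright (the function
and flag below are illustrative, not dpkg code):

#include <sys/types.h>
#include <sys/stat.h>
#include <unistd.h>
#include <cstdio>

static int apply_modes(const char *path, uid_t uid, gid_t gid, mode_t mode,
                       bool preserve_ownership)
{
   if (preserve_ownership && chown(path, uid, gid) != 0)
   {
      std::perror("chown");
      return -1;
   }
   // The patched dpkg still applies permissions.
   if (chmod(path, mode & ~S_IFMT) != 0)
   {
      std::perror("chmod");
      return -1;
   }
   return 0;
}

int main(int argc, char **argv)
{
   if (argc < 2)
      return 2;
   // Unprivileged runs (geteuid() != 0) cannot chown to arbitrary users,
   // which is why the Blocks patch drops the calls entirely.
   return apply_modes(argv[1], 0, 0, 0644, geteuid() == 0) == 0 ? 0 : 1;
}
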
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/patches/linux/dpkg-remove-dbcheck.patch	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,14 @@
+--- dpkg-1.14.23.orig/lib/dbmodify.c	2008-11-18 12:57:33.000000000 +0200
++++ dpkg-1.14.23/lib/dbmodify.c	2009-02-23 14:50:36.000000000 +0200
+@@ -153,8 +153,10 @@
+   switch (readwritereq) {
+   case msdbrw_needsuperuser:
+   case msdbrw_needsuperuserlockonly:
+-    if (getuid() || geteuid())
++    /* Not required by blocks */
++    /* if (getuid() || geteuid())
+       ohshit(_("requested operation requires superuser privilege"));
++    */
+     /* fall through */
+   case msdbrw_write: case msdbrw_writeifposs:
+     if (access(adir,W_OK)) {
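
Note on dpkg-remove-dbcheck.patch: the superuser check is disabled because
Blocks keeps its dpkg status database in a user-writable admin directory, so
root privileges buy nothing; the access(adir, W_OK) test that follows in the
same function remains the effective guard. A minimal sketch of the check that
survives (the directory constant is illustrative):

#include <unistd.h>
#include <cstdio>

int main()
{
   const char *adir = "/var/lib/dpkg"; // Blocks points this at a workspace dir
   if (access(adir, W_OK) != 0)
   {
      std::fprintf(stderr, "unable to write to dpkg status area %s\n", adir);
      return 2;
   }
   std::puts("status area is writable; no superuser needed");
   return 0;
}
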
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/patches/linux/dpkg-remove-pathcheck.patch	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,18 @@
+diff -Naur dpkg-1.14.23.orig/src/help.c dpkg-1.14.23/src/help.c
+--- dpkg-1.14.23.orig/src/help.c	2008-11-18 12:57:34.000000000 +0200
++++ dpkg-1.14.23/src/help.c	2008-12-22 10:48:34.000000000 +0200
+@@ -77,12 +77,12 @@
+ 
+ void checkpath(void) {
+ /* Verify that some programs can be found in the PATH. */
+-  static const char *const checklist[]= { "ldconfig", 
++  static const char *const checklist[]= { /*"ldconfig", 
+ #if WITH_START_STOP_DAEMON
+     "start-stop-daemon",
+ #endif    
+     "install-info",
+-    "update-rc.d",
++    "update-rc.d",*/
+     NULL
+   };
+ 
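
Note on dpkg-remove-pathcheck.patch: checkpath() walks a NULL-terminated list
of Debian maintainer tools (ldconfig, update-rc.d, ...) and refuses to work if
any is missing from PATH; Blocks packages never invoke those tools, so the
patch empties the checklist down to its terminator. A sketch of the disabled
check (find_in_path is an illustrative helper, not dpkg's implementation):

#include <cstdio>
#include <cstdlib>
#include <string>
#include <unistd.h>

static bool find_in_path(const char *prog)
{
   const char *path = std::getenv("PATH");
   if (path == 0)
      return false;
   std::string dirs(path);
   size_t start = 0;
   while (start <= dirs.size())
   {
      size_t end = dirs.find(':', start);
      if (end == std::string::npos)
         end = dirs.size();
      std::string candidate = dirs.substr(start, end - start) + "/" + prog;
      if (access(candidate.c_str(), X_OK) == 0)
         return true;
      start = end + 1;
   }
   return false;
}

int main()
{
   // After the patch the checklist holds only the terminator, so the loop
   // below never runs; the original entries stay behind as comments.
   static const char *const checklist[] = { /* "ldconfig", "update-rc.d", */ NULL };
   int missing = 0;
   for (const char *const *p = checklist; *p != NULL; p++)
      if (find_in_path(*p) == false)
      {
         std::fprintf(stderr, "%s not found on PATH\n", *p);
         missing++;
      }
   return missing != 0 ? 2 : 0;
}
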
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/patches/windows/apt-win.patch	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,722 @@
+diff -uwrBN apt-0.7.20.2/Makefile apt-0.7.20.2-win/Makefile
+--- apt-0.7.20.2/Makefile	2009-02-07 17:09:35.000000000 +0200
++++ apt-0.7.20.2-win/Makefile	1970-01-01 02:00:00.000000000 +0200
+@@ -1,34 +0,0 @@
+-# -*- make -*-
+-
+-# This is the top level make file for APT, it recurses to each lower
+-# level make file and runs it with the proper target
+-ifndef NOISY
+-.SILENT:
+-endif
+-
+-.PHONY: default
+-default: startup all
+-
+-.PHONY: headers library clean veryclean all binary program doc
+-all headers library clean veryclean binary program doc dirs:
+-	$(MAKE) -C apt-pkg $@
+-	$(MAKE) -C apt-inst $@
+-	$(MAKE) -C methods $@
+-	$(MAKE) -C cmdline $@
+-	$(MAKE) -C ftparchive $@
+-	$(MAKE) -C dselect $@
+-	$(MAKE) -C doc $@
+-	$(MAKE) -C po $@
+-
+-# Some very common aliases
+-.PHONY: maintainer-clean dist-clean distclean pristine sanity 
+-maintainer-clean dist-clean distclean pristine sanity: veryclean
+-
+-# The startup target builds the necessary configure scripts. It should
+-# be used after a CVS checkout.
+-CONVERTED=environment.mak include/config.h include/apti18n.h build/doc/Doxyfile makefile
+-include buildlib/configure.mak
+-$(BUILDDIR)/include/config.h: buildlib/config.h.in
+-$(BUILDDIR)/include/apti18n.h: buildlib/apti18n.h.in
+-$(BUILDDIR)/environment.mak: buildlib/environment.mak.in
+-$(BUILDDIR)/makefile: buildlib/makefile.in
+diff -uwrBN apt-0.7.20.2/Makefile2 apt-0.7.20.2-win/Makefile2
+--- apt-0.7.20.2/Makefile2	1970-01-01 02:00:00.000000000 +0200
++++ apt-0.7.20.2-win/Makefile2	2010-03-25 19:31:17.957149000 +0200
+@@ -0,0 +1,34 @@
++# -*- make -*-
++
++# This is the top level make file for APT, it recurses to each lower
++# level make file and runs it with the proper target
++ifndef NOISY
++.SILENT:
++endif
++
++.PHONY: default
++default: startup all
++
++.PHONY: headers library clean veryclean all binary program doc
++all headers library clean veryclean binary program doc dirs:
++	$(MAKE) -C apt-pkg $@
++	$(MAKE) -C apt-inst $@
++	$(MAKE) -C methods $@
++	$(MAKE) -C cmdline $@
++	$(MAKE) -C ftparchive $@
++	$(MAKE) -C dselect $@
++	$(MAKE) -C doc $@
++	$(MAKE) -C po $@
++
++# Some very common aliases
++.PHONY: maintainer-clean dist-clean distclean pristine sanity 
++maintainer-clean dist-clean distclean pristine sanity: veryclean
++
++# The startup target builds the necessary configure scripts. It should
++# be used after a CVS checkout.
++CONVERTED=environment.mak include/config.h include/apti18n.h build/doc/Doxyfile makefile
++include buildlib/configure.mak
++$(BUILDDIR)/include/config.h: buildlib/config.h.in
++$(BUILDDIR)/include/apti18n.h: buildlib/apti18n.h.in
++$(BUILDDIR)/environment.mak: buildlib/environment.mak.in
++$(BUILDDIR)/makefile: buildlib/makefile.in
+diff -uwrBN apt-0.7.20.2/apt-pkg/contrib/configuration.cc apt-0.7.20.2-win/apt-pkg/contrib/configuration.cc
+--- apt-0.7.20.2/apt-pkg/contrib/configuration.cc	2009-02-07 17:09:35.000000000 +0200
++++ apt-0.7.20.2-win/apt-pkg/contrib/configuration.cc	2010-03-25 19:31:18.327689200 +0200
+@@ -191,7 +191,9 @@
+    while (Itm->Parent != 0 && Itm->Parent->Value.empty() == false)
+    {	 
+       // Absolute
+-      if (val.length() >= 1 && val[0] == '/')
++      // HH: Windows port
++      if (val.length() >= 3 && val[1] == ':' && val[2] == '/')
++      //if (val.length() >= 1 && val[0] == '/')
+          break;
+ 
+       // ~/foo or ./foo 
+diff -uwrBN apt-0.7.20.2/apt-pkg/contrib/error.cc apt-0.7.20.2-win/apt-pkg/contrib/error.cc
+--- apt-0.7.20.2/apt-pkg/contrib/error.cc	2009-02-07 17:09:35.000000000 +0200
++++ apt-0.7.20.2-win/apt-pkg/contrib/error.cc	2010-03-25 19:31:18.347718400 +0200
+@@ -205,6 +205,22 @@
+ 	 cerr << "W: " << Err << endl;
+    }
+ }
++
++string GlobalError::GetErrorDump()
++{
++   string err;
++   Item *item = List;
++   while (item)
++   {
++      if (item->Error)
++         err += "E: ";
++      else
++	 err += "W: ";
++      err += item->Text + "\n";
++      item = item->Next;
++   }
++   return err;
++}
+ 									/*}}}*/
+ // GlobalError::Discard - Discard									/*{{{*/
+ // ---------------------------------------------------------------------
+diff -uwrBN apt-0.7.20.2/apt-pkg/contrib/error.h apt-0.7.20.2-win/apt-pkg/contrib/error.h
+--- apt-0.7.20.2/apt-pkg/contrib/error.h	2009-02-07 17:09:35.000000000 +0200
++++ apt-0.7.20.2-win/apt-pkg/contrib/error.h	2010-03-25 19:31:18.347718400 +0200
+@@ -87,6 +87,7 @@
+ 
+    // Usefull routine to dump to cerr
+    void DumpErrors();
++   string GetErrorDump();
+    
+    GlobalError();
+ };
+diff -uwrBN apt-0.7.20.2/apt-pkg/contrib/strutl.cc apt-0.7.20.2-win/apt-pkg/contrib/strutl.cc
+--- apt-0.7.20.2/apt-pkg/contrib/strutl.cc	2009-02-07 17:09:35.000000000 +0200
++++ apt-0.7.20.2-win/apt-pkg/contrib/strutl.cc	2010-03-25 19:31:18.417820600 +0200
+@@ -43,7 +43,8 @@
+ {
+   iconv_t cd;
+   const char *inbuf;
+-  char *inptr, *outbuf, *outptr;
++  char *inptr;
++  char *outbuf, *outptr;
+   size_t insize, outsize;
+   
+   cd = iconv_open(codeset, "UTF-8");
+@@ -397,7 +398,9 @@
+    U.Access.clear();
+    
+    // "\x00-\x20{}|\\\\^\\[\\]<>\"\x7F-\xFF";
+-   string NewURI = QuoteString(U,"\\|{}[]<>\"^~_=!@#$%^&*");
++   // HH: Windows port
++   // Added : and ? to quoting
++   string NewURI = QuoteString(U,"\\|{}[]<>\"^~_=!@#$%^&*:?");
+    replace(NewURI.begin(),NewURI.end(),'/','_');
+    return NewURI;
+ }
+@@ -1090,15 +1093,18 @@
+    for (; I < U.end() && *I != ':' ; I++);
+    string::const_iterator FirstColon = I;
+ 
++	 // HH: Windows port
++
+    /* Determine if this is a host type URI with a leading double //
+       and then search for the first single / */
+    string::const_iterator SingleSlash = I;
++   bool InBracket = false;
+    if (I + 3 < U.end() && I[1] == '/' && I[2] == '/')
++   {
+       SingleSlash += 3;
+    
+    /* Find the / indicating the end of the hostname, ignoring /'s in the
+       square brackets */
+-   bool InBracket = false;
+    for (; SingleSlash < U.end() && (*SingleSlash != '/' || InBracket == true); SingleSlash++)
+    {
+       if (*SingleSlash == '[')
+@@ -1106,6 +1112,11 @@
+       if (InBracket == true && *SingleSlash == ']')
+ 	 InBracket = false;
+    }
++	 }
++	 else // single slash?
++	 {
++	 		SingleSlash = I + 1;
++	 }
+    
+    if (SingleSlash > U.end())
+       SingleSlash = U.end();
+@@ -1113,7 +1124,9 @@
+    // We can now write the access and path specifiers
+    Access.assign(U.begin(),FirstColon);
+    if (SingleSlash != U.end())
++   {
+       Path.assign(SingleSlash,U.end());
++   }
+    if (Path.empty() == true)
+       Path = "/";
+ 
+diff -uwrBN apt-0.7.20.2/apt-pkg/deb/debsystem.cc apt-0.7.20.2-win/apt-pkg/deb/debsystem.cc
+--- apt-0.7.20.2/apt-pkg/deb/debsystem.cc	2009-02-07 17:09:35.000000000 +0200
++++ apt-0.7.20.2-win/apt-pkg/deb/debsystem.cc	2010-03-26 11:59:47.087515500 +0200
+@@ -24,7 +24,6 @@
+ #include <errno.h>
+ 									/*}}}*/
+ 
+-debSystem debSys;
+ 
+ // System::debSystem - Constructor					/*{{{*/
+ // ---------------------------------------------------------------------
+diff -uwrBN apt-0.7.20.2/apt-pkg/deb/dpkgpm.cc apt-0.7.20.2-win/apt-pkg/deb/dpkgpm.cc
+--- apt-0.7.20.2/apt-pkg/deb/dpkgpm.cc	2009-02-07 17:09:35.000000000 +0200
++++ apt-0.7.20.2-win/apt-pkg/deb/dpkgpm.cc	2010-03-25 19:31:18.507952000 +0200
+@@ -41,6 +41,33 @@
+ 
+ using namespace std;
+ 
++// HH
++typedef void (*sighandler_t)(int signum);
++
++void* memrchr(void *buffer, int c, size_t n)
++{
++ 	unsigned char *p = reinterpret_cast<unsigned char*>(buffer);
++
++  for (p += n; n ; n--)
++    if (*--p == c)
++      return p;
++  return NULL;
++}
++
++#if defined(__CYGWIN__)
++/* Workaround for Cygwin, which is missing cfmakeraw */
++/* Pasted from man page; added in serial.c arbitrarily */
++void cfmakeraw(struct termios *termios_p)
++{
++    termios_p->c_iflag &= ~(IGNBRK|BRKINT|PARMRK|ISTRIP|INLCR|IGNCR|ICRNL|IXON);
++    termios_p->c_oflag &= ~OPOST;
++    termios_p->c_lflag &= ~(ECHO|ECHONL|ICANON|ISIG|IEXTEN);
++    termios_p->c_cflag &= ~(CSIZE|PARENB);
++    termios_p->c_cflag |= CS8;
++}
++#endif /* defined(__CYGWIN__) */
++
++
+ namespace
+ {
+   // Maps the dpkg "processing" info to human readable names.  Entry 0
+@@ -737,7 +764,9 @@
+       {
+ 	 for (;I != J && Size < MaxArgBytes; I++)
+ 	 {
+-	    if (I->File[0] != '/')
++			// HH: Windows port
++			if (I->File[1] != ':' && I->File[2] != '/')
++		  //if (I->File[0] != '/')
+ 	       return _error->Error("Internal Error, Pathname to install is not absolute '%s'",I->File.c_str());
+ 	    Args[n++] = I->File.c_str();
+ 	    Size += strlen(Args[n-1]);
+@@ -812,7 +841,7 @@
+ 	 if(slave >= 0 && master >= 0) 
+ 	 {
+ 	    setsid();
+-	    ioctl(slave, TIOCSCTTY, 0);
++	    ioctl(slave, /*TIOCSCTTY*/O_NOCTTY, 0);
+ 	    close(master);
+ 	    dup2(slave, 0);
+ 	    dup2(slave, 1);
+diff -uwrBN apt-0.7.20.2/apt-pkg/deb/dpkgpm.h apt-0.7.20.2-win/apt-pkg/deb/dpkgpm.h
+--- apt-0.7.20.2/apt-pkg/deb/dpkgpm.h	2009-02-07 17:09:35.000000000 +0200
++++ apt-0.7.20.2-win/apt-pkg/deb/dpkgpm.h	2010-03-25 19:31:18.507952000 +0200
+@@ -18,6 +18,8 @@
+ using std::vector;
+ using std::map;
+ 
++// HH
++void *memrchr (const void *buffer, int c, size_t n);
+ 
+ class pkgDPkgPM : public pkgPackageManager
+ {
+diff -uwrBN apt-0.7.20.2/apt-pkg/init.cc apt-0.7.20.2-win/apt-pkg/init.cc
+--- apt-0.7.20.2/apt-pkg/init.cc	2009-02-07 17:09:35.000000000 +0200
++++ apt-0.7.20.2-win/apt-pkg/init.cc	2010-03-26 12:00:25.022820300 +0200
+@@ -17,6 +17,8 @@
+ #include <cstdlib>
+ #include <sys/stat.h>
+ 									/*}}}*/
++#include "deb/debsystem.h"
++debSystem debSys;
+ 
+ #define Stringfy_(x) # x
+ #define Stringfy(x)  Stringfy_(x)
+@@ -116,7 +118,7 @@
+ bool pkgInitSystem(Configuration &Cnf,pkgSystem *&Sys)
+ {
+    Sys = 0;
+-   string Label = Cnf.Find("Apt::System","");
++	string Label = Cnf.Find("Apt::System","Debian dpkg interface");
+    if (Label.empty() == false)
+    {
+       Sys = pkgSystem::GetSystem(Label.c_str());
+diff -uwrBN apt-0.7.20.2/buildlib/makefile.in apt-0.7.20.2-win/buildlib/makefile.in
+--- apt-0.7.20.2/buildlib/makefile.in	2009-02-07 17:09:35.000000000 +0200
++++ apt-0.7.20.2-win/buildlib/makefile.in	2010-03-25 19:31:18.778346200 +0200
+@@ -15,7 +15,7 @@
+ .PHONY: headers library clean veryclean all binary program doc \
+         veryclean/local
+ all headers library clean veryclean binary program doc:
+-	$(MAKE) -C $(SRCDIR) -f Makefile $@
++	$(MAKE) -C $(SRCDIR) -f Makefile2 $@
+ 
+ # Purge everything.
+ .PHONY: maintainer-clean dist-clean pristine sanity distclean
+@@ -30,7 +30,7 @@
+ # and run make dirs and have the shims updated.
+ .PHONY: dirs
+ dirs: 
+-	$(MAKE) -C $(SRCDIR) -f Makefile $@
++	$(MAKE) -C $(SRCDIR) -f Makefile2 $@
+ ifeq ($(HAVE_C9X),yes)
+ 	@rm -f include/inttypes.h > /dev/null 2>&1
+ else
+diff -uwrBN apt-0.7.20.2/cmdline/apt-cache.cc apt-0.7.20.2-win/cmdline/apt-cache.cc
+--- apt-0.7.20.2/cmdline/apt-cache.cc	2009-02-07 17:09:35.000000000 +0200
++++ apt-0.7.20.2-win/cmdline/apt-cache.cc	2010-03-25 19:31:18.838433800 +0200
+@@ -1233,6 +1233,13 @@
+ 
+    // Get a pointer to start of Description field
+    const unsigned char *DescP = (unsigned char*)strstr((char*)Buffer, "Description:");
++   // HH: Bug fix. If descriptions are not found quit now to prevent crash
++   if (DescP == NULL)
++   {
++   		cout << "E: Malformed packages inserted into cache. Description field missing!";
++   		delete [] Buffer;
++   		return false;
++   }
+ 
+    // Write all but Description
+    if (fwrite(Buffer,1,DescP - Buffer,stdout) < (size_t)(DescP - Buffer))
+@@ -1282,6 +1289,12 @@
+ bool Search(CommandLine &CmdL)
+ {
+    pkgCache &Cache = *GCache;
++   // HH: Bug fix. No need to do anything if no packages
++   if (Cache.HeaderP->PackageCount == 0)
++   {
++   		return true;
++   }
++   	
+    bool ShowFull = _config->FindB("APT::Cache::ShowFull",false);
+    bool NamesOnly = _config->FindB("APT::Cache::NamesOnly",false);
+    unsigned NumPatterns = CmdL.FileSize() -1;
+@@ -1316,7 +1329,8 @@
+    }
+    
+    ExDescFile *DFList = new ExDescFile[Cache.HeaderP->PackageCount+1];
+-   memset(DFList,0,sizeof(*DFList)*Cache.HeaderP->PackageCount+1);
++   // HH: Bug fix. Memset all the memory
++   memset(DFList, 0, sizeof(*DFList) * (Cache.HeaderP->PackageCount + 1));
+ 
+    // Map versions that we want to write out onto the VerList array.
+    for (pkgCache::PkgIterator P = Cache.PkgBegin(); P.end() == false; P++)
+diff -uwrBN apt-0.7.20.2/configure-cygwin apt-0.7.20.2-win/configure-cygwin
+--- apt-0.7.20.2/configure-cygwin	1970-01-01 02:00:00.000000000 +0200
++++ apt-0.7.20.2-win/configure-cygwin	2010-03-25 19:31:18.898521400 +0200
+@@ -0,0 +1,9 @@
++#! /bin/sh
++
++export CFLAGS="-O2 -march=i686 -s -fomit-frame-pointer"
++export CXXFLAGS="-O2 -march=i686 -s -fomit-frame-pointer"
++export PATH=../dpkg/scripts:$PATH
++export PERL5LIB=../dpkg/scripts
++export DPKG_DATADIR=../dpkg
++./configure
++echo INTLLIBS = -lintl -liconv >> environment.mak
+diff -uwrBN apt-0.7.20.2/ftparchive/apt-ftparchive.cc apt-0.7.20.2-win/ftparchive/apt-ftparchive.cc
+--- apt-0.7.20.2/ftparchive/apt-ftparchive.cc	2009-02-07 17:09:35.000000000 +0200
++++ apt-0.7.20.2-win/ftparchive/apt-ftparchive.cc	2010-03-25 19:31:20.020156600 +0200
+@@ -729,11 +729,9 @@
+    if (CmdL.FileSize() <= 2)
+    {
+       for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+-	 if (I->GenPackages(Setup,Stats) == false)
+-	    _error->DumpErrors();
++	 			I->GenPackages(Setup,Stats);
+       for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+-	 if (I->GenSources(Setup,SrcStats) == false)
+-	    _error->DumpErrors();
++	 			I->GenSources(Setup,SrcStats);
+    }
+    else
+    {
+@@ -758,7 +756,6 @@
+ 	 delete [] List;
+ 	 return _error->Error(_("No selections matched"));
+       }
+-      _error->DumpErrors();
+       
+       // Do the generation for Packages
+       for (End = List; End->Str != 0; End++)
+@@ -769,8 +766,7 @@
+ 	 PackageMap *I = (PackageMap *)End->UserData;
+ 	 if (I->PkgDone == true)
+ 	    continue;
+-	 if (I->GenPackages(Setup,Stats) == false)
+-	    _error->DumpErrors();
++	 		I->GenPackages(Setup,Stats);
+       }
+       
+       // Do the generation for Sources
+@@ -782,8 +778,7 @@
+ 	 PackageMap *I = (PackageMap *)End->UserData;
+ 	 if (I->SrcDone == true)
+ 	    continue;
+-	 if (I->GenSources(Setup,SrcStats) == false)
+-	    _error->DumpErrors();
++	 		I->GenSources(Setup,SrcStats);
+       }
+       
+       delete [] List;
+@@ -837,9 +832,7 @@
+ 	    continue;
+       }
+       
+-      if (I->GenContents(Setup,PkgList.begin(),PkgList.end(),
+-			 MaxContentsChange) == false)
+-	 _error->DumpErrors();
++      I->GenContents(Setup,PkgList.begin(),PkgList.end(), MaxContentsChange);
+       
+       // Hit the limit?
+       if (MaxContentsChange == 0)
+@@ -885,8 +878,7 @@
+    {
+       c0out << I->BinCacheDB << endl;
+       CacheDB DB(flCombine(CacheDir,I->BinCacheDB));
+-      if (DB.Clean() == false)
+-	 _error->DumpErrors();
++      DB.Clean();
+       
+       string CacheDB = I->BinCacheDB;
+       for (; I != PkgList.end() && I->BinCacheDB == CacheDB; I++);
+diff -uwrBN apt-0.7.20.2/ftparchive/cachedb.cc apt-0.7.20.2-win/ftparchive/cachedb.cc
+--- apt-0.7.20.2/ftparchive/cachedb.cc	2009-02-07 17:09:35.000000000 +0200
++++ apt-0.7.20.2-win/ftparchive/cachedb.cc	2010-03-25 19:31:20.020156600 +0200
+@@ -23,6 +23,18 @@
+ #include <netinet/in.h>       // htonl, etc
+ 									/*}}}*/
+ 
++// HH
++								
++void* memrchr(void *buffer, int c, size_t n)
++{
++ 	unsigned char *p = reinterpret_cast<unsigned char*>(buffer);
++
++  for (p += n; n ; n--)
++    if (*--p == c)
++      return p;
++  return NULL;
++}
++
+ // CacheDB::ReadyDB - Ready the DB2					/*{{{*/
+ // ---------------------------------------------------------------------
+ /* This opens the DB2 file for caching package information */
+diff -uwrBN apt-0.7.20.2/ftparchive/cachedb.h apt-0.7.20.2-win/ftparchive/cachedb.h
+--- apt-0.7.20.2/ftparchive/cachedb.h	2009-02-07 17:09:35.000000000 +0200
++++ apt-0.7.20.2-win/ftparchive/cachedb.h	2010-03-25 19:31:20.030171200 +0200
+@@ -23,6 +21,9 @@
+     
+ #include "contents.h"
+     
++// HH
++void *memrchr (const void *buffer, int c, size_t n);
++    
+ class CacheDB
+ {
+    protected:
+diff -uwrBN apt-0.7.20.2/ftparchive/multicompress.cc apt-0.7.20.2-win/ftparchive/multicompress.cc
+--- apt-0.7.20.2/ftparchive/multicompress.cc	2009-02-07 17:09:35.000000000 +0200
++++ apt-0.7.20.2-win/ftparchive/multicompress.cc	2010-03-25 19:31:20.050200400 +0200
+@@ -178,7 +178,6 @@
+       Child(Pipe[0]);
+       if (_error->PendingError() == true)
+       {
+-	 _error->DumpErrors();
+ 	 _exit(100);
+       }      
+       _exit(0);
+diff -uwrBN apt-0.7.20.2/ftparchive/writer.cc apt-0.7.20.2-win/ftparchive/writer.cc
+--- apt-0.7.20.2/ftparchive/writer.cc	2009-02-07 17:09:35.000000000 +0200
++++ apt-0.7.20.2-win/ftparchive/writer.cc	2010-03-25 19:31:20.070229600 +0200
+@@ -118,28 +118,15 @@
+    else
+       Owner->DoPackage(File);
+    
+-   if (_error->empty() == false)
+-   {
+-      // Print any errors or warnings found
+-      string Err;
+-      bool SeenPath = false;
+-      while (_error->empty() == false)
++   if (!_error->empty())
+       {
+-	 Owner->NewLine(1);
+-	 
+-	 bool Type = _error->PopMessage(Err);
+-	 if (Type == true)
+-	    cerr << _("E: ") << Err << endl;
+-	 else
+-	    cerr << _("W: ") << Err << endl;
++      Owner->NewLine(1); // is this needed?
+ 	 
+-	 if (Err.find(File) != string::npos)
+-	    SeenPath = true;
+-      }      
+-      
+-      if (SeenPath == false)
+-	 cerr << _("E: Errors apply to file ") << "'" << File << "'" << endl;
+-      return 0;
++      // HH: Show which file the errors apply to if it is not already
++      // mentioned in the previous errors
++      string errdump = _error->GetErrorDump();
++      if (errdump.find(File) == string::npos)
++	 _error->Error("Errors apply to file '%s'", File);
+    }
+    
+    return 0;
+@@ -320,8 +307,6 @@
+ 
+    if (ExtOverrides.empty() == false)
+       Over.ReadExtraOverride(ExtOverrides);
+-
+-   _error->DumpErrors();
+ }
+                                                                         /*}}}*/
+ // FTWScanner::SetExts - Set extensions to support                      /*{{{*/
+@@ -794,7 +779,6 @@
+       if (_error->empty() == false)
+       {
+ 	 _error->Error("Errors apply to file '%s'",File.c_str());
+-	 _error->DumpErrors();
+       }
+    }
+    
+diff -uwrBN apt-0.7.20.2/methods/connect.cc apt-0.7.20.2-win/methods/connect.cc
+--- apt-0.7.20.2/methods/connect.cc	2009-02-07 17:09:35.000000000 +0200
++++ apt-0.7.20.2-win/methods/connect.cc	2010-03-25 19:31:20.110288000 +0200
+@@ -107,7 +107,7 @@
+ 
+    // Check the socket for an error condition
+    unsigned int Err;
+-   unsigned int Len = sizeof(Err);
++   socklen_t Len = sizeof(Err);
+    if (getsockopt(Fd,SOL_SOCKET,SO_ERROR,&Err,&Len) != 0)
+       return _error->Errno("getsockopt",_("Failed"));
+    
+diff -uwrBN apt-0.7.20.2/methods/copy.cc apt-0.7.20.2-win/methods/copy.cc
+--- apt-0.7.20.2/methods/copy.cc	2009-02-07 17:09:35.000000000 +0200
++++ apt-0.7.20.2-win/methods/copy.cc	2010-03-25 19:31:20.160361000 +0200
+@@ -19,6 +19,7 @@
+ #include <unistd.h>
+ #include <apti18n.h>
+ 									/*}}}*/
++#include <iostream>
+ 
+ class CopyMethod : public pkgAcqMethod
+ {
+diff -uwrBN apt-0.7.20.2/methods/ftp.cc apt-0.7.20.2-win/methods/ftp.cc
+--- apt-0.7.20.2/methods/ftp.cc	2009-02-07 17:09:35.000000000 +0200
++++ apt-0.7.20.2-win/methods/ftp.cc	2010-03-25 19:31:20.230463200 +0200
+@@ -112,7 +112,7 @@
+    Close();
+    
+    // Determine the proxy setting
+-   if (getenv("ftp_proxy") == 0)
++   if (getenv("FTP_PROXY") == 0)
+    {
+       string DefProxy = _config->Find("Acquire::ftp::Proxy");
+       string SpecificProxy = _config->Find("Acquire::ftp::Proxy::" + ServerName.Host);
+@@ -127,12 +127,12 @@
+ 	 Proxy = DefProxy;
+    }
+    else
+-      Proxy = getenv("ftp_proxy");
++      Proxy = getenv("FTP_PROXY");
+    
+    // Parse no_proxy, a , separated list of domains
+-   if (getenv("no_proxy") != 0)
++   if (getenv("NO_PROXY") != 0)
+    {
+-      if (CheckDomainList(ServerName.Host,getenv("no_proxy")) == true)
++      if (CheckDomainList(ServerName.Host,getenv("NO_PROXY")) == true)
+ 	 Proxy = "";
+    }
+    
+@@ -697,7 +697,7 @@
+       if (WaitFd(DataFd,true,TimeOut) == false)
+ 	 return _error->Error(_("Could not connect data socket, connection timed out"));
+       unsigned int Err;
+-      unsigned int Len = sizeof(Err);
++      socklen_t Len = sizeof(Err);
+       if (getsockopt(DataFd,SOL_SOCKET,SO_ERROR,&Err,&Len) != 0)
+ 	 return _error->Errno("getsockopt",_("Failed"));
+       if (Err != 0)
+@@ -1090,16 +1090,16 @@
+ 
+    /* See if we should be come the http client - we do this for http
+       proxy urls */
+-   if (getenv("ftp_proxy") != 0)
++   if (getenv("FTP_PROXY") != 0)
+    {
+-      URI Proxy = string(getenv("ftp_proxy"));
++      URI Proxy = string(getenv("FTP_PROXY"));
+       
+       // Run the HTTP method
+       if (Proxy.Access == "http")
+       {
+ 	 // Copy over the environment setting
+ 	 char S[300];
+-	 snprintf(S,sizeof(S),"http_proxy=%s",getenv("ftp_proxy"));
++	 snprintf(S,sizeof(S),"HTTP_PROXY=%s",getenv("FTP_PROXY"));
+ 	 putenv(S);
+ 	 putenv((char *)"no_proxy=");
+ 	 
+diff -uwrBN apt-0.7.20.2/methods/gpgv.cc apt-0.7.20.2-win/methods/gpgv.cc
+--- apt-0.7.20.2/methods/gpgv.cc	2009-04-20 22:46:30.000000000 +0300
++++ apt-0.7.20.2-win/methods/gpgv.cc	2010-03-25 19:31:20.310580000 +0200
+@@ -88,7 +88,7 @@
+ 
+       Args[i++] = gpgvpath.c_str();
+       Args[i++] = "--status-fd";
+-      Args[i++] = "3";
++      Args[i++] = "2";
+       Args[i++] = "--ignore-time-conflict";
+       Args[i++] = "--keyring";
+       Args[i++] = pubringpath.c_str();
+@@ -124,9 +124,11 @@
+       close(fd[0]);
+       // Redirect output to /dev/null; we read from the status fd
+       dup2(nullfd, STDOUT_FILENO); 
+-      dup2(nullfd, STDERR_FILENO); 
+-      // Redirect the pipe to the status fd (3)
+-      dup2(fd[1], 3);
++      //dup2(nullfd, STDERR_FILENO); 
++      // Windows port: File descriptor 3 is not available so we
++      // have to use STDERR
++      // Redirect the pipe to the status fd (2)
++      dup2(fd[1], 2);
+ 
+       putenv((char *)"LANG=");
+       putenv((char *)"LC_ALL=");
+@@ -241,7 +243,10 @@
+ bool GPGVMethod::Fetch(FetchItem *Itm)
+ {
+    URI Get = Itm->Uri;
+-   string Path = Get.Host + Get.Path; // To account for relative paths
++   // HH: Windows port. Not sure if this is needed; removed because the
++   // host component adds the drive letter to the path twice
++   //string Path = Get.Host + Get.Path; // To account for relative paths
++   string Path = Get.Path;
+    string keyID;
+    vector<string> GoodSigners;
+    vector<string> BadSigners;
+diff -uwrBN apt-0.7.20.2/methods/gzip.cc apt-0.7.20.2-win/methods/gzip.cc
+--- apt-0.7.20.2/methods/gzip.cc	2009-02-07 17:09:35.000000000 +0200
++++ apt-0.7.20.2-win/methods/gzip.cc	2010-03-26 12:01:49.596117300 +0200
+@@ -41,7 +42,10 @@
+ bool GzipMethod::Fetch(FetchItem *Itm)
+ {
+    URI Get = Itm->Uri;
+-   string Path = Get.Host + Get.Path; // To account for relative paths
++   // HH: Windows port. Not sure if this is needed; removed because the
++   // host component adds the drive letter to the path twice
++   //string Path = Get.Host + Get.Path; // To account for relative paths
++   string Path = Get.Path;
+    
+    string GzPathOption = "Dir::bin::"+string(Prog);
+ 
+diff -uwrBN apt-0.7.20.2/methods/http.cc apt-0.7.20.2-win/methods/http.cc
+--- apt-0.7.20.2/methods/http.cc	2009-02-07 17:09:35.000000000 +0200
++++ apt-0.7.20.2-win/methods/http.cc	2010-03-25 19:31:20.320594600 +0200
+@@ -309,7 +309,7 @@
+    Persistent = true;
+    
+    // Determine the proxy setting
+-   if (getenv("http_proxy") == 0)
++   if (getenv("HTTP_PROXY") == 0)
+    {
+       string DefProxy = _config->Find("Acquire::http::Proxy");
+       string SpecificProxy = _config->Find("Acquire::http::Proxy::" + ServerName.Host);
+@@ -324,12 +324,12 @@
+ 	 Proxy = DefProxy;
+    }
+    else
+-      Proxy = getenv("http_proxy");
++      Proxy = getenv("HTTP_PROXY");
+    
+    // Parse no_proxy, a , separated list of domains
+-   if (getenv("no_proxy") != 0)
++   if (getenv("NO_PROXY") != 0)
+    {
+-      if (CheckDomainList(ServerName.Host,getenv("no_proxy")) == true)
++      if (CheckDomainList(ServerName.Host,getenv("NO_PROXY")) == true)
+ 	 Proxy = "";
+    }
+    
+diff -uwrBN apt-0.7.20.2/methods/https.cc apt-0.7.20.2-win/methods/https.cc
+--- apt-0.7.20.2/methods/https.cc	2009-02-07 17:09:35.000000000 +0200
++++ apt-0.7.20.2-win/methods/https.cc	2010-03-25 19:31:20.330609200 +0200
+@@ -61,7 +61,7 @@
+    URI ServerName = Queue->Uri;
+ 
+    // Determine the proxy setting
+-   if (getenv("http_proxy") == 0)
++   if (getenv("HTTP_PROXY") == 0)
+    {
+       string DefProxy = _config->Find("Acquire::http::Proxy");
+       string SpecificProxy = _config->Find("Acquire::http::Proxy::" + ServerName.Host);
+@@ -77,9 +77,9 @@
+    }
+    
+    // Parse no_proxy, a , separated list of domains
+-   if (getenv("no_proxy") != 0)
++   if (getenv("NO_PROXY") != 0)
+    {
+-      if (CheckDomainList(ServerName.Host,getenv("no_proxy")) == true)
++      if (CheckDomainList(ServerName.Host,getenv("NO_PROXY")) == true)
+ 	 Proxy = "";
+    }
+    
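
Note on apt-win.patch: beyond the error-handling changes shared with the Linux
patches, the Windows port replaces the Unix absolute-path test (a leading '/')
with a drive-letter test ("X:/..."), switches the proxy lookups to the
uppercase HTTP_PROXY/FTP_PROXY/NO_PROXY names used on Windows, supplies
memrchr and cfmakeraw shims missing from Cygwin, moves the gpgv status pipe
from descriptor 3 to stderr, and fixes two apt-cache bugs: a missing NULL
check on the Description field, and a memset whose original expression
sizeof(*DFList)*PackageCount+1 binds as (sizeof(*DFList)*PackageCount)+1 and
so leaves most of the final array element uninitialized. A sketch of the
absolute-path test used in configuration.cc and dpkgpm.cc; the patch checks
only val[1] == ':' && val[2] == '/', while this version adds a drive-letter
and length guard for illustration:

#include <cassert>
#include <string>

static bool IsAbsoluteWin(const std::string &p)
{
   return p.length() >= 3 &&
          ((p[0] >= 'A' && p[0] <= 'Z') || (p[0] >= 'a' && p[0] <= 'z')) &&
          p[1] == ':' && (p[2] == '/' || p[2] == '\\');
}

int main()
{
   assert(IsAbsoluteWin("c:/cygwin/var/cache/apt/archives/foo.deb"));
   assert(!IsAbsoluteWin("/var/cache/apt/archives/foo.deb")); // POSIX form
   assert(!IsAbsoluteWin("relative/path"));
   return 0;
}
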
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/patches/windows/dpkg-win.patch	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,1076 @@
+diff -uwrBN dpkg-1.14.23/configure-cygwin dpkg-1.14.23-win/configure-cygwin
+--- dpkg-1.14.23/configure-cygwin	1970-01-01 02:00:00.000000000 +0200
++++ dpkg-1.14.23-win/configure-cygwin	2010-03-25 19:31:22.403631400 +0200
+@@ -0,0 +1,2 @@
++export CFLAGS="-O2 -march=i686 -s -fomit-frame-pointer"
++./configure --with-selinux=no --without-start-stop-daemon
+diff -uwrBN dpkg-1.14.23/configure-cygwin-profile dpkg-1.14.23-win/configure-cygwin-profile
+--- dpkg-1.14.23/configure-cygwin-profile	1970-01-01 02:00:00.000000000 +0200
++++ dpkg-1.14.23-win/configure-cygwin-profile	2010-03-25 19:31:22.413646000 +0200
+@@ -0,0 +1,2 @@
++export CFLAGS="-g -pg"
++./configure --with-selinux=no --without-start-stop-daemon
+diff -uwrBN dpkg-1.14.23/debian/dpkg.postinst dpkg-1.14.23-win/debian/dpkg.postinst
+--- dpkg-1.14.23/debian/dpkg.postinst	2008-11-14 08:54:01.000000000 +0200
++++ dpkg-1.14.23-win/debian/dpkg.postinst	2010-03-25 19:31:22.483748200 +0200
+@@ -61,7 +61,7 @@
+     logfile=/var/log/dpkg.log
+     touch $logfile
+     chmod 640 $logfile
+-    chown root:adm $logfile 2>/dev/null || chown 0:4 $logfile
++    chown root:root $logfile 2>/dev/null || chown 0:4 $logfile
+ }
+ 
+ 
+diff -uwrBN dpkg-1.14.23/dpkg-deb/Makefile.am dpkg-1.14.23-win/dpkg-deb/Makefile.am
+--- dpkg-1.14.23/dpkg-deb/Makefile.am	2008-11-18 12:57:33.000000000 +0200
++++ dpkg-1.14.23-win/dpkg-deb/Makefile.am	2010-03-25 19:31:22.543835800 +0200
+@@ -17,5 +17,5 @@
+ 	main.c
+ 
+ dpkg_deb_LDADD = \
+-	../libcompat/libcompat.a \
++	../libcompat/obstack.o \
+ 	$(LIBINTL) ../lib/libdpkg.a $(ZLIB_LIBS) $(BZ2_LIBS) $(SELINUX_LIBS)
+diff -uwrBN dpkg-1.14.23/dpkg-deb/Makefile.in dpkg-1.14.23-win/dpkg-deb/Makefile.in
+--- dpkg-1.14.23/dpkg-deb/Makefile.in	2008-11-18 13:19:14.000000000 +0200
++++ dpkg-1.14.23-win/dpkg-deb/Makefile.in	2010-03-25 19:31:22.553850400 +0200
+@@ -58,7 +58,7 @@
+ 	main.$(OBJEXT)
+ dpkg_deb_OBJECTS = $(am_dpkg_deb_OBJECTS)
+ am__DEPENDENCIES_1 =
+-dpkg_deb_DEPENDENCIES = ../libcompat/libcompat.a $(am__DEPENDENCIES_1) \
++dpkg_deb_DEPENDENCIES = ../libcompat/obstack.o $(am__DEPENDENCIES_1) \
+ 	../lib/libdpkg.a $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \
+ 	$(am__DEPENDENCIES_1)
+ DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir)
+@@ -213,7 +213,7 @@
+ 	main.c
+ 
+ dpkg_deb_LDADD = \
+-	../libcompat/libcompat.a \
++	../libcompat/obstack.o \
+ 	$(LIBINTL) ../lib/libdpkg.a $(ZLIB_LIBS) $(BZ2_LIBS) $(SELINUX_LIBS)
+ 
+ all: all-am
+diff -uwrBN dpkg-1.14.23/dpkg-deb/extract.c dpkg-1.14.23-win/dpkg-deb/extract.c
+--- dpkg-1.14.23/dpkg-deb/extract.c	2008-11-18 12:57:33.000000000 +0200
++++ dpkg-1.14.23-win/dpkg-deb/extract.c	2010-03-25 19:31:22.563865000 +0200
+@@ -243,50 +243,58 @@
+ 
+   }
+ 
++  // Why flushing??
+   safe_fflush(ar);
+ 
+-  if (oldformat) {
+-    if (admininfo) {
+-      m_pipe(p1);
+-      if (!(c1= m_fork())) {
+-        close(p1[0]);
+-	pi = fdopen(p1[1], "w");
+-	if (!pi)
+-	  ohshite(_("failed to open pipe descriptor `1' in paste"));
+-        errno=0; if (fwrite(ctrlarea,1,ctrllennum,pi) != ctrllennum)
+-          ohshit(_("failed to write to gzip -dc"));
+-        if (fclose(pi)) ohshit(_("failed to close gzip -dc"));
+-        exit(0);
+-      }
+-      close(p1[1]);
+-      readfromfd= p1[0];
+-    } else {
+-      if (lseek(fileno(ar),l+strlen(ctrllenbuf)+ctrllennum,SEEK_SET) == -1)
+-        ohshite(_("failed to syscall lseek to files archive portion"));
+-      c1= -1;
+-      readfromfd= fileno(ar);
+-    }
+-  } else {
+-    m_pipe(p1);
+-    if (!(c1= m_fork())) {
+-      close(p1[0]);
+-      stream_fd_copy(ar, p1[1], memberlen, _("failed to write to pipe in copy"));
+-      if (close(p1[1]) == EOF) ohshite(_("failed to close pipe in copy"));
+-      exit(0);
+-    }
+-    close(p1[1]);
+-    readfromfd= p1[0];
+-  }
+-  
++//  if (oldformat) {
++//    if (admininfo) {
++//      m_pipe(p1);
++//      if (!(c1= m_fork())) {
++//        close(p1[0]);
++//	pi = fdopen(p1[1], "w");
++//	if (!pi)
++//	  ohshite(_("failed to open pipe descriptor `1' in paste"));
++//        errno=0; if (fwrite(ctrlarea,1,ctrllennum,pi) != ctrllennum)
++//          ohshit(_("failed to write to gzip -dc"));
++//        if (fclose(pi)) ohshit(_("failed to close gzip -dc"));
++//        exit(0);
++//      }
++//      close(p1[1]);
++//      readfromfd= p1[0];
++//    } else {
++//      if (lseek(fileno(ar),l+strlen(ctrllenbuf)+ctrllennum,SEEK_SET) == -1)
++//        ohshite(_("failed to syscall lseek to files archive portion"));
++//      c1= -1;
++//      readfromfd= fileno(ar);
++//    }
++//  }
++//  else {
++//    m_pipe(p1);
++//    if (!(c1= m_fork())) {
++//      close(p1[0]);
++//      stream_fd_copy(ar, p1[1], memberlen, _("failed to write to pipe in copy"));
++//      if (close(p1[1]) == EOF) ohshite(_("failed to close pipe in copy"));
++//      exit(0);
++//    }
++//    close(p1[1]);
++//    readfromfd= p1[0];
++//  }
++  
++  //decompress_cat2(compress_type, debar, taroption, _("data"));
++  
++  if (!taroption)
++    decompress_cat(compress_type, fileno(ar), 1, _("data"));
++  else
++  {
+   if (taroption) m_pipe(p2);
+   
+   if (!(c2= m_fork())) {
+-    m_dup2(readfromfd,0);
+-    if (admininfo) close(p1[0]);
++      //m_dup2(readfromfd,0);
++      //if (admininfo) close(p1[0]);
+     if (taroption) { m_dup2(p2[1],1); close(p2[0]); close(p2[1]); }
+-    decompress_cat(compress_type, 0, 1, _("data"));
++      decompress_cat(compress_type, fileno(ar), 1, _("data"));
+   }
+-  if (readfromfd != fileno(ar)) close(readfromfd);
++    //if (readfromfd != fileno(ar)) close(readfromfd);
+   if (taroption) close(p2[1]);
+ 
+   if (taroption && directory) {
+@@ -316,7 +324,9 @@
+   }
+   
+   waitsubproc(c2,"<decompress>",PROCPIPE);
+-  if (c1 != -1) waitsubproc(c1,"paste",0);
++  //  if (c1 != -1) waitsubproc(c1,"paste",0);
++  }
++  
+   if (oldformat && admininfo) {
+     if (versionnum == 0.931F) {
+       movecontrolfiles(OLDOLDDEBDIR);
+diff -uwrBN dpkg-1.14.23/dpkg-split/Makefile.am dpkg-1.14.23-win/dpkg-split/Makefile.am
+--- dpkg-1.14.23/dpkg-split/Makefile.am	2008-11-18 12:57:33.000000000 +0200
++++ dpkg-1.14.23-win/dpkg-split/Makefile.am	2010-03-25 19:31:22.573879600 +0200
+@@ -19,7 +19,7 @@
+ 	split.c
+ 
+ dpkg_split_LDADD = \
+-	../libcompat/libcompat.a \
++	../libcompat/obstack.o \
+ 	$(LIBINTL) \
+ 	../lib/libdpkg.a
+ 
+diff -uwrBN dpkg-1.14.23/dpkg-split/Makefile.in dpkg-1.14.23-win/dpkg-split/Makefile.in
+--- dpkg-1.14.23/dpkg-split/Makefile.in	2008-11-18 13:19:15.000000000 +0200
++++ dpkg-1.14.23-win/dpkg-split/Makefile.in	2010-03-25 19:31:22.583894200 +0200
+@@ -59,7 +59,7 @@
+ 	queue.$(OBJEXT) split.$(OBJEXT)
+ dpkg_split_OBJECTS = $(am_dpkg_split_OBJECTS)
+ am__DEPENDENCIES_1 =
+-dpkg_split_DEPENDENCIES = ../libcompat/libcompat.a \
++dpkg_split_DEPENDENCIES = ../libcompat/obstack.o \
+ 	$(am__DEPENDENCIES_1) ../lib/libdpkg.a
+ pkglibSCRIPT_INSTALL = $(INSTALL_SCRIPT)
+ SCRIPTS = $(pkglib_SCRIPTS)
+@@ -217,7 +217,7 @@
+ 	split.c
+ 
+ dpkg_split_LDADD = \
+-	../libcompat/libcompat.a \
++	../libcompat/obstack.o \
+ 	$(LIBINTL) \
+ 	../lib/libdpkg.a
+ 
+diff -uwrBN dpkg-1.14.23/lib/compression.c dpkg-1.14.23-win/lib/compression.c
+--- dpkg-1.14.23/lib/compression.c	2008-11-18 12:57:33.000000000 +0200
++++ dpkg-1.14.23-win/lib/compression.c	2010-03-25 19:31:23.445149800 +0200
+@@ -48,7 +48,8 @@
+     case compress_type_gzip:
+ #ifdef WITH_ZLIB
+       {
+-        char buffer[4096];
++        //char buffer[4096];
++        char buffer[32768];
+         int actualread;
+         gzFile gzfile = gzdopen(fd_in, "r");
+         while ((actualread= gzread(gzfile,buffer,sizeof(buffer))) > 0) {
+diff -uwrBN dpkg-1.14.23/lib/dbmodify.c dpkg-1.14.23-win/lib/dbmodify.c
+--- dpkg-1.14.23/lib/dbmodify.c	2008-11-18 12:57:33.000000000 +0200
++++ dpkg-1.14.23-win/lib/dbmodify.c	2010-03-25 19:31:23.495222800 +0200
+@@ -153,8 +153,8 @@
+   switch (readwritereq) {
+   case msdbrw_needsuperuser:
+   case msdbrw_needsuperuserlockonly:
+-    if (getuid() || geteuid())
+-      ohshit(_("requested operation requires superuser privilege"));
++    /*if (getuid() || geteuid())
++      ohshit(_("requested operation requires superuser privilege"));*/
+     /* fall through */
+   case msdbrw_write: case msdbrw_writeifposs:
+     if (access(adir,W_OK)) {
+diff -uwrBN dpkg-1.14.23/lib/dump.c dpkg-1.14.23-win/lib/dump.c
+--- dpkg-1.14.23/lib/dump.c	2008-11-18 12:57:33.000000000 +0200
++++ dpkg-1.14.23-win/lib/dump.c	2010-03-25 19:31:23.525266600 +0200
+@@ -367,9 +367,9 @@
+   strcpy(newfn,filename); strcat(newfn,NEWDBEXT);
+   varbufinit(&vb);
+ 
+-  old_umask = umask(022);
++  //old_umask = umask(022);
+   file= fopen(newfn,"w");
+-  umask(old_umask);
++  //umask(old_umask);
+   if (!file) ohshite(_("failed to open `%s' for writing %s information"),filename,which);
+   
+   if (setvbuf(file,writebuf,_IOFBF,sizeof(writebuf)))
+@@ -402,6 +402,29 @@
+   if (link(filename,oldfn) && errno != ENOENT)
+     ohshite(_("failed to link `%.250s' to `%.250s' for backup of %s info"),
+             filename, oldfn, which);
++  
++  // HH: Windows port.
++  // For some reason a file cannot be renamed over an existing file, even
++  // if the file to be replaced is deleted first: the deleted file only
++  // disappears after program termination.
++//  FILE *ifile = fopen(newfn, "r");
++//  fseek(ifile, 0, SEEK_END);
++//  long FileLen = ftell(ifile);
++//  rewind(ifile);
++//  
++//  char *FileBuf = malloc(FileLen);
++//  fread(FileBuf, FileLen, 1, ifile);
++//  fclose(ifile);
++//  
++//  FILE *ofile = fopen(filename, "w");
++//  fwrite(FileBuf, 1, FileLen, ofile);
++//  free(FileBuf);
++//  fclose(ofile);
++//  
++//  unlink(newfn);
++  
++  //unlink(filename);
++  //sleep(5);
+   if (rename(newfn,filename))
+     ohshite(_("failed to install `%.250s' as `%.250s' containing %s info"),
+             newfn, filename, which);
+diff -uwrBN dpkg-1.14.23/lib/mlib.c dpkg-1.14.23-win/lib/mlib.c
+--- dpkg-1.14.23/lib/mlib.c	2008-11-18 12:57:33.000000000 +0200
++++ dpkg-1.14.23-win/lib/mlib.c	2010-03-25 19:31:23.705529400 +0200
+@@ -114,7 +114,8 @@
+ }
+ 
+ void m_pipe(int *fds) {
+-  if (!pipe(fds)) return;
++  //if (!pipe(fds)) return;
++  if (!_pipe(fds, 65536 * 32, 0)) return;
+   onerr_abort++;
+   ohshite(_("failed to create pipe"));
+ }
+@@ -313,7 +314,8 @@
+ off_t buffer_copy(buffer_data_t read_data, buffer_data_t write_data, off_t limit, const char *desc) {
+   char *buf, *writebuf;
+   long bytesread= 0, byteswritten= 0;
+-  int bufsize= 32768;
++  //int bufsize= 32768;
++  int bufsize= 32768 * 2;
+   off_t totalread= 0, totalwritten= 0;
+   if((limit!=-1) && (limit < bufsize)) bufsize= limit;
+   if(bufsize == 0)
+diff -uwrBN dpkg-1.14.23/lib/triglib.c dpkg-1.14.23-win/lib/triglib.c
+--- dpkg-1.14.23/lib/triglib.c	2008-11-18 12:57:33.000000000 +0200
++++ dpkg-1.14.23-win/lib/triglib.c	2010-03-25 19:31:23.785646200 +0200
+@@ -729,10 +729,10 @@
+ 			if (errno != EEXIST)
+ 				ohshite(_("unable to create triggers state"
+ 				          " directory `%.250s'"), triggersdir);
+-		} else if (chown(triggersdir, 0, 0)) {
+-			ohshite(_("unable to set ownership of triggers state"
+-			          " directory `%.250s'"), triggersdir);
+-		}
++			} //else if (chown(triggersdir, 0, 0)) {
++//			ohshite(_("unable to set ownership of triggers state"
++//			          " directory `%.250s'"), triggersdir);
++//		}
+ 		ur = trigdef_update_start(tduf, admindir);
+ 	}
+ 	switch (ur) {
+diff -uwrBN dpkg-1.14.23/ostable dpkg-1.14.23-win/ostable
+--- dpkg-1.14.23/ostable	2008-11-14 08:54:01.000000000 +0200
++++ dpkg-1.14.23-win/ostable	2010-03-25 19:31:25.588274200 +0200
+@@ -25,3 +25,4 @@
+ bsd-netbsd	netbsd		netbsd[^-]*
+ bsd-openbsd	openbsd		openbsd[^-]*
+ sysv-solaris	solaris		solaris[^-]*
++pc-cygwin	cygwin		pc-cygwin
+diff -uwrBN dpkg-1.14.23/src/Makefile.am dpkg-1.14.23-win/src/Makefile.am
+--- dpkg-1.14.23/src/Makefile.am	2008-11-18 12:57:34.000000000 +0200
++++ dpkg-1.14.23-win/src/Makefile.am	2010-03-25 19:31:29.133442600 +0200
+@@ -28,7 +28,7 @@
+ 	update.c
+ 
+ dpkg_LDADD = \
+-	../libcompat/libcompat.a \
++	../libcompat/obstack.o \
+ 	$(LIBINTL) ../lib/libdpkg.a $(ZLIB_LIBS) $(BZ2_LIBS) $(SELINUX_LIBS)
+ 
+ dpkg_query_SOURCES = \
+@@ -37,7 +37,7 @@
+ 	query.c
+ 
+ dpkg_query_LDADD = \
+-	../libcompat/libcompat.a \
++	../libcompat/obstack.o \
+ 	$(LIBINTL) \
+ 	../lib/libdpkg.a
+ 
+@@ -45,7 +45,7 @@
+ 	trigcmd.c
+ 
+ dpkg_trigger_LDADD = \
+-	../libcompat/libcompat.a \
++	../libcompat/obstack.o \
+ 	$(LIBINTL) \
+ 	../lib/libdpkg.a
+ 
+diff -uwrBN dpkg-1.14.23/src/Makefile.in dpkg-1.14.23-win/src/Makefile.in
+--- dpkg-1.14.23/src/Makefile.in	2008-11-18 13:19:15.000000000 +0200
++++ dpkg-1.14.23-win/src/Makefile.in	2010-03-25 19:31:29.143457200 +0200
+@@ -62,17 +62,17 @@
+ 	update.$(OBJEXT)
+ dpkg_OBJECTS = $(am_dpkg_OBJECTS)
+ am__DEPENDENCIES_1 =
+-dpkg_DEPENDENCIES = ../libcompat/libcompat.a $(am__DEPENDENCIES_1) \
++dpkg_DEPENDENCIES = ../libcompat/obstack.o $(am__DEPENDENCIES_1) \
+ 	../lib/libdpkg.a $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \
+ 	$(am__DEPENDENCIES_1)
+ am_dpkg_query_OBJECTS = errors.$(OBJEXT) filesdb.$(OBJEXT) \
+ 	query.$(OBJEXT)
+ dpkg_query_OBJECTS = $(am_dpkg_query_OBJECTS)
+-dpkg_query_DEPENDENCIES = ../libcompat/libcompat.a \
++dpkg_query_DEPENDENCIES = ../libcompat/obstack.o \
+ 	$(am__DEPENDENCIES_1) ../lib/libdpkg.a
+ am_dpkg_trigger_OBJECTS = trigcmd.$(OBJEXT)
+ dpkg_trigger_OBJECTS = $(am_dpkg_trigger_OBJECTS)
+-dpkg_trigger_DEPENDENCIES = ../libcompat/libcompat.a \
++dpkg_trigger_DEPENDENCIES = ../libcompat/obstack.o \
+ 	$(am__DEPENDENCIES_1) ../lib/libdpkg.a
+ DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir)
+ depcomp = $(SHELL) $(top_srcdir)/config/depcomp
+@@ -239,7 +239,7 @@
+ 	update.c
+ 
+ dpkg_LDADD = \
+-	../libcompat/libcompat.a \
++	../libcompat/obstack.o \
+ 	$(LIBINTL) ../lib/libdpkg.a $(ZLIB_LIBS) $(BZ2_LIBS) $(SELINUX_LIBS)
+ 
+ dpkg_query_SOURCES = \
+@@ -248,7 +248,7 @@
+ 	query.c
+ 
+ dpkg_query_LDADD = \
+-	../libcompat/libcompat.a \
++	../libcompat/obstack.o \
+ 	$(LIBINTL) \
+ 	../lib/libdpkg.a
+ 
+@@ -256,7 +256,7 @@
+ 	trigcmd.c
+ 
+ dpkg_trigger_LDADD = \
+-	../libcompat/libcompat.a \
++	../libcompat/obstack.o \
+ 	$(LIBINTL) \
+ 	../lib/libdpkg.a
+ 
+diff -uwrBN dpkg-1.14.23/src/archives.c dpkg-1.14.23-win/src/archives.c
+--- dpkg-1.14.23/src/archives.c	2008-11-18 12:57:34.000000000 +0200
++++ dpkg-1.14.23-win/src/archives.c	2010-03-25 19:31:29.143457200 +0200
+@@ -267,12 +267,12 @@
+ }
+ 
+ static void newtarobject_allmodes(const char *path, struct TarInfo *ti, struct filestatoverride* statoverride) {
+-  if (chown(path,
+-	    statoverride ? statoverride->uid : ti->UserID,
+-	    statoverride ? statoverride->gid : ti->GroupID))
+-    ohshite(_("error setting ownership of `%.255s'"),ti->Name);
+-  if (chmod(path,(statoverride ? statoverride->mode : ti->Mode) & ~S_IFMT))
+-    ohshite(_("error setting permissions of `%.255s'"),ti->Name);
++//  if (chown(path,
++//	    statoverride ? statoverride->uid : ti->UserID,
++//	    statoverride ? statoverride->gid : ti->GroupID))
++//    ohshite(_("error setting ownership of `%.255s'"),ti->Name);
++//  if (chmod(path,(statoverride ? statoverride->mode : ti->Mode) & ~S_IFMT))
++//    ohshite(_("error setting permissions of `%.255s'"),ti->Name);
+   newtarobject_utime(path,ti);
+ }
+ 
+@@ -446,17 +446,18 @@
+      * backup/restore operation and were rudely interrupted.
+      * So, we see if we have .dpkg-tmp, and if so we restore it.
+      */
+-    if (rename(fnametmpvb.buf,fnamevb.buf)) {
+-      if (errno != ENOENT && errno != ENOTDIR)
+-        ohshite(_("unable to clean up mess surrounding `%.255s' before "
+-                "installing another version"),ti->Name);
++    // HH: Why should we restore the file if we are going to overwrite it?
++//    if (rename(fnametmpvb.buf,fnamevb.buf)) {
++//      if (errno != ENOENT && errno != ENOTDIR)
++//        ohshite(_("unable to clean up mess surrounding `%.255s' before "
++//                "installing another version"),ti->Name);
+       debug(dbg_eachfiledetail,"tarobject nonexistent");
+-    } else {
+-      debug(dbg_eachfiledetail,"tarobject restored tmp to main");
+-      statr= lstat(fnamevb.buf,&stab);
+-      if (statr) ohshite(_("unable to stat restored `%.255s' before installing"
+-                         " another version"), ti->Name);
+-    }
++//    } else {
++//      debug(dbg_eachfiledetail,"tarobject restored tmp to main");
++//      statr= lstat(fnamevb.buf,&stab);
++//      if (statr) ohshite(_("unable to stat restored `%.255s' before installing"
++//                         " another version"), ti->Name);
++//    }
+   } else {
+     debug(dbg_eachfiledetail,"tarobject already exists");
+   }
+@@ -579,8 +580,12 @@
+   /* Now, at this stage we want to make sure neither of .dpkg-new and .dpkg-tmp
+    * are hanging around.
+    */
++  // HH: These could be optimized away if we are sure we are in a clean state
++  //if (!statr)
++  //{
+   ensure_pathname_nonexisting(fnamenewvb.buf);
+   ensure_pathname_nonexisting(fnametmpvb.buf);
++  //}
+ 
+   if (existingdirectory) return 0;
+   if (keepexisting) {
+@@ -642,6 +647,12 @@
+   }
+ #endif /* WITH_SELINUX */
+ 
++  // HH: Optimization: extract directly to the final name when the target file does not already exist
++  char* fnamenewvbbuf_old = fnamenewvb.buf;
++  if (statr)
++  {
++    fnamenewvb.buf = fnamevb.buf;
++  }
+ 
+   /* Extract whatever it is as .dpkg-new ... */
+   switch (ti->Type) {
+@@ -649,14 +660,19 @@
+     /* We create the file with mode 0 to make sure nobody can do anything with
+      * it until we apply the proper mode, which might be a statoverride.
+      */
++    debug(dbg_eachfiledetail,"Opening new file");
+     fd= open(fnamenewvb.buf, (O_CREAT|O_EXCL|O_WRONLY), 0);
+     if (fd < 0) ohshite(_("unable to create `%.255s'"),ti->Name);
+     push_cleanup(cu_closefd, ehflag_bombout, NULL, 0, 1, &fd);
+     debug(dbg_eachfiledetail,"tarobject NormalFile[01] open size=%lu",
+           (unsigned long)ti->Size);
++    debug(dbg_eachfiledetail,"fd fd copy");
++    //SetFileValidData(fd, ti->Size);
+     { char fnamebuf[256];
+     fd_fd_copy(tc->backendpipe, fd, ti->Size, _("backend dpkg-deb during `%.255s'"),quote_filename(fnamebuf,256,ti->Name));
++    //fd_null_copy(tc->backendpipe, ti->Size, _("backend dpkg-deb during `%.255s'"),quote_filename(fnamebuf,256,ti->Name));
+     }
++    debug(dbg_eachfiledetail,"safe read");
+     r= ti->Size % TARBLKSZ;
+     if (r > 0) r= safe_read(tc->backendpipe,databuf,TARBLKSZ - r);
+     if (nifd->namenode->statoverride) 
+@@ -664,17 +680,20 @@
+ 			  nifd->namenode->statoverride->uid,
+ 			  nifd->namenode->statoverride->gid,
+ 			  nifd->namenode->statoverride->mode);
+-    if (fchown(fd,
+-	    nifd->namenode->statoverride ? nifd->namenode->statoverride->uid : ti->UserID,
+-	    nifd->namenode->statoverride ? nifd->namenode->statoverride->gid : ti->GroupID))
+-      ohshite(_("error setting ownership of `%.255s'"),ti->Name);
++//    if (fchown(fd,
++//	    nifd->namenode->statoverride ? nifd->namenode->statoverride->uid : ti->UserID,
++//	    nifd->namenode->statoverride ? nifd->namenode->statoverride->gid : ti->GroupID))
++//      ohshite(_("error setting ownership of `%.255s'"),ti->Name);
+     am=(nifd->namenode->statoverride ? nifd->namenode->statoverride->mode : ti->Mode) & ~S_IFMT;
+-    if (fchmod(fd,am))
+-      ohshite(_("error setting permissions of `%.255s'"),ti->Name);
++//    if (fchmod(fd,am))
++//      ohshite(_("error setting permissions of `%.255s'"),ti->Name);
+     pop_cleanup(ehflag_normaltidy); /* fd= open(fnamenewvb.buf) */
++    debug(dbg_eachfiledetail,"closing...");
+     if (close(fd))
+       ohshite(_("error closing/writing `%.255s'"),ti->Name);
++    debug(dbg_eachfiledetail,"utime");
+     newtarobject_utime(fnamenewvb.buf,ti);
++    debug(dbg_eachfiledetail,"end");
+     break;
+   case FIFO:
+     if (mkfifo(fnamenewvb.buf,0))
+@@ -708,23 +727,23 @@
+     if (symlink(ti->LinkName,fnamenewvb.buf))
+       ohshite(_("error creating symbolic link `%.255s'"),ti->Name);
+     debug(dbg_eachfiledetail,"tarobject SymbolicLink creating");
+-#ifdef HAVE_LCHOWN
+-    if (lchown(fnamenewvb.buf,
+-	    nifd->namenode->statoverride ? nifd->namenode->statoverride->uid : ti->UserID,
+-	    nifd->namenode->statoverride ? nifd->namenode->statoverride->gid : ti->GroupID))
+-      ohshite(_("error setting ownership of symlink `%.255s'"),ti->Name);
+-#else
+-    if (chown(fnamenewvb.buf,
+-	    nifd->namenode->statoverride ? nifd->namenode->statoverride->uid : ti->UserID,
+-	    nifd->namenode->statoverride ? nifd->namenode->statoverride->gid : ti->GroupID))
+-      ohshite(_("error setting ownership of symlink `%.255s'"),ti->Name);
+-#endif
++//#ifdef HAVE_LCHOWN
++//    if (lchown(fnamenewvb.buf,
++//	    nifd->namenode->statoverride ? nifd->namenode->statoverride->uid : ti->UserID,
++//	    nifd->namenode->statoverride ? nifd->namenode->statoverride->gid : ti->GroupID))
++//      ohshite(_("error setting ownership of symlink `%.255s'"),ti->Name);
++//#else
++//    if (chown(fnamenewvb.buf,
++//	    nifd->namenode->statoverride ? nifd->namenode->statoverride->uid : ti->UserID,
++//	    nifd->namenode->statoverride ? nifd->namenode->statoverride->gid : ti->GroupID))
++//      ohshite(_("error setting ownership of symlink `%.255s'"),ti->Name);
++//#endif
+     break;
+   case Directory:
+     /* We've already checked for an existing directory. */
+     if (mkdir(fnamenewvb.buf,0))
+       ohshite(_("error creating directory `%.255s'"),ti->Name);
+-    debug(dbg_eachfiledetail,"tarobject Directory creating");
++    debug(dbg_eachfiledetail,"tarobject Directory creating '%.255s'", fnamenewvb.buf);
+     newtarobject_allmodes(fnamenewvb.buf,ti,nifd->namenode->statoverride);
+     break;
+   default:
+@@ -772,15 +791,15 @@
+       symlinkfn.used= r; varbufaddc(&symlinkfn,0);
+       if (symlink(symlinkfn.buf,fnametmpvb.buf))
+         ohshite(_("unable to make backup symlink for `%.255s'"),ti->Name);
+-#ifdef HAVE_LCHOWN
+-      if (lchown(fnametmpvb.buf,stab.st_uid,stab.st_gid))
+-        ohshite(_("unable to chown backup symlink for `%.255s'"),ti->Name);
+-#else
+-      if (chown(fnametmpvb.buf,stab.st_uid,stab.st_gid))
+-        ohshite(_("unable to chown backup symlink for `%.255s'"),ti->Name);
+-#endif
++//#ifdef HAVE_LCHOWN
++//      if (lchown(fnametmpvb.buf,stab.st_uid,stab.st_gid))
++//        ohshite(_("unable to chown backup symlink for `%.255s'"),ti->Name);
++//#else
++//      if (chown(fnametmpvb.buf,stab.st_uid,stab.st_gid))
++//        ohshite(_("unable to chown backup symlink for `%.255s'"),ti->Name);
++//#endif
+     } else {
+-      debug(dbg_eachfiledetail,"tarobject nondirectory, `link' backup");
++      debug(dbg_eachfiledetail,"tarobject nondirectory, 'link' backup for %s", fnametmpvb.buf);
+       if (link(fnamevb.buf,fnametmpvb.buf))
+         ohshite(_("unable to make backup link of `%.255s' before installing new version"),
+                 ti->Name);
+@@ -806,8 +825,28 @@
+         
+ #endif /* WITH_SELINUX */
+ 
++	// HH: Windows port
++	// In some cases rename seems to fail with permission denied error
++//	if (ti->Type == Directory)
++//		rename(fnamenewvb.buf,fnamevb.buf);
++//	else
++//	{
++//		// FIXME: This operation is not anymore atomic when changed from rename
++//		if (unlink(fnamevb.buf))
++//		{
++//			if (errno != ENOENT) 
++//				ohshite(_("unlink failed: Deleting existing file. Unable to install new version of `%.255s'"),ti->Name);
++//		}
++//		if (link(fnamenewvb.buf, fnamevb.buf))
++//			ohshite(_("link failed: Renaming. Unable to install new version of `%.255s'"),ti->Name);
++//		if (unlink(fnamenewvb.buf))
++//			ohshite(_("unlink failed: Deleting old file. Unable to install new version of `%.255s'"),ti->Name);
++//	}
++  if (!statr)
++  {
+   if (rename(fnamenewvb.buf,fnamevb.buf))
+     ohshite(_("unable to install new version of `%.255s'"),ti->Name);
++  }
+ 
+   /* CLEANUP: now the new file is in the destination file, and the
+    * old file is in dpkg-tmp to be cleaned up later.  We now need
+@@ -830,6 +869,9 @@
+   nifd->namenode->flags |= fnnf_elide_other_lists;
+ 
+   debug(dbg_eachfiledetail,"tarobject done and installed");
++  
++  fnamenewvb.buf = fnamenewvbbuf_old;
++  
+   return 0;
+ }
+ 
+@@ -1176,9 +1218,14 @@
+   varbufinit(&fnametmpvb);
+   varbufinit(&fnamenewvb);
+ 
+-  varbufaddstr(&fnamevb,instdir); varbufaddc(&fnamevb,'/');
+-  varbufaddstr(&fnametmpvb,instdir); varbufaddc(&fnametmpvb,'/');
+-  varbufaddstr(&fnamenewvb,instdir); varbufaddc(&fnamenewvb,'/');
++  // POSIX workaround: paths that begin with exactly two slashes ('//')
++  // have implementation-defined behaviour, whereas three or more leading
++  // slashes are treated as a single slash; hence the "///" below.
++  // See also:
++  // http://www.opengroup.org/onlinepubs/9699919799/ \
++  //   basedefs/V1_chap03.html#tag_03_266
++  varbufaddstr(&fnamevb,instdir); varbufaddstr(&fnamevb,"///");
++  varbufaddstr(&fnametmpvb,instdir); varbufaddstr(&fnametmpvb,"///");
++  varbufaddstr(&fnamenewvb,instdir); varbufaddstr(&fnamenewvb,"///");
+   fnameidlu= fnamevb.used;
+ 
+   ensure_diversions();
+diff -uwrBN dpkg-1.14.23/src/configure.c dpkg-1.14.23-win/src/configure.c
+--- dpkg-1.14.23/src/configure.c	2008-11-18 12:57:34.000000000 +0200
++++ dpkg-1.14.23-win/src/configure.c	2010-03-25 19:31:29.173501000 +0200
+@@ -444,8 +444,8 @@
+ 	if (chown(target, stab.st_uid, stab.st_gid)==-1)
+ 		ohshite(_("unable to change ownership of new dist conffile `%.250s'"), target);
+ 
+-	if (chmod(target, (stab.st_mode & 07777))==-1)
+-		ohshite(_("unable to set mode of new dist conffile `%.250s'"), target);
++//	if (chmod(target, (stab.st_mode & 07777))==-1)
++//		ohshite(_("unable to set mode of new dist conffile `%.250s'"), target);
+ }
+ 
+ 
+diff -uwrBN dpkg-1.14.23/src/help.c dpkg-1.14.23-win/src/help.c
+--- dpkg-1.14.23/src/help.c	2008-11-18 12:57:34.000000000 +0200
++++ dpkg-1.14.23-win/src/help.c	2010-03-25 19:31:29.213559400 +0200
+@@ -30,6 +30,8 @@
+ #include <sys/wait.h>
+ #include <signal.h>
+ #include <time.h>
++#include <stdio.h>
++#include <sys/time.h>
+ 
+ #include <dpkg.h>
+ #include <dpkg-db.h>
+@@ -37,6 +39,9 @@
+ #include "filesdb.h"
+ #include "main.h"
+ 
++#include <Windows.h>
++#include <process.h>
++
+ const char *const statusstrings[]= {
+   N_("not installed"),
+   N_("not installed but configs remain"),
+@@ -77,12 +82,12 @@
+ 
+ void checkpath(void) {
+ /* Verify that some programs can be found in the PATH. */
+-  static const char *const checklist[]= { "ldconfig", 
++  static const char *const checklist[]= { /*"ldconfig", 
+ #if WITH_START_STOP_DAEMON
+     "start-stop-daemon",
+ #endif    
+     "install-info",
+-    "update-rc.d",
++    "update-rc.d",*/
+     NULL
+   };
+ 
+@@ -263,8 +268,8 @@
+ 
+ static void setexecute(const char *path, struct stat *stab) {
+   if ((stab->st_mode & 0555) == 0555) return;
+-  if (!chmod(path,0755)) return;
+-  ohshite(_("unable to set execute permissions on `%.250s'"),path);
++  //if (!chmod(path,0755)) return;
++  //ohshite(_("unable to set execute permissions on `%.250s'"),path);
+ }
+ static int do_script(const char *pkg, const char *scriptname, const char *scriptpath, struct stat *stab, char *const arglist[], const char *desc, const char *name, int warn) {
+   const char *scriptexec;
+@@ -453,11 +458,30 @@
+ void debug(int which, const char *fmt, ...) {
+   va_list ap;
+   if (!(f_debug & which)) return;
+-  fprintf(stderr,"D0%05o: ",which);
++    
++ //QueryPerformanceFrequency();
++        
++ struct timeval tv;
++ struct tm* ptm;
++ char time_string[40];
++ long milliseconds;
++
++ /* Obtain the time of day, and convert it to a tm struct. */
++ gettimeofday(&tv, NULL);
++ ptm = localtime(&tv.tv_sec);
++ /* Format the time, down to a single second. */
++ strftime(time_string, sizeof(time_string), "%H:%M:%S", ptm);
++ /* Compute milliseconds from microseconds. */
++ milliseconds = tv.tv_usec / 1000;
++ /* Print the formatted time, in seconds, followed by a decimal point
++   and the milliseconds. */
++        
++  fprintf(stderr, "%s.%03ld D0%05o: ", time_string, milliseconds, which);
+   va_start(ap,fmt);
+   vfprintf(stderr,fmt,ap);
+   va_end(ap);
+   putc('\n',stderr);
++  //fflush(stderr);
+ }
+ 
+ int hasdirectoryconffiles(struct filenamenode *file, struct pkginfo *pkg) {
+@@ -529,34 +553,43 @@
+ 			     const char **failed) {
+   /* Sets *failed to `chmod'' if that call fails (which is always
+    * unexpected).  If unlink fails it leaves *failed alone. */
+-  if (S_ISREG(stab->st_mode) ? (stab->st_mode & 07000) :
+-      !(S_ISLNK(stab->st_mode) || S_ISDIR(stab->st_mode) ||
+-	S_ISFIFO(stab->st_mode) || S_ISSOCK(stab->st_mode))) {
+-    /* We chmod it if it is 1. a sticky or set-id file, or 2. an unrecognised
+-     * object (ie, not a file, link, directory, fifo or socket)
+-     */
+-    if (chmod(pathname,0600)) { *failed= N_("chmod"); return -1; }
+-  }
++//  if (S_ISREG(stab->st_mode) ? (stab->st_mode & 07000) :
++//      !(S_ISLNK(stab->st_mode) || S_ISDIR(stab->st_mode) ||
++//	S_ISFIFO(stab->st_mode) || S_ISSOCK(stab->st_mode))) {
++//    /* We chmod it if it is 1. a sticky or set-id file, or 2. an unrecognised
++//     * object (ie, not a file, link, directory, fifo or socket)
++//     */
++//    //if (chmod(pathname,0600)) { *failed= N_("chmod"); return -1; }
++//  }
+   if (unlink(pathname)) return -1;
+   return 0;
+ }
+ 
+ void ensure_pathname_nonexisting(const char *pathname) {
+-  int c1;
++  int c1, unlink_errno;
+   const char *u, *failed;
+ 
+   u= skip_slash_dotslash(pathname);
+   assert(*u);
+ 
+   debug(dbg_eachfile,"ensure_pathname_nonexisting `%s'",pathname);
++  // HH: Cygwin 1.7 port: try unlink first, because rmdir does not
++  // handle the special .exe extension.
++  if (!chmodsafe_unlink(pathname, &failed)) return; /* OK, it was a file */
++  unlink_errno = errno;
+   if (!rmdir(pathname)) return; /* Deleted it OK, it was a directory. */
+   if (errno == ENOENT || errno == ELOOP) return;
+   failed= N_("delete");
+-  if (errno == ENOTDIR) {
+-    /* Either it's a file, or one of the path components is.  If one
+-     * of the path components is this will fail again ...
+-     */
+-    if (!chmodsafe_unlink(pathname, &failed)) return; /* OK, it was */
++//  if (errno == ENOTDIR) {
++//    /* Either it's a file, or one of the path components is.  If one
++//     * of the path components is this will fail again ...
++//     */
++//    if (!chmodsafe_unlink(pathname, &failed)) return; /* OK, it was */
++//    if (errno == ENOTDIR) return;
++//  }
++  if (errno == ENOTDIR)
++  {
++    errno = unlink_errno;
+     if (errno == ENOTDIR) return;
+   }
+   if (errno != ENOTEMPTY && errno != EEXIST) { /* Huh ? */
+@@ -564,13 +597,17 @@
+     snprintf(mbuf, sizeof(mbuf), N_("failed to %s `%%.255s'"), failed);
+     ohshite(_(mbuf),pathname);
+   }
+-  c1= m_fork();
+-  if (!c1) {
+-    execlp(RM, "rm", "-rf", "--", pathname, NULL);
+-    ohshite(_("failed to exec rm for cleanup"));
+-  }
++  
+   debug(dbg_eachfile,"ensure_pathname_nonexisting running rm -rf");
+-  waitsubproc(c1,"rm cleanup",0);
++  //_spawnlp(_P_WAIT | _P_DETACH, RM, "rm", "-rf", "--", pathname, NULL);
++  _spawnlp(_P_WAIT, RM, "rm", "-rf", "--", pathname, NULL);
++//  c1= m_fork();
++//  if (!c1) {
++//    execlp(RM, "rm", "-rf", "--", pathname, NULL);
++//    ohshite(_("failed to exec rm for cleanup"));
++//  }
++//  debug(dbg_eachfile,"ensure_pathname_nonexisting running rm -rf");
++//  waitsubproc(c1,"rm cleanup",0);
+ }
+ 
+ void log_action(const char *action, struct pkginfo *pkg) {
+diff -uwrBN dpkg-1.14.23/src/processarc.c dpkg-1.14.23-win/src/processarc.c
+--- dpkg-1.14.23/src/processarc.c	2008-11-18 12:57:34.000000000 +0200
++++ dpkg-1.14.23-win/src/processarc.c	2010-03-25 19:31:29.333734600 +0200
+@@ -44,6 +44,8 @@
+ #include "main.h"
+ #include "archives.h"
+ 
++#include <process.h>
++
+ void process_archive(const char *filename) {
+   static const struct TarFunctions tf = {
+     tarfileread,
+@@ -84,6 +86,8 @@
+   struct stat stab, oldfs;
+   struct packageinlist *deconpil, *deconpiltemp;
+   
++  debug(dbg_eachfiledetail, "processarchive start");
++  
+   cleanup_pkg_failed= cleanup_conflictor_failed= 0;
+   admindirlen= strlen(admindir);
+ 
+@@ -101,40 +105,41 @@
+ 
+   if (stat(filename,&stab)) ohshite(_("cannot access archive"));
+ 
+-  if (!f_noact) {
+-    /* We can't `tentatively-reassemble' packages. */
+-    if (!reasmbuf) {
+-      reasmbuf= m_malloc(admindirlen+sizeof(REASSEMBLETMP)+5);
+-      strcpy(reasmbuf,admindir);
+-      strcat(reasmbuf,"/" REASSEMBLETMP);
+-    }
+-    if (unlink(reasmbuf) && errno != ENOENT)
+-      ohshite(_("error ensuring `%.250s' doesn't exist"),reasmbuf);
+-    push_cleanup(cu_pathname, ~0, NULL, 0, 1, (void *)reasmbuf);
+-    c1= m_fork();
+-    if (!c1) {
+-      execlp(SPLITTER, SPLITTER, "-Qao", reasmbuf, filename, NULL);
+-      ohshite(_("failed to exec dpkg-split to see if it's part of a multiparter"));
+-    }
+-    while ((r= waitpid(c1,&status,0)) == -1 && errno == EINTR);
+-    if (r != c1) { onerr_abort++; ohshite(_("wait for dpkg-split failed")); }
+-    switch (WIFEXITED(status) ? WEXITSTATUS(status) : -1) {
+-    case 0:
+-      /* It was a part - is it complete ? */
+-      if (!stat(reasmbuf,&stab)) { /* Yes. */
+-        filename= reasmbuf;
+-        pfilename= _("reassembled package file");
+-        break;
+-      } else if (errno == ENOENT) { /* No.  That's it, we skip it. */
+-        return;
+-      }
+-    case 1:
+-      /* No, it wasn't a part. */
+-      break;
+-    default:
+-      checksubprocerr(status,SPLITTER,0);
+-    }
+-  }
++  // HH: Optimization. We don't need this?
++//  if (!f_noact) {
++//    /* We can't `tentatively-reassemble' packages. */
++//    if (!reasmbuf) {
++//      reasmbuf= m_malloc(admindirlen+sizeof(REASSEMBLETMP)+5);
++//      strcpy(reasmbuf,admindir);
++//      strcat(reasmbuf,"/" REASSEMBLETMP);
++//    }
++//    if (unlink(reasmbuf) && errno != ENOENT)
++//      ohshite(_("error ensuring `%.250s' doesn't exist"),reasmbuf);
++//    push_cleanup(cu_pathname, ~0, NULL, 0, 1, (void *)reasmbuf);
++//    c1= m_fork();
++//    if (!c1) {
++//      execlp(SPLITTER, SPLITTER, "-Qao", reasmbuf, filename, NULL);
++//      ohshite(_("failed to exec dpkg-split to see if it's part of a multiparter"));
++//    }
++//    while ((r= waitpid(c1,&status,0)) == -1 && errno == EINTR);
++//    if (r != c1) { onerr_abort++; ohshite(_("wait for dpkg-split failed")); }
++//    switch (WIFEXITED(status) ? WEXITSTATUS(status) : -1) {
++//    case 0:
++//      /* It was a part - is it complete ? */
++//      if (!stat(reasmbuf,&stab)) { /* Yes. */
++//        filename= reasmbuf;
++//        pfilename= _("reassembled package file");
++//        break;
++//      } else if (errno == ENOENT) { /* No.  That's it, we skip it. */
++//        return;
++//      }
++//    case 1:
++//      /* No, it wasn't a part. */
++//      break;
++//    default:
++//      checksubprocerr(status,SPLITTER,0);
++//    }
++//  }
+   
+   /* Verify the package. */
+   if (!f_nodebsig && (stat(DEBSIGVERIFY, &stab)==0)) {
+@@ -181,13 +186,18 @@
+   ensure_pathname_nonexisting(cidir); cidirrest[-1]= '/';
+   
+   push_cleanup(cu_cidir, ~0, NULL, 0, 2, (void *)cidir, (void *)cidirrest);
+-  c1= m_fork();
+-  if (!c1) {
+-    cidirrest[-1]= 0;
+-    execlp(BACKEND, BACKEND, "--control", filename, cidir, NULL);
+-    ohshite(_("failed to exec dpkg-deb to extract control information"));
+-  }
+-  waitsubproc(c1,BACKEND " --control",0);
++  debug(dbg_eachfiledetail, "extract control start");
++  //cidirrest[-1]= 0; // ?
++  _spawnlp(_P_WAIT, BACKEND, BACKEND, "--control", filename, cidir, NULL);
++  //_spawnlp(_P_WAIT | _P_DETACH, BACKEND, BACKEND, "--control", filename, cidir, NULL);
++  debug(dbg_eachfiledetail, "extract control end");
++  //c1= m_fork();
++//  if (!c1) {
++//    cidirrest[-1]= 0;
++//    execlp(BACKEND, BACKEND, "--control", filename, cidir, NULL);
++//    ohshite(_("failed to exec dpkg-deb to extract control information"));
++//  }
++//  waitsubproc(c1,BACKEND " --control",0);
+   strcpy(cidirrest,CONTROLFILE);
+ 
+   parsedb(cidir, pdb_recordavailable | pdb_rejectstatus | pdb_ignorefiles,
+@@ -279,12 +289,14 @@
+     if (psearch->up->type != dep_conflicts) continue;
+     check_conflict(psearch->up, pkg, pfilename);
+   }
+-  
++  debug(dbg_eachfiledetail, "dbinit");
+   ensure_allinstfiles_available();
+   filesdbinit();
+   trig_file_interests_ensure();
+ 
++  int isPackageReplace = 0;
+   if (pkg->status != stat_notinstalled && pkg->status != stat_configfiles) {
++    isPackageReplace = 1;
+     printf(_("Preparing to replace %s %s (using %s) ...\n"),
+            pkg->name,
+            versiondescribe(&pkg->installed.version,vdew_nonambig),
+@@ -568,7 +580,9 @@
+ 
+   m_pipe(p1);
+   push_cleanup(cu_closepipe, ehflag_bombout, NULL, 0, 1, (void *)&p1[0]);
++  debug(dbg_eachfiledetail, "fork systarfile start");
+   c1= m_fork();
++  debug(dbg_eachfiledetail, "fork systarfile end");
+   if (!c1) {
+     m_dup2(p1[1],1); close(p1[0]); close(p1[1]);
+     execlp(BACKEND, BACKEND, "--fsys-tarfile", filename, NULL);
+@@ -594,6 +608,7 @@
+   fd_null_copy(p1[0], -1, _("dpkg-deb: zap possible trailing zeros"));
+   close(p1[0]);
+   p1[0] = -1;
++  debug(dbg_eachfiledetail, "waiting for decompressor");
+   waitsubproc(c1,BACKEND " --fsys-tarfile",PROCPIPE);
+ 
+   if (oldversionstatus == stat_halfinstalled || oldversionstatus == stat_unpacked) {
+@@ -1124,6 +1137,7 @@
+    * backup files, and we can leave the user to fix that if and when
+    * it happens (we leave the reinstall required flag, of course).
+    */
++   debug(dbg_eachfile,"starting modstatdb");
+   pkg->status= stat_unpacked;
+   modstatdb_note(pkg);
+   
+@@ -1137,6 +1151,9 @@
+    * They stay recorded as obsolete conffiles and will eventually
+    * (if not taken over by another package) be forgotten.
+    */
++  if (isPackageReplace)
++  {
++      debug(dbg_eachfile,"starting backup delete");
+   for (cfile= newfileslist; cfile; cfile= cfile->next) {
+     if (cfile->namenode->flags & fnnf_new_conff) continue;
+     fnametmpvb.used= fnameidlu;
+@@ -1145,6 +1162,7 @@
+     varbufaddc(&fnametmpvb,0);
+     ensure_pathname_nonexisting(fnametmpvb.buf);
+   }
++  }
+ 
+   /* OK, we're now fully done with the main package.
+    * This is quite a nice state, so we don't unwind past here.
+diff -uwrBN dpkg-1.14.23/src/remove.c dpkg-1.14.23-win/src/remove.c
+--- dpkg-1.14.23/src/remove.c	2008-11-18 12:57:34.000000000 +0200
++++ dpkg-1.14.23-win/src/remove.c	2010-03-25 19:31:29.443895200 +0200
+@@ -239,6 +239,7 @@
+       
+       fnvb.used= before;
+       varbufaddc(&fnvb,0);
++      debug(dbg_eachfiledetail, "removal_bulk removing `%s'", fnvb.buf);
+       if (!stat(fnvb.buf,&stab) && S_ISDIR(stab.st_mode)) {
+         debug(dbg_eachfiledetail, "removal_bulk is a directory");
+         /* Only delete a directory or a link to one if we're the only
+@@ -250,8 +251,7 @@
+ 	  continue;
+ 	}
+ 	if (isdirectoryinuse(namenode,pkg)) continue;
+-      }
+-      debug(dbg_eachfiledetail, "removal_bulk removing `%s'", fnvb.buf);
++	    
+       if (!rmdir(fnvb.buf) || errno == ENOENT || errno == ELOOP) continue;
+       if (errno == ENOTEMPTY || errno == EEXIST) {
+ 	debug(dbg_eachfiledetail, "removal_bulk `%s' was not empty, will try again later",
+@@ -267,23 +267,24 @@
+         continue;
+       }
+       if (errno != ENOTDIR) ohshite(_("cannot remove `%.250s'"),fnvb.buf);
++      }
+       debug(dbg_eachfiledetail, "removal_bulk unlinking `%s'", fnvb.buf);
+-      {
++      //{
+         /*
+          * If file to remove is a device or s[gu]id, change its mode
+          * so that a malicious user cannot use it even if it's linked
+          * to another file
+          */
+-        struct stat stat_buf;
+-        if (lstat(fnvb.buf,&stat_buf)==0) {
+-          if (S_ISCHR(stat_buf.st_mode) || S_ISBLK(stat_buf.st_mode)) {
+-            chmod(fnvb.buf,0);
+-          }
+-          if (stat_buf.st_mode & (S_ISUID|S_ISGID)) {
+-            chmod(fnvb.buf,stat_buf.st_mode & ~(S_ISUID|S_ISGID));
+-          }
+-        }
+-      }
++//        struct stat stat_buf;
++//        if (lstat(fnvb.buf,&stat_buf)==0) {
++//          if (S_ISCHR(stat_buf.st_mode) || S_ISBLK(stat_buf.st_mode)) {
++//            //chmod(fnvb.buf,0);
++//          }
++//          if (stat_buf.st_mode & (S_ISUID|S_ISGID)) {
++//            //chmod(fnvb.buf,stat_buf.st_mode & ~(S_ISUID|S_ISGID));
++//          }
++//        }
++      //}
+       if (unlink(fnvb.buf)) ohshite(_("cannot remove file `%.250s'"),fnvb.buf);
+     }
+     write_filelist_except(pkg,leftover,0);
+@@ -365,6 +366,10 @@
+       }
+       if (isdirectoryinuse(namenode,pkg)) continue;
+     }
++    else // Not a directory
++    {
++        continue;
++    }
+ 
+     debug(dbg_eachfiledetail, "removal_bulk removing `%s'", fnvb.buf);
+     if (!rmdir(fnvb.buf) || errno == ENOENT || errno == ELOOP) continue;
+diff -uwrBN dpkg-1.14.23/triplettable dpkg-1.14.23-win/triplettable
+--- dpkg-1.14.23/triplettable	2008-11-14 08:54:02.000000000 +0200
++++ dpkg-1.14.23-win/triplettable	2010-03-25 19:31:29.463924400 +0200
+@@ -14,3 +14,4 @@
+ bsd-netbsd-<cpu>	netbsd-<cpu>
+ bsd-darwin-<cpu>	darwin-<cpu>
+ sysv-solaris-<cpu>	solaris-<cpu>
++pc-cygwin-<cpu>		cygwin-<cpu>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/cclient/smoketest.bat	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,45 @@
+@REM
+@REM Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+@REM All rights reserved.
+@REM This component and the accompanying materials are made available
+@REM under the terms of "Eclipse Public License v1.0"
+@REM which accompanies this distribution, and is available
+@REM at the URL "http://www.eclipse.org/legal/epl-v10.html".
+@REM
+@REM Initial Contributors:
+@REM Nokia Corporation - initial contribution.
+@REM
+@REM Contributors:
+@REM
+@REM Description:
+@REM Does a quick test for a blocks client
+@REM
+
+@ECHO OFF
+SETLOCAL
+
+pushd .
+
+if not exist c:\temp mkdir c:\temp
+set BLOCKS_METADATA=c:\temp\blocks_metadata
+set path=%~dp0\blocks\bin;%path%
+c:
+cd c:\temp
+call blocks --version
+mkdir blocks_workspace
+cd blocks_workspace
+call blocks workspace-add .
+call bundle --version
+mkdir test
+echo test > test\testfile
+call bundle create-xml -t test\testfile test.xml
+call bundle create --directives=single-bundle test.xml
+call blocks bundle-install bundles\bundle_1.0.0-1.deb
+call blocks -f bundle-remove bundle
+call blocks -f workspace-remove 1
+cd ..
+rmdir /s /q blocks_workspace
+
+popd
+
+ENDLOCAL
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/README.txt	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,18 @@
+Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+All rights reserved.
+This component and the accompanying materials are made available
+under the terms of "Eclipse Public License v1.0"
+which accompanies this distribution, and is available
+at the URL "http://www.eclipse.org/legal/epl-v10.html".
+
+
+Blocks Packaging Framework Installation Instructions
+----------------------------------------------------
+
+1. Copy all the needed binaries to src\SymbianUtils\bin (see README.txt in that directory for more information)
+
+2. Run these commands in this directory:
+python setup_blocks.py install
+python setup_symbian.py install
+
+NOTE: If you want to create a Windows installation package, use "bdist_wininst" instead of "install"
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/blocks-version	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,1 @@
+0.5.2
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/setup_blocks.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Setup for blocks packaging framework
+#
+
+from setuptools import setup
+
+setup(name='Blocks-PFW',
+    package_dir={'Blocks': 'src/Blocks',
+        'Blocks.Packaging': 'src/Blocks/Packaging',
+        'Blocks.Packaging.DataSources': 'src/Blocks/Packaging/DataSources',
+        'Blocks.Packaging.DependencyProcessors': 'src/Blocks/Packaging/DependencyProcessors',
+        'Blocks.Packaging.Rules': 'src/Blocks/Packaging/Rules'},
+    version=file("blocks-version").read().strip(),
+    description='Utilities for packaging software',
+    packages=['Blocks',
+        'Blocks.Packaging',
+        'Blocks.Packaging.DataSources',
+        'Blocks.Packaging.DependencyProcessors',
+        'Blocks.Packaging.Rules'],
+    package_data={
+        'Blocks.Packaging.Rules': ['targetRules.xml', 'sourceRules.xml', 'packageDirectives.xml'],
+        'Blocks.Packaging.DependencyProcessors': ['*.xml'],
+        'Blocks.Packaging.DataSources': ['*.xsl'],
+    }
+)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/setup_symbian.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Setup for symbian utilities
+#
+
+from setuptools import setup
+
+setup(name='SymbianUtils',
+    package_dir={'SymbianUtils': 'src/SymbianUtils'},
+    version=file("symbian-version").read().strip(),
+    description='Utilities for packaging software',
+    packages=['SymbianUtils' ],
+    package_data={'SymbianUtils': ['bin/*.exe']}
+)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/Packaging/BuildData.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,369 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Build data
+#
+
+'''
+Classes for reading and formatting data from various sources for consumption by
+Blocks.Packager and DependencyProcessors.
+'''
+
+import os
+
+class BuildData(object):
+    '''
+    The base class for providing component data to Packager:
+        - name of component built
+        - location of targetroot
+        - location of sourceroot
+        - component version
+        - target files belonging to the component (relative to targetroot/
+        epocroot)
+        - source files belonging to the component (relative to sourceroot)
+        - file dependencies to other files
+    '''
+
+    def __init__(self):
+        pass
+
+    def getComponentName(self):
+        raise NotImplementedError
+
+    def setComponentName(self, name, previousNames=None):
+        raise NotImplementedError
+
+    def getComponentVersion(self):
+        raise NotImplementedError
+
+    def setComponentVersion(self, version):
+        raise NotImplementedError
+
+    def getComponentRevision(self):
+        raise NotImplementedError
+
+    def setComponentRevision(self, revision):
+        raise NotImplementedError
+
+    def getSourceRoot(self):
+        raise NotImplementedError
+
+    def setSourceRoot(self, path):
+        raise NotImplementedError
+
+    def getTargetRoot(self):
+        raise NotImplementedError
+
+    def setTargetRoot(self, path):
+        raise NotImplementedError
+
+    def getSourceFiles(self):
+        raise NotImplementedError
+
+    def addSourceFiles(self, list):
+        raise NotImplementedError
+
+    def getTargetFiles(self):
+        raise NotImplementedError
+
+    def addTargetFiles(self, list):
+        raise NotImplementedError
+
+    def setUseEpoch(self):
+        raise NotImplementedError
+
+    def hasFiles(self):
+        raise NotImplementedError
+
+class BdFile(object):
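+    '''
+    A single deliverable file plus optional dependency metadata. Paths are
+    normalized to Unix slashes and kept relative to sourceroot/epocroot.
+    (Descriptive summary added for clarity; behaviour as implemented below.)
+    '''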
+
+    def __init__(self, path):
+        self.path = os.path.normpath(path)
+        if os.sep != "/":
+            self.path = "/".join(self.path.split(os.sep))
+        self.ownerRequirements = []     # Files that the owner package will
+                                        # require
+        self.sourceRequirements = []    # Files that the component source
+                                        # package will require
+        self.variantPlatform = None
+        self.variantType = None
+
+    def baseName(self):
+        ''' Return the filename without the path. '''
+        return os.path.basename(self.path)
+
+    def getPath(self):
+        ''' Return path relative to sourceroot/epocroot. '''
+        return self.path
+
+    def addSourceDependency(self, path):
+        path = os.path.normpath(path)
+        if os.sep != "/":
+            path = "/".join(path.split(os.sep))
+        if path not in self.sourceRequirements:
+            self.sourceRequirements.append(path)
+
+    def addOwnerDependency(self, path):
+        path = os.path.normpath(path)
+        if os.sep != "/":
+            path = "/".join(path.split(os.sep))
+        if path not in self.ownerRequirements:
+            self.ownerRequirements.append(path)
+
+    def setSourceDependencies(self, paths):
+        '''
+        Set paths of files required by the source package of the component
+        owning this Deliverable.
+
+        @type paths: List(String)
+        '''
+        paths = [os.path.normpath(p) for p in paths]
+        if os.sep != "/":
+            paths = [ "/".join(path.split(os.sep)) for path in paths ]
+        self.sourceRequirements = paths
+
+    def setOwnerDependencies(self, paths):
+        '''
+        Set paths of files that the package owning this Deliverable depends
+        on.
+
+        @type paths: List(String)
+        '''
+        paths = [os.path.normpath(p) for p in paths]
+        if os.sep != "/":
+            paths = [ "/".join(path.split(os.sep)) for path in paths ]
+        self.ownerRequirements = paths
+
+    def getSourceDependencies(self):
+        '''
+        Get paths of files required by the source package of the component
+        owning this Deliverable.
+
+        @rtype: List(String)
+        '''
+        return self.sourceRequirements
+
+    def getOwnerDependencies(self):
+        '''
+        Get paths of files that the package owning this Deliverable depends
+        on.
+
+        @rtype: List(String)
+        '''
+        return self.ownerRequirements
+
+    def getVariantPlatform(self):
+        ''' Get stored platform or try to guess from path '''
+        if self.variantPlatform:
+            return self.variantPlatform
+        else:
+            # epoc32/release/armv5/udeb/foo
+            # -> "armv5"
+            parts = os.path.dirname(self.path).split('/')
+            if len(parts) > 2:
+                return parts[2]
+            else:
+                return "noarch"
+
+    def getVariantType(self):
+        ''' Get stored variantType or try to guess from path '''
+        if self.variantType:
+            return self.variantType
+        else:
+            # epoc32/release/armv5/udeb/foo
+            # -> "udeb"
+            parts = os.path.dirname(self.path).split('/')
+            if parts[:2] == ["epoc32", "release"] and len(parts) > 3:
+                return parts[3]
+            else:
+                return None
+
+class PlainBuildData(BuildData):
+    '''
+    Implementation of BuildData. To add simple file paths, use the
+    L{addTargetFiles} and L{addSourceFiles} methods. To incorporate
+    dependency information for a target file, create a BdFile instance and then
+    add it using the L{addDeliverable} method.
+
+    Similarly L{getTargetFiles} and L{getSourceFiles} return simple lists of
+    paths, whereas L{getDependencies} only returns BdFile instances.
+    '''
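+    # Illustrative usage sketch (all names are from this class's API; the
+    # component name, version and paths are hypothetical):
+    #   bd = PlainBuildData()
+    #   bd.setComponentName("mycomponent")
+    #   bd.setComponentVersion("1.0.0")
+    #   bd.setTargetRoot("/build/epocroot")
+    #   bd.addTargetFiles(["epoc32/release/armv5/urel/my.dll"])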
+
+    def __init__(self):
+        BuildData.__init__(self)
+        self.sourceRoot = ""
+        self.targetRoot = ""
+        # keys are paths of files belonging to this component
+        # values (if any) are Deliverable objects
+        self.sourceFiles = {}
+        self.targetFiles = {}
+        self.apiMapping = {}
+        self.componentName = ""
+        self.previousNames = []
+        self.componentVersion = ""
+        self.componentRevision = None
+        self.significantAttributes = ("vendor", "license", "group")
+        self.attributes = {}
+        # extra data for dependency processors
+        self.dependencyData = {}
+        self.forcePackageAll = False
+        self.useEpoch = False
+
+    def getComponentName(self):
+        return self.componentName
+
+    def setComponentName(self, name, previousNames=None):
+        '''
+        Set component name and optionally previous names of the component
+
+        If you add the previous names of the component to previousNames,
+        dependencies in generated bundles are handled correctly by replacing
+        the previous bundles.
+
+        @param name: Name of the component
+        @type name: String
+        @param previousNames: List of previous names of the component
+        @type previousNames: List(String)
+        '''
+        if name:
+            self.componentName = name
+        else:
+            raise ValueError("Component name not defined")
+        self.previousNames = previousNames or []
+        assert isinstance(self.previousNames, list), "previousNames must be a list"
+
+    def getComponentVersion(self):
+        return self.componentVersion
+
+    def setComponentVersion(self, version):
+        if version:
+            self.componentVersion = version
+        else:
+            raise ValueError("Component version not defined")
+
+    def getComponentRevision(self):
+        return self.componentRevision
+
+    def setComponentRevision(self, revision):
+        if revision:
+            self.componentRevision = revision
+        else:
+            raise ValueError("empty revision")
+
+    def getSourceRoot(self):
+        return self.sourceRoot
+
+    def setSourceRoot(self, path):
+        if os.sep != "/":
+            path = "/".join(path.split(os.sep))
+        if path:
+            self.sourceRoot = os.path.abspath(path)
+        else:
+            raise ValueError("Source root not defined")
+
+    def getTargetRoot(self):
+        return self.targetRoot
+
+    def setTargetRoot(self, path):
+        if os.sep != "/":
+            path = "/".join(path.split(os.sep))
+        if path:
+            self.targetRoot = os.path.abspath(path)
+        else:
+            raise ValueError("Target root not defined")
+
+    def getSourceFiles(self):
+        return self.sourceFiles.keys()
+
+    def normalizePath(self, path):
+        '''
+        Check and clean up path
+
+        @param path: Relative path (to sourceroot or epocroot), Unix or Windows slashes accepted.
+        @type path: String
+        @return: Path converted to Unix slashes
+        '''
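+        # For example (illustrative): "tools\\bin\\x.exe" -> "tools/bin/x.exe";
+        # absolute paths, '../' components and a bare '.' raise ValueError.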
+        path = os.path.normpath(path)
+        path = "/".join(path.split("\\"))
+        if not path or path.isspace():
+            raise ValueError, "Path not defined or empty"
+        if os.path.isabs(path):
+            raise ValueError, "Path should not be absolute (%s)" % path
+        if "../" in path:
+            raise ValueError, "Path must not contain '../' (%s)" % path
+        if not os.path.basename(path):
+            raise ValueError, "Path must contain a filename (%s)" % path
+        if path == ".":
+            raise ValueError, "Path must not be '.' (%s)" % path
+        return path
+
+    def addSourceFiles(self, list):
+        '''
+        Add a simple list of filenames without dependency data.
+
+        @param list: List of relative paths to source root.
+        @type list: List(String)
+        '''
+        for path in list:
+            path = self.normalizePath(path)
+            if path not in self.sourceFiles:
+                self.sourceFiles[path] = None
+
+    def getTargetFiles(self):
+        return self.targetFiles.keys()
+
+    def addTargetFiles(self, list):
+        '''
+        Add a simple list of filenames without dependency data.
+
+        @param list: paths relative to epocroot/targetroot
+        @type list: List(String)
+        '''
+        for path in list:
+            path = self.normalizePath(path)
+            if path not in self.targetFiles:
+                self.targetFiles[path] = None
+
+    def addNonstandardInterfaces(self, mapping):
+        '''
+        Add nonstandard interfaces for binaries
+
+        @param mapping: Update mappings of binaries to correct api files (.dso).
+        Can be either dictionary or iterable of key/value pairs.
+        Note: Can be called multiple times to add mappings
+        @type mapping: Dict(string)
+        '''
+        self.apiMapping.update(mapping)
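+        # Illustrative mapping (hypothetical paths), binary -> its API file:
+        #   {"epoc32/release/armv5/urel/foo.dll":
+        #       "epoc32/release/armv5/lib/foo.dso"}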
+
+    def getDependencies(self):
+        return [f for f in self.targetFiles.values() if f is not None]
+
+    def addDeliverable(self, deliverable):
+        '''
+        Add Deliverable that also contains dependency data.
+
+        @param deliverable: A BdFile instance containing the file path and
+        additional metadata.
+        @type deliverable: BdFile
+        '''
+        deliverable.path = self.normalizePath(deliverable.getPath())
+        self.targetFiles[deliverable.path] = deliverable
+
+    def setUseEpoch(self):
+        self.useEpoch = True
+
+    def getUseEpoch(self):
+        return self.useEpoch
+
+    def hasFiles(self):
+        return bool(self.targetFiles) or bool(self.sourceFiles)
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/Packaging/ComponentBuilder.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,719 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Component builder
+#
+
+'''
+ComponentBuilder is the scaffolding for creating PackageModel.Component objects
+from a set of files introduced via BuildData. This could/should be a giant
+constructor in Component, but the idea was to keep this specific implementation
+separate from the generic package model.
+'''
+
+import re
+import os
+import xml.dom.minidom as dom
+
+from SymbianUtils.Evalid import Evalid
+from SymbianUtils.Readelf import Readelf
+from SymbianUtils import SymbianUtilsError
+
+from PackageModel import *
+from Storage import NoPreviousBuild
+import Blocks.Packaging.Rules
+from Blocks.Packaging.Logging import Logging
+from Blocks.Packaging import PackagingError
+from Blocks.Packaging.PackageWriter import Dependency
+
+class BuilderException(PackagingError):
+    ''' Parent to more specific exceptions. Error in packaging rules, previous metadata or input. '''
+
+class FileError(BuilderException):
+    ''' Missing listed file or non-optional derived file '''
+
+class DerivedFileError(BuilderException):
+    ''' Missing file derived via rules '''
+
+class BuildHistoryError(BuilderException):
+    ''' Previous metadata has errors or does not match current build '''
+
+class ConfigError(BuilderException):
+    ''' ConfigError - problem with rules etc. '''
+
+class ComponentBuilder(object):
+
+    def __init__(self, targetRulesPath=None, sourceRulesPath=None, directivesPath=None, keepGoing=True):
+        self.targetRules = self.loadPackagingRules(targetRulesPath or Blocks.Packaging.Rules.targetRules())
+        self.sourceRules = self.loadPackagingRules(sourceRulesPath or Blocks.Packaging.Rules.sourceRules())
+        self.directives = dom.parse(directivesPath or Blocks.Packaging.Rules.packageDirectives())
+        self.packages = {}
+        self.sources = None
+        self.oldComp = None
+        self.newComp = None
+        self.precLess = []
+        self.precMore = []
+        self.precSame = []
+        self.precDifferent = []
+        self.precDone = []
+        self.precStarted = []
+        self.buildData = None
+        self.keepGoing = keepGoing # If True, log errors as warnings and continue
+
+    @staticmethod
+    def getLogger():
+        ''' Getting the logger each time makes the module picklable '''
+        return Logging.getLogger("pfw.componentbuilder")
+
+    def createComponent(self, storage, buildData):
+        '''
+        The method you want to call to use this class.
+
+        @param buildData: A BuildData containing the files, attributes etc. for
+                          the component.
+        @type buildData: BuildData
+        @return: The ready component
+        @rtype: Component
+        @raise BuilderException: Error in packaging rules, previous metadata or input.
+        '''
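+        # Illustrative call (names from this module; 'storage' is assumed
+        # to provide getBuildId() and getLastMetaDataFile(), as used below):
+        #   comp = ComponentBuilder().createComponent(storage, buildData)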
+        self.buildData = buildData
+        self.packages = {}
+        self.sources = None
+        self.oldComp = None
+        self.newComp = None
+        self.precLess = []
+        self.precMore = []
+        self.precSame = []
+        self.precDifferent = []
+        self.precDone = []
+        self.precStarted = []
+
+        if buildData.getTargetFiles() and buildData.getTargetRoot():
+            self.__addDeliverables(buildData.getTargetFiles(), buildData.getTargetRoot())
+        else:
+            self.getLogger().debug("%s: No target being created. TargetRoot '%s', %s targets.",
+                buildData.getComponentName(), buildData.getTargetRoot(), len(buildData.getTargetFiles()))
+        if buildData.getSourceFiles() and buildData.getSourceRoot():
+            self.__addSources(buildData.getSourceFiles(), buildData.getSourceRoot())
+        else:
+            self.getLogger().debug("%s: No source package created. SourceRoot '%s', %s sources.",
+                buildData.getComponentName(), buildData.getSourceRoot(), len(buildData.getSourceFiles()))
+
+        self.__finalizePackages()
+        self.newComp = Component(buildData.getComponentName(), buildData.getComponentVersion(), self.packages.values())
+        if self.sources:
+            self.newComp.setSourcePackage(self.sources)
+        self.newComp.setBuild(storage.getBuildId())
+        self.newComp.setRevision(buildData.getComponentRevision())
+        self.newComp.getIdentifier().setUseEpoch(buildData.getUseEpoch())
+
+        for name, value in buildData.attributes.items():
+            significant = False
+            if name in buildData.significantAttributes:
+                significant = True
+            self.newComp.setAttribute(name, value, significant)
+
+        try:
+            self.oldComp = Component.load(storage.getLastMetaDataFile(componentName=self.newComp.getName()))
+
+            if self.newComp.getName() != self.oldComp.getName():
+                raise BuildHistoryError("Incompatible component. Cannot compare '%s' with previous build named '%s'" %
+                    (self.newComp.getName(), self.oldComp.getName()))
+
+            if buildData.forcePackageAll:
+                self.getLogger().debug("%s: packaging forced" % self.newComp.getName())
+            else:
+                self.getLogger().debug("%s: resolving packaging" % self.newComp.getName())
+                # The default is to package all packages. If not forced, this
+                # is toggled off and resolvePackaging() sets per package values.
+                self.__resolvePackaging()
+        except NoPreviousBuild, e:
+            self.getLogger().debug(str(e))
+
+        if self.oldComp:
+            if self.oldComp.getVersion() == self.newComp.getVersion():
+                if self.oldComp.getBuild() != self.newComp.getBuild():
+                    self.newComp.setRelease(str(int(self.oldComp.getRelease()) + 1))
+
+        self.__resolveApiVersions()
+        self.__setPackageVersions()
+        self.__resolveDependencies()
+        return self.newComp
+
+    @staticmethod
+    def loadPackagingRules(path):
+        '''
+        Parse XML configuration and create packaging rules.
+
+        @param path: Path of configuration file
+        @type path: String
+        @todo: validate xml elsewhere and skip checks
+        @raise ConfigError: Incomplete rules
+        '''
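+        # Expected rule shape (illustrative; element names taken from the
+        # parsing code below):
+        #   <rule>
+        #     <path>...</path> <type>...</type> <package>...</package>
+        #     <variant>...</variant> <uid1>...</uid1> <api>...</api>
+        #     <mandatory>...</mandatory> <optional>...</optional>
+        #   </rule>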
+        rules = []
+        doc = dom.parse(path)
+        for rule in doc.getElementsByTagName("rule"):
+            matchPath = ""
+            theType = ""
+            uids = {}
+            package = ""
+            variant = None
+            extras = []
+            optionalExtras = []
+            api = None
+            try:
+                matchPath = rule.getElementsByTagName("path")[0].firstChild.nodeValue.encode("ascii")
+                theType = rule.getElementsByTagName("type")[0].firstChild.nodeValue.encode("ascii")
+            except Exception:
+                raise ConfigError("Missing rule match or type.")
+            if not theType:
+                raise ConfigError("Missing type in rule.")
+            if not matchPath:
+                raise ConfigError("Missing match path in rule.")
+            if not theType == "ignore":
+                # A package is mandatory for non-ignored files
+                try:
+                    package = rule.getElementsByTagName("package")[0].firstChild.nodeValue.encode("ascii")
+                except IndexError:
+                    raise ConfigError("Package rule missing 'package' element.")
+                # Everything else is optional
+                if rule.getElementsByTagName("variant") and rule.getElementsByTagName("variant")[0].firstChild:
+                    variant = rule.getElementsByTagName("variant")[0].firstChild.nodeValue.encode("ascii")
+                for x in (1, 2, 3):
+                    if rule.getElementsByTagName("uid" + str(x)):
+                        uids[x] = rule.getElementsByTagName("uid" + str(x))[0].firstChild.nodeValue.encode("ascii")
+                extras = [m.firstChild.nodeValue.encode("ascii") for m in rule.getElementsByTagName("mandatory")]
+                optionalExtras = [m.firstChild.nodeValue.encode("ascii") for m in rule.getElementsByTagName("optional")]
+                if rule.getElementsByTagName("api"):
+                    api = rule.getElementsByTagName("api")[0].firstChild.nodeValue.encode("ascii")
+            rules.append(Rule(matchPath, theType, uids, package, variant, api, extras, optionalExtras))
+        return(rules)
+
+    def __addDeliverables(self, pathList, baseDir):
+        '''
+        - Use packaging rules to create and store Deliverable object, as well
+          as any extras.
+        - Add Deliverables to self.packages dictionary where package names are keys.
+
+        @param pathList: Paths relative to baseDir (epocroot)
+        @type pathList: List(String)
+        @param baseDir: Absolute path to epocroot.
+        @type baseDir: String
+        @raise BuilderException: Things broke
+        @raise FileError: No file found in path
+        '''
+        for path in pathList:
+            absPath = os.path.join(baseDir, path)
+            if not os.path.isfile(absPath):
+                if self.keepGoing:
+                    self.getLogger().warning("Missing listed target file %s" % absPath)
+                    continue
+                else:
+                    raise FileError("Missing listed target file %s" % absPath)
+            matched = False
+            for rule in self.targetRules:
+                try:
+                    match, files, extras = rule(path, baseDir, self.keepGoing, absPath, self.buildData.apiMapping)
+                    if match:
+                        matched = True
+                        for pkg, file in files:
+                            if isinstance(file, Deliverable):
+                                if pkg not in self.packages:
+                                    self.packages[pkg] = Package(pkg)
+                                self.packages[pkg].addFile(file)
+                        for e in extras:
+                            self.__addDeliverables([e], baseDir)
+                        break
+                except SymbianUtilsError, e:
+                    if self.keepGoing:
+                        self.getLogger().warning(str(e))
+                        matched = True # Prevent a log entry saying it did not match any rules
+                        break          #
+                    else:
+                        raise BuilderException(str(e))
+
+            if not matched:
+                _e = "%s: deliverable did not match any rules '%s'" % (self.buildData.componentName, path)
+                if self.keepGoing:
+                    self.getLogger().info(_e)
+                else:
+                    raise ConfigError(_e)
+
+    def __addSources(self, pathList, baseDir):
+        '''
+        - Use packaging rules to create and store Deliverable object.
+        - Add Sources to source package.
+
+        @param pathList: Paths relative to baseDir (epocroot)
+        @type pathList: List(String)
+        @param baseDir: Absolute path to epocroot.
+        @type baseDir: String
+        @raise BuilderException: Things broke
+        @raise FileError: No file found in path
+        '''
+        for path in pathList:
+            absPath = os.path.join(baseDir, path)
+            if not os.path.isfile(absPath):
+                if self.keepGoing:
+                    self.getLogger().warning("Missing listed source file %s" % absPath)
+                    continue
+                else:
+                    raise FileError("Missing listed source file %s" % absPath)
+            matched = False
+            for rule in self.sourceRules:
+                try:
+                    match, files, extras = rule(path, baseDir, self.keepGoing, absPath)
+                    if match:
+                        matched = True
+                        for pkg, file in files:
+                            if isinstance(file, Deliverable):
+                                if not self.sources:
+                                    self.sources = Package(pkg)
+                                self.sources.addFile(file)
+                        for e in extras:
+                            self.__addSources([e], baseDir)
+                        break
+                except SymbianUtilsError, e:
+                    if self.keepGoing:
+                        self.getLogger().warning(str(e))
+                        matched = True # Prevent a log entry saying it did not match any rules
+                        break          #
+                    else:
+                        raise BuilderException(str(e))
+
+            if not matched:
+                _e = "%s: source did not match any rules '%s'" % (self.buildData.componentName, path)
+                if self.keepGoing:
+                    self.getLogger().info(_e)
+                else:
+                    raise ConfigError(_e)
+
+    def __finalizePackages(self):
+        '''
+        Postprocess and finalize packages, renaming and applying
+        dependencies and preconditions using package directives.
+        '''
+        # First pass get names
+        componentName = self.buildData.getComponentName()
+        if self.sources:
+            self.sources.name = self.resolvePackageName(self.sources.name, componentName, self.directives)
+        for k, v in self.packages.items():
+            out = self.resolvePackageName(v.name, componentName, self.directives)
+            if out:
+                # Add possible replaces by using previous names of the component if given
+                replaces = [self.resolvePackageName(v.name, n, self.directives) for n in self.buildData.previousNames]
+                self.packages[k].replaces = replaces
+                self.packages[k].name = out
+            else:
+                raise Exception("failed to add name to package %s" % k)
+
+        # After this the final names of deps and precs can be resolved
+        for k, v in self.packages.items():
+            out = self.__getPackageInfo(k)
+            if out:
+                self.packages[k].arch, \
+                self.packages[k].depends, \
+                self.packages[k].preconditions = out
+                self.getLogger().debug("Applied directives. Name: %s Arch: %s Depends: %s Preconditions: %s"%(
+                            self.packages[k].getName(),
+                            self.packages[k].arch,
+                            str([str(d) for d in self.packages[k].depends]),
+                            str(self.packages[k].preconditions))
+                        )
+            else:
+                raise Exception("failed to add data to package %s" % k)
+
+    @staticmethod
+    def resolvePackageName(pkgName, componentName, directives):
+        '''
+        Use packageDirectives to work out full name.
+
+        For example::
+            >>> resolvePackageName("exec.arm", "foo", Rules.packageDirectives())
+            'foo.exec-arm'
+
+        @param pkgName: The package name as determined by packaging rules
+        @return: The full name of the package from component and suffix
+        @rtype: String or None
+        '''
+        for d in directives.getElementsByTagName("rule"):
+            rex = re.compile(d.getElementsByTagName("package")[0].firstChild.nodeValue.encode("ascii"))
+            mo = rex.match(pkgName)
+            if not mo:
+                continue
+            else:
+                if d.getElementsByTagName("suffix")[0].firstChild:
+                    fullName = componentName + d.getElementsByTagName("suffix")[0].firstChild.nodeValue.encode("ascii")
+                else:
+                    fullName = componentName
+                for i in range(1, len(mo.groups())+1):
+                    fullName = fullName.replace("$%s" % i, mo.group(i))
+                return(fullName)
+        return None
+
+    def __getPackageInfo(self, pkgName):
+        '''
+        Use packageDirectives to work out architecture and relations to other
+        packages in the component.
+
+        Running this only makes sense once all packages have been added and
+        names updated using getPackageName()
+
+        @param pkgName: The package name as determined by packaging rules
+        @return: (arch, depends, preconditions).
+            - The architecture of the package
+            - Packages this depends on
+            - list of Package names
+        @rtype: String, List(Dependency), List(String)
+        '''
+        for d in self.directives.getElementsByTagName("rule"):
+            rex = re.compile(d.getElementsByTagName("package")[0].firstChild.nodeValue.encode("ascii"))
+            mo = rex.match(pkgName)
+            if not mo:
+                continue
+            else:
+                arch = d.getElementsByTagName("arch")[0].firstChild.nodeValue.encode("ascii")
+                deps = []
+                preconditions = []
+                try:
+                    deps = [Dependency(m.firstChild.nodeValue.encode("ascii"), m.getAttribute("type").encode("ascii"))
+                            for m in d.getElementsByTagName("depends")]
+                except IndexError:
+                    pass
+                try:
+                    preconditions = [m.firstChild.nodeValue.encode("ascii") for m in d.getElementsByTagName("precondition")]
+                except IndexError:
+                    pass
+                for i in range(1, len(mo.groups())+1):
+                    arch = arch.replace("$%s" % i, mo.group(i))
+                    for dep in deps:
+                        dep.package = dep.package.replace("$%s" % i, mo.group(i))
+                    preconditions = [prec.replace("$%s" % i, mo.group(i)) for prec in preconditions]
+                deps = [d for d in deps if d.package in self.packages]
+                for dep in deps:
+                    dep.package = self.packages[dep.package].name
+                preconditions = [self.packages[p].name for p in preconditions if p in self.packages]
+                return arch, deps, preconditions
+        return None
+
+    def __resolvePackaging(self):
+        '''
+        Toggle package flags in newComp packages based on oldComp
+
+         - No significant changes (checksums are the same, significant metadata
+           same): No repackaging
+         - Significant component metadata changed: All packages are written
+         - If preconditions, write package only if there are significant changes
+           in packages listed in preconditions
+        '''
+        # source package always compared:
+        if self.newComp.getSourcePackage():
+            if self.oldComp.getSourcePackage():
+                if self.newComp.getSourcePackage() == self.oldComp.getSourcePackage():
+                    self.newComp.getSourcePackage().setPackageFlag(False)
+                    self.getLogger().debug("%s: source package unchanged", self.newComp.getName())
+                else:
+                    self.getLogger().debug("%s: source package has changed", self.newComp.getName())
+            else:
+                self.getLogger().debug("%s: source package has been added", self.newComp.getName())
+        if self.newComp == self.oldComp:
+            self.getLogger().debug("identical component: %s", self.newComp.getName())
+            self.newComp.setPackageAllFlag(False)
+            for p in self.newComp.getPackages():
+                p.setPackageFlag(False)
+            return
+        # force writing of all packages if vital metadata has changed
+        elif self.newComp.getSignificantAttributes() != self.oldComp.getSignificantAttributes():
+            self.getLogger().debug("vital metadata changed, force repackage: %s", self.newComp.getName())
+            self.newComp.setPackageAllFlag(True)
+            return
+        # resort to package by package
+        else:
+            self.newComp.setPackageAllFlag(False)
+            self.precLess, self.precMore, self.precSame, self.precDifferent = self.newComp.diff(self.oldComp)
+            # different packages repackaged only if a precondition has changed
+            for d in self.precDifferent:
+                self.getLogger().debug("check changed package for preconditions: %s"%d)
+                self.__resolvePreconditions(d)
+            for p in self.precSame:
+                self.newComp.getPackage(p).setPackageFlag(False)
+
+    def __resolvePreconditions(self, package):
+        '''
+        A precondition is another package. A package with preconditions does
+        not have to be rewritten when it changes unless at least one of its
+        preconditions has also changed.
+
+        Assign to self.precSame those packages that have preconditions and none
+        of the packages listed as preconditions has changed. Preconditions can
+        also have preconditions, and the relationship can be circular.
+
+        @param package: The name of a package
+        '''
+        if package in self.precStarted:
+            self.getLogger().debug("Preconditions already started: %s", package)
+            return
+        else:
+            self.precStarted.append(package)
+        if self.newComp.getPackage(package).preconditions:
+            for p in self.newComp.getPackage(package).preconditions:
+                self.getLogger().debug("Prec %s: %s", package, p)
+                self.__resolvePreconditions(p)
+            preconditionsWithChanges = [pre for pre in self.newComp.getPackage(package).preconditions if pre not in self.precSame]
+            if not preconditionsWithChanges:
+                self.getLogger().debug("%s: preconditions not changed, tagging as unchanged", package)
+                self.precSame.append(package)
+                self.precDifferent = [d for d in self.precDifferent if d != package]
+
+    def __resolveApiVersions(self):
+        ''' Set or update file and package API versions. '''
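+        # Version bump sketch (illustrative numbers): if a file's previous API
+        # version was 2.3, an identical interface keeps 2.3, a compatible
+        # extension (the new API still provides the old one) bumps it to 2.4,
+        # and an incompatible change bumps it to 3.0. The package API version
+        # is then bumped by the largest change seen among its files.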
+        for p in self.newComp.getPackages():
+            filesWithApi = [f for f in p.getFiles() if (isinstance(f, Executable) and f.api)]
+            if filesWithApi:
+                if self.oldComp and self.oldComp.getPackage(p.getName()):
+                    oldP = self.oldComp.getPackage(p.getName())
+                    if oldP.getApiVersion():
+                        oldMajor, oldMinor = oldP.getApiVersion().split(".")
+                    else:
+                        self.getLogger().warning("Previous version of %s has no API version", p.getName())
+                        oldMajor = "1"
+                        oldMinor = "0"
+                    majorChange = False
+                    minorChange = False
+                    for f in filesWithApi:
+                        oldF = oldP.getFile(f.getPath())
+                        if oldF and hasattr(oldF, "api"):
+                            try:
+                                fOldMajor, fOldMinor = oldF.api.version.split(".")
+                                assert (int(fOldMajor) > 0)
+                                assert (int(fOldMinor) >= 0)
+                            except AssertionError:
+                                self.getLogger().warning("Previous version of %s has bad API version", f.getPath())
+                                fOldMajor = "1"
+                                fOldMinor = "0"
+                            if f.api.isIdentical(oldF.api):
+                                continue
+                            elif f.api.provides(oldF.api):
+                                f.api.version = (fOldMajor + "." + str(int(fOldMinor) + 1))
+                                minorChange = True
+                                continue
+                            else:
+                                # incompatible change: bump the major version
+                                f.api.version = (str(int(fOldMajor) + 1) + ".0")
+                                majorChange = True
+                        else:
+                            self.getLogger().warning("Previous version of %s has no API version", f.getPath())
+                            # the first API version will be 1.0 by default
+                    if majorChange:
+                        p.setApiVersion(str(int(oldMajor) + 1) + ".0")
+                    elif minorChange:
+                        p.setApiVersion(oldMajor + "." + str(int(oldMinor) + 1))
+                    else:
+                        p.setApiVersion(oldMajor + "." + oldMinor)
+                else:
+                    if not p.getApiVersion():
+                        p.setApiVersion("1.0")
+
+    def __setPackageVersions(self):
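+        '''
+        Stamp each package with the component identifier and record the
+        version at which the package was last actually packaged.
+        '''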
+        if self.newComp.getSourcePackage():
+            p = self.newComp.getSourcePackage()
+            p.setIdentifier(self.newComp.getIdentifier())
+        for p in self.newComp.getPackages():
+            # Inherit version and build id from parent component
+            p.setIdentifier(self.newComp.getIdentifier())
+            if p.getPackageFlag():
+                p.setLastPackagedVersion(p)
+            else:
+                p.setLastPackagedVersion(self.oldComp.getPackage(p.getName()))
+
+    def __resolveDependencies(self):
+        '''
+        Each dependency is given a version and evaluator.
+
+         - If the required package is being packaged in this release, the
+           dependency is on this version.
+         - If a dependency is not packaged this time, get the version from
+           previous package metadata.
+
+        The evaluation operator is ">=" if a version is defined, but for now we
+        always get the version - we even raise an exception if the version has
+        not been set.
+        '''
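+        # Sketch with hypothetical package names: if foo.exec-arm depends on
+        # foo.data and foo.data is packaged in this run, the dependency becomes
+        # "foo.data (>= <current version>)"; otherwise the version recorded at
+        # the previous packaging of foo.data is used.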
+        eval = ">="
+        for p in self.newComp.getPackages():
+            # dep is a Dependency, currently only holds the name of a Package
+            # that package p depends on. See __getPackageInfo().
+            for dep in p.depends:
+                depPkg = self.newComp.getPackage(dep.package)
+                if depPkg.getPackageFlag():
+                    # It's being packaged in this release so we require the latest version
+                    dep.setIdentifier(depPkg.getIdentifier())
+                else:
+                    # Not packaged now, use old metadata to get version info
+                    if not self.oldComp:
+                        raise BuildHistoryError("%s: error while resolving dependencies: metadata for previous builds not available" % self.newComp.getName())
+                    depPkg = self.oldComp.getPackage(dep.package)
+                    dep.setIdentifier(depPkg.getLastPackagedVersion())
+                if (not dep.getVersion()) or (not dep.getBuild()):
+                    raise BuildHistoryError("%s: unable to resolve version of dependency: %s -> %s" %
+                        (self.newComp.getName(), p.getName(), dep.package))
+                dep.setEval(evalOp)
+
+class Rule(object):
+    """
+    A set of criteria used to create Deliverable or subclass objects.
+
+    When a Rule object is called it acts as a Deliverable factory::
+
+        r = Rule(criteria)
+        deliverable = r(path, baseDirectory)
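+
+    An illustrative rule (regex and names invented, not taken from the real
+    directives)::
+
+        r = Rule(r"epoc32/release/armv5/urel/(.*)\.exe", type="exe",
+                 package="exec-arm")
+        isAMatch, files, extras = r("epoc32/release/armv5/urel/foo.exe",
+                                    baseDir="/build")
+        # files would contain one ("exec-arm", Executable) tuple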
+    """
+
+    def __init__(self, matchPath, type, matchUids=None, package="", variant=None, api=None, extras=None, optionalExtras=None):
+        '''
+        @param matchPath: Regex that matches the file name and path
+        @param type: Deliverable type, e.g. "exe", "dll", "staticlib", "dso",
+            "file" or "ignore"
+        @param matchUids: Mapping of UID index to expected hex value
+        @param package: Name of the target package; may contain $n references
+            to groups in matchPath
+        @param variant: Variant; may contain $n references
+        @param api: Path to related .dso file
+        @param extras: Derived files that must accompany a match
+        @param optionalExtras: Derived files that may accompany a match
+        '''
+        self.matchPath = matchPath
+        self.pathRe = re.compile(matchPath)
+        self.matchUids = matchUids or {}
+        self.type = type
+        self.package = package
+        self.variant = variant
+        self.api = api
+        self.extras = extras or []
+        self.optionalExtras = optionalExtras or []
+
+    @staticmethod
+    def getLogger():
+        ''' Getting the logger each time makes the module picklable '''
+        return Logging.getLogger("pfw.componentbuilder.rule")
+
+    def __call__(self, path, baseDir="", keepGoing=True, absPath=None, apiMapping=None):
+        """
+        @param path: String
+        @param baseDir: String
+        @param keepGoing: Ignore DerivedFileError and ConfigError. FileError is never ignored.
+        @param absPath: baseDir and path joined (optional)
+        @return: isAMatch, files, extras
+        @rtype: Boolean, List((packageName, Deliverable)), List(String)
+        @raise DerivedFileError: A file derived from rules is missing, excluding optional extras.
+        @raise ConfigError: The rule specifies an unrecognized Deliverable type
+        @raise SymbianUtils.SymbianUtilsError: A SymbianUtils tool failed while trying to process a file
+        """
+        if not path:
+            raise ValueError("Cannot create a Deliverable from an empty path")
+        if absPath is None:
+            absPath = os.path.join(baseDir, path)
+        apiMapping = apiMapping or {}
+        isAMatch = False
+        files = [] # contains (packageName, Deliverable) tuples
+        extras = []
+        m = self.pathRe.match(path) # Path matches
+        u = True                    # UIDs match
+        if m:
+            for k, v in self.matchUids.items():
+                u = u and Evalid.getUid(k, absPath) == int(str(v), 16)
+        if m and u:
+            isAMatch = True
+            if not self.type == "ignore":
+                # replace variables in rules to obtain derived values
+                derivedPackage = self.package
+                derivedVariant = self.variant
+                apiPath = apiMapping.get(path) or apiMapping.get(os.path.basename(path))
+                if apiPath:
+                    derivedApi = apiPath
+                else:
+                    derivedApi = self.api           # path to relevant dso file
+                for i in range(1, len(m.groups())+1):
+                    if m.group(i) is None:
+                        continue
+                    derivedPackage = derivedPackage.replace("$%s"%i, m.group(i))
+                    if derivedVariant:
+                        derivedVariant = derivedVariant.replace("$%s"%i, m.group(i))
+                    if derivedApi:
+                        derivedApi = derivedApi.replace("$%s"%i, m.group(i))
+                checksum = Evalid.generateSignature(absPath, self.type)
+                theApi = None
+                if derivedApi:
+                    try:
+                        theApi = API(interface=Readelf.getInterface(os.path.join(baseDir, derivedApi)))
+                    except Exception, e:
+                        _e = "Failed to get API for %s. Rule: '%s', error: %s" % (path, self.matchPath, str(e))
+                        print _e
+                        if keepGoing:
+                            self.getLogger().warning(_e)
+                        elif _e.endswith("No such file"):
+                            # don't raise exception if missing file
+                            self.getLogger().warning(_e)
+                        else:
+                            # wrong filetype etc. still cause exception
+                            # preserve traceback while modifying message
+                            _args = list(e.args)
+                            _args[0] = _e
+                            e.args = tuple(_args)
+                            raise
+
+                if self.type in ("exe", "plugin", "dll"):
+                    files.append((derivedPackage, Executable(path, self.type, os.path.getsize(absPath), checksum, derivedVariant, theApi)))
+                elif self.type in ("staticlib", "dso"):
+                    files.append((derivedPackage, Library(path, self.type, os.path.getsize(absPath), checksum, derivedVariant)))
+                elif self.type in ("file", "preprocessed_text"):
+                    files.append((derivedPackage, Deliverable(path, self.type, os.path.getsize(absPath), checksum)))
+                elif self.type in ("intel_pe", "intel"):
+                    files.append((derivedPackage, PEBinary(path, self.type, os.path.getsize(absPath), checksum)))
+                else:
+                    if keepGoing:
+                        self.getLogger().error("Unrecognized deliverable type '%s' in rule '%s', adding as Deliverable", self.type, self.matchPath)
+                        files.append((derivedPackage, Deliverable(path, self.type, os.path.getsize(absPath), checksum)))
+                    else:
+                        raise ConfigError("Unrecognized deliverable type '%s' in rule '%s'" % (self.type, self.matchPath))
+
+                for extra in self.extras:
+                    for i in range(1, len(m.groups())+1):
+                        if m.group(i) is None:
+                            continue
+                        extra = extra.replace("$%s"%i, m.group(i))
+                    if os.path.isfile(os.path.join(baseDir, extra)):
+                        extras.append(extra)
+                    else:
+                        _e = "Missing required file %s (derived from %s) in rule '%s'" % (extra, path, self.matchPath)
+                        if keepGoing:
+                            self.getLogger().warning(_e)
+                        else:
+                            raise DerivedFileError(_e)
+
+                for extra in self.optionalExtras:
+                    for i in range(1, len(m.groups())+1):
+                        if m.group(i) is None:
+                            continue
+                        extra = extra.replace("$%s" % i, m.group(i))
+                    if os.path.isfile(os.path.join(baseDir, extra)):
+                        extras.append(extra)
+                    else:
+                        self.getLogger().debug("Missing optional file %s (derived from %s)", extra, path)
+        return isAMatch, files, extras
+
+    def __str__(self):
+        thestr = """
+        path: %s
+        uid: %s
+        package: %s
+        """ % (self.matchPath, self.matchUids, self.package)
+        return thestr
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/Packaging/DataSources/LinkInfoToBuildData.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,261 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Read generic link info XML generated by SbsLinkInfoReader, create BuildData.
+#
+
+''' Read generic link info XML generated by SbsLinkInfoReader, create BuildData. '''
+
+import xml.sax
+import os
+import sys
+
+from Blocks.Packaging.BuildData import PlainBuildData, BdFile
+from Blocks.Packaging.Logging import Logging
+
+class LinkInfo(object):
+    '''
+    Created from LinkInfo XML output. Used to add dependencies to a buildData
+    instance.
+
+    Usage::
+        lf = LinkInfo(xml)
+        lf.addDependencies(myBuildData)
+
+    Doctest::
+        >>> import StringIO
+        >>> xml = StringIO.StringIO("""<linkdata epocroot="/data/dragonfly/workspaces/1733/">
+        ...     <inf path="/data/dragonfly/workspaces/1733/src/common/generic/graphics/egl/group/bld.inf" ddir="foo/bar">
+        ...       <target path="epoc32/release/armv5/udeb/libegl.dll" component="EGL Implementation" platform="armv5" dso="">
+        ...         <libraries>
+        ...           <lib>epoc32/release/armv5/lib/dfpaeabi.dso</lib>
+        ...           <lib>epoc32/release/armv5/lib/dfprvct2_2.dso</lib>
+        ...           <lib>epoc32/release/armv5/lib/drtrvct2_2.dso</lib>
+        ...         </libraries>
+        ...       </target>
+        ...     </inf>
+        ... </linkdata>""")
+        >>> lf = LinkInfo(xml)
+        >>> data = PlainBuildData()
+        >>> data.addTargetFiles(['epoc32/release/armv5/udeb/libegl.dll'])
+        >>> data = lf.addDependencies(data)
+        >>> assert len(data.getDependencies()[0].getOwnerDependencies()) == 3
+        >>> assert len(data.getDependencies()[0].getSourceDependencies()) == 3
+        >>> assert data.dependencyData["DotDeeDependencyProcessor"] == {"ddirs": ["foo/bar"]}
+    '''
+
+    def __init__(self, linkInfoXml):
+        '''
+        @param linkInfoXml: Link info XML
+        @type linkInfoXml: Filename or file-like object
+        '''
+        self.reader = LinkInfoXmlReader()
+        self.dependencies = self.reader.getBuildData(linkInfoXml)
+
+    def getNonstandardInterfaces(self):
+        '''
+        Get nonstandard interfaces extracted from link info
+
+        @return: Mapping of binary to correct api file (.dso)
+        @rtype: Dictionary(String)
+        '''
+        apiMapping = {}
+        for bd in self.dependencies.itervalues():
+            apiMapping.update(bd.apiMapping)
+        return apiMapping
+
+    def addDependencies(self, buildData, bldInfPath=None, newTarget=None):
+        '''
+        Add dependencies to buildData. If bldInfPath is not given, the entire
+        LinkInfo is searched for matching files and newTarget is ignored.
+
+        Also adds dependencyData["DotDeeDependencyProcessor"]["ddirs"]
+
+        @param buildData: A BuildData object to add dependencies to
+        @type buildData: BuildData
+        @param bldInfPath: Optional absolute path to bldInf if it is known. Speeds up the operation.
+        @type bldInfPath: String
+        @param newTarget: What to do with new targets from the link info, one of "add", "log" or anything else to ignore
+        @type newTarget: String or None
+        '''
+        if bldInfPath:
+            bldInfPath = os.path.normpath(bldInfPath)
+            if bldInfPath in self.dependencies:
+                return self._addDeps(buildData, bldInfPath, newTarget)
+        else:
+            for bldInfPath in self.dependencies:
+                self._addDeps(buildData, bldInfPath, newTarget=None)
+
+    def _addDeps(self, buildData, bldInfPath, newTarget=None):
+        '''
+        Add dependencies to buildData.
+
+        @param buildData: A BuildData object to add dependencies to
+        @type buildData: BuildData
+        @param bldInfPath: absolute path to bldInf, speeds up the operation
+        @type bldInfPath: String
+        @param newTarget: What to do with new targets from the link info, one of "add", "log" or anything else to ignore
+        @type newTarget: String or None
+        '''
+        for bdFile in self.dependencies[bldInfPath].getDependencies():
+            if bdFile.getPath() in buildData.getTargetFiles():
+                # no dependency data above, only paths - OK to overwrite
+                buildData.addDeliverable(bdFile)
+            elif newTarget == "add":
+                buildData.addDeliverable(bdFile)
+            elif newTarget == "log":
+                Logging.getLogger("pfw.datasources.linkinfo").warning(
+                    "Link data from %s target %s does not exist in %s", bldInfPath, bdFile.getPath(), buildData.getComponentName())
+
+            # Don't overwrite existing dependencyData, add any missing bits
+            for d in self.dependencies[bldInfPath].dependencyData:
+                if not buildData.dependencyData.get(d):
+                    buildData.dependencyData[d] = self.dependencies[bldInfPath].dependencyData[d]
+                else:
+                    for k, v in self.dependencies[bldInfPath].dependencyData[d].items():
+                        if not buildData.dependencyData.get(d).get(k):
+                            buildData.dependencyData[d][k] = v
+
+class LinkInfoXmlReader(xml.sax.ContentHandler):
+    '''
+    Process linkdata:
+        - inf -> BuildData
+        - target path -> BdFile
+        - libraries -> indirectDependencies
+
+    Sample linkdata::
+        <linkdata epocroot="/data/dragonfly/workspaces/1733/">
+            <inf path="/data/dragonfly/workspaces/1733/src/common/generic/graphics/egl/group/bld.inf" ddir="foo/bar">
+              <target path="epoc32/release/armv5/udeb/libegl.dll" component="EGL Implementation" platform="armv5" dso="">
+                <libraries>
+                  <lib>epoc32/release/armv5/lib/euser.dso</lib>
+                  <lib>epoc32/release/armv5/lib/estlib.dso</lib>
+                  <lib>epoc32/release/armv5/lib/ws32.dso</lib>
+                  <lib>epoc32/release/armv5/lib/cone.dso</lib>
+                  <lib>epoc32/release/armv5/lib/bitgdi.dso</lib>
+                  <lib>epoc32/release/armv5/lib/fbscli.dso</lib>
+                  <lib>epoc32/release/armv5/lib/hal.dso</lib>
+                  <lib>epoc32/release/armv5/lib/gdi.dso</lib>
+                  <lib>epoc32/release/armv5/lib/palette.dso</lib>
+                  <lib>epoc32/release/armv5/lib/drtaeabi.dso</lib>
+                  <lib>epoc32/release/armv5/lib/dfpaeabi.dso</lib>
+                  <lib>epoc32/release/armv5/lib/dfprvct2_2.dso</lib>
+                  <lib>epoc32/release/armv5/lib/drtrvct2_2.dso</lib>
+                </libraries>
+              </target>
+              <target path="epoc32/release/armv5/urel/libegl.dll" component="EGL Implementation" platform="armv5" dso="">
+                <libraries>
+                  <lib>epoc32/release/armv5/lib/euser.dso</lib>
+                  <lib>epoc32/release/armv5/lib/estlib.dso</lib>
+                  <lib>epoc32/release/armv5/lib/ws32.dso</lib>
+                  <lib>epoc32/release/armv5/lib/cone.dso</lib>
+                  <lib>epoc32/release/armv5/lib/bitgdi.dso</lib>
+                  <lib>epoc32/release/armv5/lib/fbscli.dso</lib>
+                  <lib>epoc32/release/armv5/lib/hal.dso</lib>
+                  <lib>epoc32/release/armv5/lib/gdi.dso</lib>
+                  <lib>epoc32/release/armv5/lib/palette.dso</lib>
+                  <lib>epoc32/release/armv5/lib/drtaeabi.dso</lib>
+                  <lib>epoc32/release/armv5/lib/dfpaeabi.dso</lib>
+                  <lib>epoc32/release/armv5/lib/dfprvct2_2.dso</lib>
+                  <lib>epoc32/release/armv5/lib/drtrvct2_2.dso</lib>
+                </libraries>
+              </target>
+            </inf>
+        </linkdata>
+    '''
+    def __init__(self):
+        xml.sax.ContentHandler.__init__(self)
+        self.logger = Logging.getLogger("pfw.datasources.linkinforeader")
+
+    @staticmethod
+    def getBuildData(source):
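+        '''
+        Parse link info XML and return a dictionary mapping each normalized
+        bld.inf path to the PlainBuildData created from its targets.
+
+        @param source: Link info XML
+        @type source: Filename or file-like object
+        '''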
+        linkReader = LinkInfoXmlReader()
+        p = xml.sax.make_parser()
+        p.setContentHandler(linkReader)
+        if hasattr(source, "read"):
+            file = source
+        else:
+            file = open(source)
+        while True:
+            data = file.read(1000000)
+            if not data:
+                break
+            p.feed(data)
+        p.close()
+        return linkReader.infAndBuildData
+
+    def characters(self, data):
+        self.cdata.append(data)
+
+    def startDocument(self):
+        self.infAndBuildData = {}
+        self.epocroot = None
+        # currently handled item
+        self.cdString = ""
+        self.buildData = None
+        self.inf = None
+        self.bdFile = None
+
+    def startElement(self, tag, attributes):
+        self.cdata = []
+        self.cdString = ""
+        if tag == "linkdata":
+            self.epocroot = attributes.get("epocroot")
+        elif tag == "inf":
+            self.inf = os.path.normpath(attributes.get("path"))
+            self.buildData = PlainBuildData()
+            self.buildData.setTargetRoot(self.epocroot)
+            self.buildData.dependencyData["DotDeeDependencyProcessor"] = {"ddirs": [attributes.get("ddir")]}
+        elif tag == "target":
+            path = attributes.get("path")
+            self.bdFile = BdFile(path)
+            self.bdFile.variantPlatform = attributes.get("platform")
+            dso = attributes.get("dso")
+            if dso:
+                self.buildData.addNonstandardInterfaces({path: dso})
+            if attributes.get("component"):
+                # Not used for component naming but store it anyway.
+                if self.buildData.getComponentName():
+                    if self.buildData.getComponentName() != attributes.get("component"):
+                        # Multiple components from the same INF are possible;
+                        # watch out for this if component names from this
+                        # output are ever used.
+                        self.logger.info("Targets from same INF %s have different component names: %s vs. %s",
+                            self.inf, self.buildData.getComponentName(), attributes.get("component"))
+                else:
+                    self.buildData.setComponentName(attributes.get("component"))
+
+    def endElement(self, tag):
+        self.cdString = "".join(self.cdata).strip()
+        if tag == "inf":
+            self.infAndBuildData[self.inf] = self.buildData
+            self.buildData = None
+        elif tag == "target":
+            self.buildData.addDeliverable(self.bdFile)
+            self.bdFile = None
+        elif tag == "lib":
+            # dependency on epoc32/release/armv5/lib/drtrvct2_2.dso
+            # -> source of bdFile depends on owner of drtrvct2_2.dso
+            self.bdFile.addSourceDependency(self.cdString)
+            # -> owner of bdFile depends on epoc32/release/armv5/urel|udeb/drtrvct2_2.dll
+            dep = os.path.join("epoc32/release/",
+                    self.bdFile.getVariantPlatform(),
+                    self.bdFile.getVariantType(),
+                    os.path.basename(self.cdString).split(".dso")[0] + ".dll")
+            self.bdFile.addOwnerDependency(dep)
+
+if __name__ == "__main__":
+    import doctest
+    doctest.testmod(sys.modules[__name__])
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/Packaging/DataSources/WhatLog.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,405 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Read build log output using two alternative methods
+#
+
+'''
+Read build log output using two alternative methods::
+    - WhatLogReader reads current SBS (2.4.2) output
+    - GenericLogReader reads a more generic format converted from SBS output
+      using XSL.
+
+The collected information is::
+    - INF file path
+    - Target files generated
+    - Target file types (resource|build|bitmap|member)
+
+    A member is a file stored in an archive. The parent archive can be obtained
+    via WhatLogReader.getFullInfo()
+
+Examples are included at the end in the command line tool.
+'''
+
+import os
+import sys
+import xml.sax
+
+from Blocks.Packaging.BuildData import PlainBuildData
+from Blocks.Packaging.Logging import Logging
+
+class WhatLogReader(xml.sax.ContentHandler):
+    '''
+    Read output from makefiles created using::
+
+        sbs -c <foo>.whatlog
+
+    The build log (namespace http://symbian.com/xml/build/log) with interesting
+    parts italicized::
+
+        <buildinfo>
+            <whatlog bldinf='I{/path/to/src/path/to/group/bld.inf}' mmp='' config='foo.whatlog'>
+                <export destination='I{/path/to/epoc32/foo/bar}' source='/path/to/src/path/to/baz'/>
+            </whatlog>
+
+            <whatlog bldinf='I{/path/to/src/path/to/group/bld.inf}' mmp='' config='foo.whatlog'>
+                <archive zipfile='/path/to/src/path/to/foo.zip'>
+                    <member>I{/path/to/epoc32/foo/bar}</member>
+                </archive>
+            </whatlog>
+
+            <whatlog bldinf='I{/path/to/src/path/to/group/bld.inf}' mmp='' config='foo.whatlog'>
+                <resource>I{foo/bar}</resource>
+            </whatlog>
+
+            <whatlog bldinf='I{/path/to/src/path/to/group/bld.inf}' mmp='' config='foo.whatlog'>
+                <build>I{foo/bar}</build>
+            </whatlog>
+
+            <whatlog bldinf='I{/path/to/src/path/to/group/bld.inf}' mmp='' config='foo.whatlog'>
+                <bitmap>I{foo/bar}</bitmap>
+            </whatlog>
+        </buildinfo>
+
+    createBuildData() outputs PlainBuildData objects from a set of INF paths, while
+    raw data can be obtained using getFullInfo() and getFilePaths().
+    '''
+    def __init__(self):
+        self.cdata = []
+        self.cdString = ""
+        self.infs = {}
+        self.currentInfPath = ""
+        self.currentArchive = {}
+        self.logger = Logging.getLogger("pfw.datasources.whatlog")
+
+    def characters(self, data):
+        self.cdata.append(data)
+
+    def startElement(self, tag, attributes):
+        self.cdata = []
+        if tag == "whatlog":
+            self.currentInfPath = os.path.abspath(attributes.get("bldinf"))
+            if self.currentInfPath not in self.infs:
+                self.infs[self.currentInfPath] = []
+        elif tag == "export":
+            self.infs[self.currentInfPath].append({'type': 'file', 'path': attributes.get('destination'), 'fileType': tag})
+        elif tag == "archive":
+            self.currentArchive = {'type': 'archive', 'path': attributes.get("zipfile"), 'members': []}
+
+    def endElement(self, tag):
+        self.cdString = "".join(self.cdata).strip()
+        if tag == "archive":
+            self.infs[self.currentInfPath].append(self.currentArchive)
+            self.currentArchive = {}
+        elif tag == "member":
+            self.currentArchive['members'].append(self.cdString)
+        elif tag in ("resource", "build", "bitmap"):
+            self.infs[self.currentInfPath].append({'type': 'file', 'path': self.cdString, 'fileType': tag})
+
+    def getInfs(self):
+        return self.infs.keys()
+
+    def getFullInfo(self, inf):
+        if inf not in self.infs:
+            return
+        for entry in self.infs[inf]:
+            if entry["type"] == "file":
+                yield entry
+            elif entry["type"] == "archive":
+                for path in entry["members"]:
+                    yield {'type': 'file', 'path': path, 'fileType': "member", 'archive': entry["path"]}
+
+    def getFilePaths(self, inf):
+        if inf not in self.infs:
+            self.logger.warning("No such inf %s"%inf)
+            return
+        for entry in self.infs[inf]:
+            if entry["type"] == "file":
+                yield entry["path"]
+            elif entry["type"] == "archive":
+                for path in entry["members"]:
+                    yield path
+
+    def createBuildData(self, infs, name=None, version=None, epocRoot=None):
+        '''
+        Create a PlainBuildData object from one or more infs.
+
+        @param infs: Absolute paths to INF files
+        @type infs: List
+        @param name: Component name
+        @type name: String
+        @param version: Component version
+        @type version: String
+        @param epocRoot: Absolute path to epocroot. If not specified it will be
+        deduced from the paths.
+        @type epocRoot: String
+        '''
+        if not infs:
+            raise ValueError, "Require at least one inf"
+        bd = PlainBuildData()
+        if epocRoot:
+            bd.targetRoot = epocRoot
+        if name:
+            bd.componentName = name
+        if version:
+            bd.componentVersion = version
+        for inf in infs:
+            for absPath in self.getFilePaths(inf):
+                absPath = os.path.abspath(absPath)
+                path = ""
+                if epocRoot and absPath.startswith(epocRoot):
+                    path = absPath[len(epocRoot):].lstrip(os.sep)
+                else:
+                    if "epoc32" not in absPath:
+                        self.logger.warning("Ignoring file - unable to deduce epocRoot - 'epoc32' not in file path '%s'." % absPath)
+                        continue
+                    epocRoot = absPath[:absPath.index("epoc32")]
+                    path = absPath[absPath.index("epoc32"):]
+                bd.addTargetFiles([path])
+        return bd
+
+    @staticmethod
+    def getBuildData(source, epocRoot=None):
+        '''
+        Create a dictionary of BuildData objects with INFs as keys, one
+        BuildData per INF.
+
+        @param source: A file path or file-like object containing generic
+        whatlog data.
+        @type source: String or file-like object
+        @return: Dictionary with INF:BuildData pairs
+        '''
+        reader = WhatLogReader()
+        p = xml.sax.make_parser()
+        p.setContentHandler(reader)
+        if hasattr(source, "read"):
+            file = source
+        else:
+            file = open(source)
+        while True:
+            data = file.read(1000000)
+            if not data:
+                break
+            p.feed(data)
+        p.close()
+        infsAndBuildData = {}
+        for inf in reader.getInfs():
+            infsAndBuildData[inf] = reader.createBuildData(infs=[inf], epocRoot=epocRoot)
+        return infsAndBuildData
+
+class GenericLogReader(xml.sax.ContentHandler):
+    '''
+    Read output from whatlogs transformed using buildLogToGeneric.xsl.
+
+    The format is::
+
+        <components>
+          <component bldinf="/path/to/src/path/to/bld.inf" config="tools2_deb">
+              <file type="export">/path/to/epoc32/foo/bar</file>
+          </component>
+        </components>
+
+    Where I{type} is xpath:whatlog/node() name and I{config} is xpath:whatlog@config.
+
+    createBuildData() outputs PlainBuildData objects from a set of INF paths, while
+    raw data can be obtained using getFullInfo() and getFilePaths().
+    '''
+
+    defaultStyleSheet = os.path.join(os.path.dirname(__file__), "buildLogToGeneric.xsl")
+
+    def __init__(self):
+        self.cdata = []
+        self.cdString = ""
+        self.logger = Logging.getLogger("pfw.datasources.genericlog")
+        self.components = {}
+        self.bldinf = ""
+        self.config = ""
+        self.type = ""
+
+    def characters(self, data):
+        self.cdata.append(data)
+
+    def startElement(self, tag, attributes):
+        self.cdata = []
+        if tag == "component":
+            self.bldinf = os.path.abspath(attributes.get("bldinf"))
+            self.config = attributes.get("config")
+            if self.bldinf not in self.components:
+                self.components[self.bldinf] = []
+        elif tag == "file":
+            self.type = attributes.get("type")
+
+    def endElement(self, tag):
+        self.cdString = "".join(self.cdata).strip()
+        if tag == "component":
+            self.bldinf = ""
+            self.config = ""
+        elif tag == "file":
+            self.components[self.bldinf].append({'type': self.type, 'path': self.cdString})
+
+    def getInfs(self):
+        return self.components.keys()
+
+    def getFullInfo(self, inf):
+        return self.components.get(inf)
+
+    def getFilePaths(self, inf):
+        if inf not in self.components:
+            self.logger.warning("No such inf %s"%inf)
+            return
+        return [ f["path"] for f in self.components[inf] ]
+
+    def createBuildData(self, infs, name=None, version=None, epocRoot=None):
+        '''
+        Create a PlainBuildData object from one or more infs.
+
+        @param infs: Absolute paths to INF files
+        @type infs: List
+        @param name: Component name
+        @type name: String
+        @param version: Component version
+        @type version: String
+        @param epocRoot: Absolute path to epocroot. If not specified it will be
+        deduced from the paths.
+        @type epocRoot: String
+        '''
+        if not infs:
+            raise ValueError, "Require at least one inf"
+        bd = PlainBuildData()
+        if epocRoot:
+            bd.targetRoot = epocRoot
+        if name:
+            bd.componentName = name
+        if version:
+            bd.componentVersion = version
+        for inf in infs:
+            for absPath in self.getFilePaths(inf):
+                absPath = os.path.abspath(absPath)
+                if epocRoot and absPath.startswith(epocRoot):
+                    path = absPath[len(epocRoot):].lstrip(os.sep)
+                else:
+                    if "epoc32" not in absPath:
+                        self.logger.warning("Ignoring file - unable to deduce epocRoot - 'epoc32' not in file path '%s'." % absPath)
+                        continue
+                    epocRoot = absPath[:absPath.index("epoc32")]
+                    path = absPath[absPath.index("epoc32"):]
+                bd.addTargetFiles([path])
+        return bd
+
+    @staticmethod
+    def getBuildData(source):
+        '''
+        Create a dictionary of BuildData objects with INFs as keys, one
+        BuildData per INF.
+
+        @param source: A file path or file-like object containing generic
+        whatlog data.
+        @type source: String or file-like object
+        '''
+        reader = GenericLogReader()
+        p = xml.sax.make_parser()
+        p.setContentHandler(reader)
+        if hasattr(source, "read"):
+            file = source
+        else:
+            file = open(source)
+        while True:
+            data = file.read(1000000)
+            if not data:
+                break
+            p.feed(data)
+        p.close()
+        infsAndBuildData = {}
+        for inf in reader.getInfs():
+            infsAndBuildData[inf] = reader.createBuildData(infs=[inf])
+        return infsAndBuildData
+
+if __name__ == "__main__":
+    #
+    # An example of how to use WhatLog.
+    #
+    # - Parse Symbian build log directly using WhatLogReader:
+    #
+    #   WhatLog.py [-p|--print] logfile [inf inf2 ...]
+    #
+    # - Parse generic build log using GenericLogReader:
+    #
+    #   WhatLog.py -g|--generic [-p|--print] logfile [inf inf2 ...]
+    #
+    # - Convert Symbian build log using XSLT then parse using GenericLogReader
+    #
+    #   WhatLog.py -s|--stylesheet stylesheet.xsl [-p|--print] logfile [inf inf2 ...]
+    #
+    import logging
+    from getopt import getopt
+    from Ft.Xml.Xslt import Transform
+    from Blocks.Packaging.BuildData import BuildData
+
+    opts, leftovers = getopt(sys.argv[1:], "hvs:pg", ["stylesheet=", "print", "generic"])
+
+    stylesheet = None
+    isGeneric = False
+    printInfs = False
+    loglevel = logging.WARNING
+
+    for k, v in opts:
+        if k == "-v":
+            loglevel = logging.DEBUG
+
+    logging.basicConfig(level=loglevel)
+
+    for k, v in opts:
+        if k in ("-s", "--stylesheet"):
+            stylesheet = v
+        elif k in ("-p", "--print"):
+            printInfs = True
+        elif k in ("-g", "--generic"):
+            isGeneric = True
+
+    if leftovers:
+        p = xml.sax.make_parser()
+        if stylesheet:
+            logging.debug("Using stylesheet %s", stylesheet)
+            i = GenericLogReader()
+            p.setContentHandler(i)
+            p.feed(Transform(leftovers[0], stylesheet))
+        else:
+            if isGeneric:
+                i = GenericLogReader()
+                logging.debug("Reading generic XML")
+            else:
+                i = WhatLogReader()
+                logging.debug("Reading Symbian XML")
+            p.setContentHandler(i)
+            f = open(leftovers[0])
+            while True:
+                data = f.read(1000000)
+                if not data:
+                    break
+                p.feed(data)
+        p.close()
+
+        infs = i.getInfs()
+        infs.sort()
+        if printInfs:
+            for inf in infs:
+                print "%s" % inf
+                for path in i.getFilePaths(inf):
+                    print "\t", path
+
+        # create a BuildData object from a list of INF paths
+        if len(leftovers) > 1:
+            bd = i.createBuildData("testcomponent", "1", leftovers[1:], "foo")
+            assert isinstance(bd, BuildData), "not a build data object"
+            assert bd.getComponentVersion() == "1", "bad version"
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/Packaging/DataSources/__init__.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,17 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Data sources
+#
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/Packaging/DataSources/buildLogToGeneric.xsl	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,84 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+All rights reserved.
+This component and the accompanying materials are made available
+under the terms of "Eclipse Public License v1.0"
+which accompanies this distribution, and is available
+at the URL "http://www.eclipse.org/legal/epl-v10.html".
+
+Initial Contributors:
+Nokia Corporation - initial contribution.
+
+Contributors:
+
+Description:
+Style sheet for converting raptor whatlog into a generic log format
+
+-->
+
+<xsl:stylesheet version="1.0" 
+	xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
+	xmlns:s="http://symbian.com/xml/build/log">
+  <xsl:template match="/">
+	<components>
+	<xsl:for-each select="s:buildlog/s:whatlog">
+		<xsl:text> 
+    	</xsl:text>
+    	<xsl:element name="component">
+    		<xsl:attribute name="bldinf"><xsl:value-of select="@bldinf"/></xsl:attribute>
+    		<xsl:attribute name="config"><xsl:value-of select="substring-before(@config,'.')"/></xsl:attribute>
+    	  		<xsl:for-each select="s:resource">
+					<xsl:text> 
+	    			</xsl:text>
+	    			<xsl:element name="file">
+	    				<xsl:attribute name="type">resource</xsl:attribute>
+	    				<xsl:value-of select="."/>
+	    			</xsl:element>
+    			</xsl:for-each>
+    			
+				<xsl:for-each select="s:bitmap">
+					<xsl:text> 
+	    			</xsl:text>
+	    			<xsl:element name="file">
+	    				<xsl:attribute name="type">bitmap</xsl:attribute>
+	    				<xsl:value-of select="."/>
+	    			</xsl:element>
+    			</xsl:for-each>
+
+    	  		<xsl:for-each select="s:build">
+					<xsl:text> 
+	    			</xsl:text>
+	    			<xsl:element name="file">
+	    				<xsl:attribute name="type">build</xsl:attribute>
+	    				<xsl:value-of select="."/>
+	    			</xsl:element>
+    			</xsl:for-each>
+
+    	  		<xsl:for-each select="s:export">
+					<xsl:text> 
+	    			</xsl:text>
+	    			<xsl:element name="file">
+	    				<xsl:attribute name="type">export</xsl:attribute>
+	    				<xsl:value-of select="@destination"/>
+	    			</xsl:element>
+	    		</xsl:for-each>
+
+    	  		<xsl:for-each select="s:archive">
+					<xsl:text> 
+	    			</xsl:text>
+	    			<xsl:for-each select="s:member">
+						<xsl:text> 
+		    			</xsl:text>
+		    			<xsl:element name="file">
+		    				<xsl:attribute name="type">member</xsl:attribute>
+		    				<xsl:value-of select="."/>
+		    			</xsl:element>
+	    			</xsl:for-each>
+	    		</xsl:for-each>
+	    	</xsl:element>
+    </xsl:for-each>
+    </components>
+  </xsl:template>
+</xsl:stylesheet>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/Packaging/DependencyProcessors/DefaultProcessors.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,184 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Default processors
+#
+
+import os
+import re
+import xml.sax
+
+from Blocks.Packaging.PackageWriter import Dependency
+from Blocks.Packaging.Logging import Logging
+
+class BuildDataDependencyProcessor(object):
+    '''
+    Call this class with a Component as argument to add dependencies from a
+    BuildData object. Normally this is the same data that was used to make the
+    component in the first place.
+
+    Indirect dependencies are handled here, by applying rules to source package
+    dependencies.
+    '''
+
+    def __init__(self, indirectDependencyRules):
+        '''
+        Reads dependency data from buildData and applies it to the component's
+        member packages. BuildData is expected to yield file->file dependency
+        data when the buildData.getDependencies() is called.
+
+        If a configuration is specified, indirect dependency rules are applied
+        to the component. The configuration can be passed as one of:
+            - The path to the configuration file
+            - A file-like object from which the configuration can be read
+            - A List containing a previously parsed configuration
+
+        @param indirectDependencyRules: Configuration
+        @type indirectDependencyRules: None, String, file-like object or List
+        '''
+        self.globalFileMap = None
+        if indirectDependencyRules:
+            if isinstance(indirectDependencyRules, list):
+                # previously parsed ruleset
+                self.indirectDependencyRules = indirectDependencyRules
+            else:
+                # string containing path
+                self.indirectDependencyRules = IndirectDependencyRules(indirectDependencyRules).getRules()
+        else:
+            self.indirectDependencyRules = None
+        self.logger = Logging.getLogger("pfw.processors.builddata")
+
+    def __call__(self, component, buildData, globalFileMap):
+        self.globalFileMap = globalFileMap
+        if component.getName() != buildData.componentName:
+            raise ValueError("Bad dependency info for '%s'. BuildData is for another component '%s'." %
+                (component.getName(), buildData.componentName))
+
+        for dble in buildData.getDependencies():
+            # dble is a file belonging to this component (BuildData.BdFile)
+            # with dependencies on other files.
+            ownerPackageName = self.globalFileMap.getPackageName(dble.getPath())
+            if not ownerPackageName:
+                self.logger.warning("Dependency data includes file '%s', not in filemap.", dble.getPath())
+                continue
+            ownerPackage = component.getPackage(ownerPackageName)
+            if not ownerPackage:
+                self.logger.warning("Dependency data includes file '%s', not in the component '%s'", dble.getPath(), component.getName())
+                continue
+
+            for dependency in dble.getOwnerDependencies():
+                self.fileDep(ownerPackage, dependency)
+
+            for dependency in dble.getSourceDependencies():
+                if component.getSourcePackage():
+                    self.fileDep(component.getSourcePackage(), dependency)
+                else:
+                    self.logger.warning("Cannot add dependency to %s in non-existent source package (%s)", dependency, component.getName())
+
+            if self.indirectDependencyRules:
+                for library in dble.getSourceDependencies():
+                    library = os.path.basename(library)    # rules only use filename
+                    for matchExp, pkgExp, fileExp in self.indirectDependencyRules:
+                        if matchExp.match(library):
+                            for exp, depType in fileExp:
+                                if dble.variantPlatform:
+                                    exp = exp.replace("$(VARIANTPLATFORM)", dble.getVariantPlatform())
+                                if dble.variantType:
+                                    exp = exp.replace("$(VARIANTTYPE)", dble.getVariantType())
+                                self.fileDep(ownerPackage, exp, depType)
+                            for exp, depType in pkgExp:
+                                if dble.variantPlatform:
+                                    exp = exp.replace("$(VARIANTPLATFORM)", dble.getVariantPlatform())
+                                if dble.variantType:
+                                    exp = exp.replace("$(VARIANTTYPE)", dble.getVariantType())
+                                if ownerPackage.addDependency(Dependency(exp, type=depType)):
+                                    self.logger.debug("Indirect dependency (file): %s now depends on %s",
+                                        ownerPackage.getName(), Dependency(exp, type=depType))
+
+    def fileDep(self, package, fileName, depType="strong"):
+        depName = self.globalFileMap.getPackageName(fileName)
+        if depName:
+            if package.addDependency(Dependency(depName, type=depType)):
+                self.logger.debug("%s now depends on %s", package.getName(), depName)
+        else:
+            self.logger.warning("Unable to find package owning dependency '%s'", fileName)
+
+class IndirectDependencyRules(object):
+    ''' Indirect dependency rules '''
+    class ConfigReader(xml.sax.ContentHandler):
+        '''
+        Read the configuration and store the results in self.rules, which is a
+        list of tuples: (<matchRegex>, [<pkgrules>], [<filerules>]).
+
+        Each <pkgrule> and <filerule> is a ("path", "strong|weak") tuple. The
+        path may contain variables, currently supported:
+            - $(VARIANTPLATFORM)
+            - $(VARIANTTYPE)
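+
+        An illustrative configuration sketch (the root element name and layout
+        are assumed from this parser, not taken from a real rule file)::
+
+            <rules>
+              <dep>
+                <library>euser\.dso</library>
+                <depends type="strong">
+                  <file>epoc32/release/$(VARIANTPLATFORM)/$(VARIANTTYPE)/euser.dll</file>
+                </depends>
+              </dep>
+            </rules>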
+        '''
+        def __init__(self):
+            self.rules = []
+            self.matchExp = ""
+            self.packageExp = []
+            self.fileExp = []
+            self.cdata = []
+            self.cdString = ""
+            self.depType = None
+
+        def characters(self, data):
+            self.cdata.append(data)
+
+        def startElement(self, tag, attributes):
+            self.cdata = []
+            if tag == "depends":
+                self.depType = attributes.get("type", "weak").encode("ascii")
+
+        def endElement(self, tag):
+            self.cdString = "".join(self.cdata).strip()
+            self.cdata = []
+            if tag == "library":
+                self.matchExp = re.compile(self.cdString)
+            elif tag == "file":
+                self.fileExp.append((self.cdString, self.depType))
+            elif tag == "package":
+                self.packageExp.append((self.cdString, self.depType))
+            elif tag == "dep":
+                self.rules.append((self.matchExp, self.packageExp, self.fileExp))
+                self.matchExp = ""
+                self.packageExp = []
+                self.fileExp = []
+
+    def __init__(self, configuration):
+        '''
+
+        @param configuration: Configuration for indirect dependencies
+        @type configuration: Path to file or file-like object
+        '''
+        p = xml.sax.make_parser()
+        i = IndirectDependencyRules.ConfigReader()
+        p.setContentHandler(i)
+        if hasattr(configuration, "read"):
+            f = configuration
+            f.seek(0)
+        else:
+            f = open(configuration)
+        while True:
+            data = f.read()
+            if not data:
+                break
+            p.feed(data)
+        p.close()
+        self.rules = i.rules
+
+    def getRules(self):
+        return self.rules
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/Packaging/DependencyProcessors/RaptorDependencyProcessor.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,394 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Raptor dependency processor
+#
+
+import re
+import os
+import xml.sax
+import xml.dom.minidom as dom
+
+from Blocks.Packaging.ComponentBuilder import ComponentBuilder
+from Blocks.Packaging.PackageWriter import Dependency
+from Blocks.Packaging.Rules import Rules
+from Blocks.Packaging.Logging import Logging
+
+class DotDeeDependencyProcessor(object):
+    '''
+    Get dependency info from ".d" dependency files found under the build
+    directory. Each .d file is read for a list of files used during the build.
+    The owner package of each of these is added as a dependency to the component
+    source package.
+
+    When calling dotDeeDependencyProcessor(component, buildData, globalFileMap),
+    the relative directories under which .d files can be found must be listed
+    in buildData.dependencyData["DotDeeDependencyProcessor"]["ddirs"];
+    buildData.dependencyData["DotDeeDependencyProcessor"]["buildDir"] overrides
+    the default build directory (epoc32/build).
+
+    If additional dependency rules are used, the processor needs to replicate
+    ComponentBuilder name resolution, which requires the location to the same
+    directives as used by ComponentBuilder.
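+
+    A .d file is an ordinary Make-style dependency fragment, for example
+    (paths invented for illustration)::
+
+        epoc32/build/foo/armv5/urel/foo.o: \
+         /src/foo/foo.cpp \
+         epoc32/include/e32cmn.h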
+    '''
+    # reduce log spam by not duplicating messages
+    notInFileMap = []
+
+    stats = {}
+    stats["totalFiles"] = 0
+    stats["notFoundSources"] = 0
+    stats["notFoundAdditionals"] = 0
+    stats["uniqueSourceDeps"] = 0
+    stats["startedAdditionals"] = 0
+    stats["badAdditionalPlatform"] = {}
+    stats["matchedAdditionals"] = 0
+    stats["badAdditionalPackageName"] = 0
+    stats["additionals"] = 0
+    stats["uniqueAdditionals"] = 0
+
+    def __init__(self, config):
+        '''
+        Create a processor with default rules. These can be overridden using
+        the I{config} parameter.
+
+        @param config: None or a dictionary that may contain any of the
+        following key-value pairs::
+            "additionalDependencyRules" - parsed additional rules.
+            "additionalDependencyRulesPath" - path to additional rules config.
+            "directives" - parsed package directives.
+            "directivesPath" - path to package directives config.
+            "dependencyFilter" - Only include dependencies if the path starts
+                                 with this string.
+        Parsed configurations take precedence over paths if both are present.
+
+        Both filepaths and configurations are supported to provide speed
+        (less parsing) and convenience.
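+
+        Illustrative config (all keys are optional; the paths are made up)::
+
+            config = {
+                "additionalDependencyRulesPath": "my/rules.xml",
+                "directivesPath": "my/directives.xml",
+                "dependencyFilter": "epoc32",
+            }
+            processor = DotDeeDependencyProcessor(config)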
+        '''
+        self.logger = Logging.getLogger("pfw.processors.dotdee")
+        self.deps = {}
+        self.pathPrefix = ""
+        if not isinstance(config, dict):
+            config = {}
+
+        self.additionalDependencyRules = config.get("additionalDependencyRules") or \
+            AdditionalDependencyRules(config.get("additionalDependencyRulesPath")).getRules() or \
+            AdditionalDependencyRules().getRules()
+
+        self.directives = config.get("directives") or \
+            dom.parse(config.get("directivesPath") or Rules.packageDirectives())
+
+        self.setDependencyFilter(config.get("dependencyFilter", ""))
+
+    def dReader(self, arg, dirPath, nameList):
+        '''
+        Read .d files in dirPath, adding paths to a list of dependencies with
+        associated variantPlatform and variantType. These are obtained from the
+        path. For example::
+            full path == epoc32/release/armv5/urel/foo.dll
+            variantPlatform == "armv5"
+            variantType == "urel"
+        This results in a dependency on foo.dll associated with armv5-urel.
+
+        @param arg: tuple(baseDir, epocRoot, sourceRoot)
+        @param dirPath: Current dir being walk()'ed
+        @param nameList: List of files and directories in dirPath
+
+        arg is a tuple containing
+            - baseDir is the top dir where walk() starts, needed to extrapolate
+            the relative path of each .d, which in turn determines
+            variantPlatform and variantType
+            - epocRoot
+            - sourceRoot
+        '''
+        baseDir, epocRoot, sourceRoot = arg
+        epocRoot = os.path.normpath(epocRoot)
+        sourceRoot = os.path.normpath(sourceRoot)
+        dFiles = [n for n in nameList if n.lower().endswith(".d")]
+        if not dFiles:
+            return
+        relativeDir = dirPath[len(baseDir):]
+        parts = relativeDir.split(os.sep)
+        for dFile in [os.path.join(dirPath, f) for f in dFiles if os.path.isfile(os.path.join(dirPath, f))]:
+            deps = []
+            fh = open(dFile)
+            depString = ""
+            for line in fh.readlines():
+                line = line.strip()
+                if line.endswith("\\"):
+                    depString += line.rstrip("\\")
+                else:
+                    depString += line
+                    for filePath in depString.split():
+                        filePath = os.path.normpath(filePath)
+                        if not filePath or filePath.endswith(":"):
+                            continue
+                        rel = os.path.relpath(filePath, epocRoot)
+                        if not ".." in rel:
+                            deps.append(rel)
+                        else:
+                            self.logger.debug("Ignoring '%s' - not in target root (%s)" % (filePath, epocRoot))
+                    depString = ""
+
+            fh.close()
+            if deps:
+                if len(parts) > 3:
+                    self.addDepList(deps, variantPlatform=parts[2], variantType=parts[3])
+                else:
+                    self.addDepList(deps, "noarch")
+
+    def addDepList(self, fileList, variantPlatform, variantType=None):
+        '''
+        Store dependencies in dictionary of dictionaries,
+        self.deps[variantPlatform][variantType]
+
+        @param fileList: List of paths relative to epocRoot. The component is
+        dependent on the owners of these files.
+        @type fileList: List(String)
+        @param variantPlatform: The variant platform (e.g. armv5)
+        @type variantPlatform: String
+        @param variantType: e.g. udeb or urel
+        @type variantType: String
+        '''
+        if variantPlatform != "noarch" and variantType == None:
+            raise ValueError("Must have variantType if variantPlatform is %s" % variantPlatform)
+        if variantPlatform == "noarch":
+            variantType = "default"
+        if variantPlatform not in self.deps:
+            self.deps[variantPlatform] = {}
+        if variantType not in self.deps[variantPlatform]:
+            self.deps[variantPlatform][variantType] = []
+        self.deps[variantPlatform][variantType].extend([f for f in fileList if f not in self.deps[variantPlatform][variantType]])
+
+    def getDepList(self, variantSpec=None, filter=""):
+        '''
+        Get all dependencies by default; variantSpec is a list of tuples
+        (variantPlatform, variantType) that can be used to get only those
+        dependencies that are associated with them. An additional filter can
+        be used to get only dependencies matching the filter path prefix.
+
+        @param variantSpec: List of (variantPlatform, variantType) pairs.
+        @type variantSpec: List(Tuple(String,String))
+        @param filter: A path prefix that a dependency path must start with
+        to be considered a valid dependency.
+        @type filter: String
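+
+        Example (illustrative)::
+
+            deps = processor.getDepList(variantSpec=[("armv5", "urel")],
+                                        filter="epoc32")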
+        '''
+        ret = []
+        if variantSpec:
+            for platform, vtype in variantSpec:
+                if platform == "noarch":
+                    vtype = "default"
+                if platform in self.deps and vtype in self.deps[platform]:
+                    ret.extend([d for d in self.deps[platform][vtype] if d not in ret and d.startswith(filter)])
+        else:
+            for typeDict in self.deps.values():
+                for deps in typeDict.values():
+                    for dep in deps:
+                        if dep not in ret and dep.startswith(filter):
+                            ret.append(dep)
+        return ret
+
+    def setDependencyFilter(self, pathPrefix):
+        ''' Does the same job as filter in getDepList, but for __call__. '''
+        self.pathPrefix = pathPrefix
+
+    def __call__(self, component, buildData, globalFileMap):
+        buildDir = buildData.dependencyData.get(self.__class__.__name__, {}).get("buildDir")
+        if not buildDir:
+            assert os.path.isdir(buildData.getTargetRoot())
+            buildDir = os.path.join(buildData.getTargetRoot(), "epoc32", "build")
+            self.logger.info("Build directory not given, using %s" % buildDir)
+        if not os.path.isdir(buildDir):
+            self.logger.info("Build directory %s does not exist" % buildDir)
+            return
+
+        ddirs = buildData.dependencyData.get(self.__class__.__name__, {}).get("ddirs", [])
+        if not ddirs:
+            self.logger.debug("%s: no dotDee directories" % buildData.getComponentName())
+            return
+
+        for d in ddirs:
+            ddir = os.path.join(buildDir, d)
+            assert os.path.isdir(ddir), "DotDee directory %s does not exist" % ddir
+            os.path.walk(ddir, self.dReader, (ddir, buildData.getTargetRoot(), buildData.getSourceRoot()))
+
+        for variantPlatform, typeDict in self.deps.items():
+            for variantType, depList in typeDict.items():
+                if variantPlatform == "noarch":
+                    variantType = None
+                for dep in depList:
+                    DotDeeDependencyProcessor.stats["totalFiles"] += 1
+                    if dep.startswith(self.pathPrefix):
+                        if component.getSourcePackage():
+                            requiredPackageName = globalFileMap.getPackageName(dep)
+                            if not requiredPackageName:
+                                if dep not in DotDeeDependencyProcessor.notInFileMap:
+                                    if dep.startswith("epoc32"):
+                                        self.logger.info("Source package dependency not in filemap: %s"%dep)
+                                    DotDeeDependencyProcessor.notInFileMap.append(dep)
+                                    DotDeeDependencyProcessor.stats["notFoundSources"] += 1
+                                continue
+                            if component.getSourcePackage().addDependency(Dependency(requiredPackageName, type="strong")):
+                                DotDeeDependencyProcessor.stats["uniqueSourceDeps"] += 1
+                                self.logger.debug("%s now depends on %s"%(component.getSourcePackage().getName(), requiredPackageName))
+                        if self.additionalDependencyRules:
+                            self.doAdditionalDependencies(dep, variantPlatform, variantType, component, globalFileMap)
+
+    def doAdditionalDependencies(self, requiredFile, variantPlatform, variantType, component, globalFileMap):
+        DotDeeDependencyProcessor.stats["startedAdditionals"] += 1
+        for pathRegex, matchVariantPlatform, packages in self.additionalDependencyRules:
+            if matchVariantPlatform and matchVariantPlatform != variantPlatform:
+                if variantPlatform not in DotDeeDependencyProcessor.stats["badAdditionalPlatform"]:
+                    DotDeeDependencyProcessor.stats["badAdditionalPlatform"][variantPlatform] = 1
+                else:
+                    DotDeeDependencyProcessor.stats["badAdditionalPlatform"][variantPlatform] += 1
+                continue
+            match = pathRegex.match(requiredFile)
+            if match:
+                DotDeeDependencyProcessor.stats["matchedAdditionals"] += 1
+                # If component requires a matching file, the member package
+                # identified by id also requires listed files.
+                for id, files in packages: # packages are rulesets enclosed in <package> tags in the config
+                    pkgKey = id.replace("$(VARIANTPLATFORM)", variantPlatform)
+                    # I{pkgKey} is the raw name of the package depending on the
+                    # first matching i{file} tag.
+                    # By the time dependencyProcessors are being run, the
+                    # original compname.variantplatform name has been lost
+                    # so we must resolve the actual package name from pkgKey.
+                    pkgName = ComponentBuilder.resolvePackageName(pkgKey, component.getName(), self.directives)
+                    if pkgName == None:
+                        self.logger.warning("%s: Unable to resolve package name from %s", component.getName(), pkgKey)
+                        DotDeeDependencyProcessor.stats["badAdditionalPackageName"] += 1
+                        continue
+                    for file in files:
+                        if "$(VARIANTTYPE)" in file and variantType == None:
+                            self.logger.warning("Required placeholder VARIANTTYPE is None. Path: %s Filetag: %s", requiredFile, file)
+                            continue
+                        file = file.replace("$(VARIANTPLATFORM)", variantPlatform)
+                        if variantType is not None:
+                            file = file.replace("$(VARIANTTYPE)", variantType)
+                        for matchId in range(len(match.groups())):
+                            token = "$" + str(matchId + 1)
+                            file = file.replace(token, match.groups()[matchId])
+                        if file in DotDeeDependencyProcessor.notInFileMap:
+                            continue
+                        fileOwner = globalFileMap.getPackageName(file)
+                        if not fileOwner:
+                            self.logger.debug("Additional file %s not in filemap"%file)
+                            DotDeeDependencyProcessor.notInFileMap.append(file)
+                            DotDeeDependencyProcessor.stats["notFoundAdditionals"] += 1
+                            continue
+                        DotDeeDependencyProcessor.stats["additionals"] += 1
+                        if not component.getPackage(pkgName):
+                            self.logger.warning("Additional dependency failed: component %s has no package %s"%(component.getName(), pkgName))
+                            continue
+                        if component.getPackage(pkgName).addDependency(Dependency(fileOwner)):
+                            DotDeeDependencyProcessor.stats["uniqueAdditionals"] += 1
+                            self.logger.debug("Additional: %s now depends on %s"%(pkgName, fileOwner))
+                # A match -> stop processing
+                return True
+
+
+class AdditionalDependencyRules(object):
+    '''
+    Read rules from XML file.
+
+    Usage::
+        rules = AdditionalDependencyRules().getRules()
+
+    To specify alternate location::
+        rules = AdditionalDependencyRules("my/rules.xml").getRules()
+
+    '''
+    defaultRules = os.path.join(os.path.dirname(__file__), "additionalDependencyRules.xml")
+
+    class ConfigReader(xml.sax.ContentHandler):
+        '''
+        Read the configuration and store the results in self.rules, which is a
+        list of tuples::
+            (
+                pathRegex, # regex
+                variantPlatform, # string
+                packages # list of tuples
+            )
+
+        Each package in turn is a tuple::
+            (
+                id,  # string
+                files # list of strings
+            )
+
+        The id may contain the placeholder $(VARIANTPLATFORM). The file
+        entries additionally support:
+            - $(VARIANTTYPE)
+            - $n # n is a positive integer referring to a group in the path regex
+
+        Contrary to the example in the architecture document there is no
+        $(COMPONENT); the component name is hard-coded.
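+
+        For example, the second rule in the default
+        additionalDependencyRules.xml parses to roughly::
+
+            (re.compile("^epoc32/include/(.*).rsg$"),
+             None,  # no variantPlatform restriction
+             [("exec.$(VARIANTPLATFORM)",
+               ["epoc32/data/z/resource/apps/$1.rsc"])])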
+        '''
+        def __init__(self):
+            self.rules = []
+            self.pathRegex = None
+            self.variantPlatform = None
+            self.packages = []
+            self.id = ""
+            self.files = []
+
+        def characters(self, data):
+            self.cdata.append(data)
+
+        def startElement(self, tag, attributes):
+            self.cdata = []
+
+        def endElement(self, tag):
+            self.cdString = "".join(self.cdata).strip()
+            self.cdata = []
+            if tag == "path":
+                self.pathRegex = re.compile(self.cdString)
+            elif tag == "variantPlatform":
+                self.variantPlatform = self.cdString
+            elif tag == "id":
+                self.id = self.cdString
+            elif tag == "file":
+                self.files.append(self.cdString)
+            elif tag == "package":
+                self.packages.append((self.id, self.files))
+                self.files = []
+            elif tag == "dep":
+                self.rules.append((self.pathRegex, self.variantPlatform, self.packages))
+                self.packages = []
+
+    def __init__(self, configuration=None):
+        '''
+        Read the rules.
+
+        @param configuration: Filename or file-like object containing the XML rules
+        @type configuration: Openable or readable
+        '''
+        if not configuration:
+            configuration = AdditionalDependencyRules.defaultRules
+        p = xml.sax.make_parser()
+        i = AdditionalDependencyRules.ConfigReader()
+        p.setContentHandler(i)
+        if hasattr(configuration, "read"):
+            f = configuration
+        else:
+            f = open(configuration)
+        while True:
+            data = f.read()
+            if not data:
+                break
+            p.feed(data)
+        p.close()
+        self.rules = i.rules
+
+    def getRules(self):
+        return self.rules
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/Packaging/DependencyProcessors/RomPatchProcessor.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,53 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Rom patch processor
+#
+
+import os
+from Blocks.Packaging.PackageWriter import Dependency
+from Blocks.Packaging.Logging import Logging
+
+class RomPatchDependencyProcessor(object):
+    '''
+    Adds weak dependency from binary to trace package if .iby file is present
+    and contains a line starting with "patchdata".
+
+    The matching buildData must be present.
+    @todo: make file match a configurable expression
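+
+    Usage sketch (follows the addProcessor convention described in
+    DependencyProcessors/__init__.py)::
+
+        pack.addProcessor(RomPatchDependencyProcessor)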
+    '''
+    def __init__(self, *args):
+        self.logger = Logging.getLogger("pfw.processors.rompatch")
+
+    def __call__(self, component, buildData, globalFileMap):
+        if component.getName() != buildData.getComponentName():
+            raise ValueError("Bad dependency info for '%s'. BuildData is for another component '%s'." %
+                (component.getName(), buildData.componentName))
+
+        binPackage = component.getBinaryPackage()
+        tracePackage = component.getTracePackage()
+        if not (binPackage and tracePackage):
+            self.logger.debug("Could not find binary and trace packages for '%s'", buildData.componentName)
+            return False
+
+        for d in component.getFiles():
+            if d.getPath().lower().endswith(".iby") and os.path.normpath(d.getPath()).lower().startswith(os.path.normpath("epoc32/rom/")):
+                if os.path.isfile(os.path.join(buildData.getTargetRoot(), d.getPath())):
+                    for line in file(os.path.join(buildData.getTargetRoot(), d.getPath())):
+                        if line.lower().startswith("patchdata"):
+                            if binPackage.addDependency(Dependency(tracePackage.getName(), type="weak")):
+                                self.logger.debug("%s now depends on %s", binPackage.getName(), tracePackage.getName())
+                            return True
+                else:
+                    self.logger.info("'%s' was not found", file)
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/Packaging/DependencyProcessors/__init__.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,71 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Dependency processors
+#
+
+'''
+Dependency processors provide a modular way to add dependency data to
+PackageModel.Components.
+
+A Packager is given dependency processor class + initarg pairs using the
+addProcessor method. When it has created a Component, it iterates through this
+list, initialising a processor with the corresponding initarg from each entry.
+Finally, it calls the processor instance with Component, BuildData and FileMap
+instances as arguments.
+
+BuildData.dependencyData is a dictionary carrying Component-specific data for
+dependency processors. The keys are processor class names.
+
+To summarize:
+    - The processor __call__() method is responsible for adding the dependency
+    data.
+    - The __call__() method takes Component, FileMap and BuildData as arguments
+    - The processor __init__() takes a single argument. Every instance of a
+    processor class is initialized using the same argument.
+    - Component-specific data is passed to dependency processors via BuildData.
+
+Example:
+    - Dependency processor I{Foo} uses config file I{/bar/baz.conf} containing
+    instructions how to process the components.
+    - Each Component is given unique dependency info for I{Foo} in a I{FooData}
+    instance::
+
+        pack = Packager(storage,                # create a packager
+                epocRoot,
+                sourceRoot)
+        pack.addProcessor(Foo, "bar/baz.conf")  # specify a dependency processor
+        build = PlainBuildData()                # create builddata
+        build.setComponentName("myComponent")
+        build.dependencyData["Foo"] = FooData("Dependency info for myComponent")
+                                                # tag on data for use by Foo
+        pack.addComponent(build)                # make component and package it
+
+    - Packager I{pack} will effectively do::
+
+        component = Component(build.getComponentName())
+                                                # create Component
+        processor = Foo("bar/baz.conf")         # create a Foo
+        processor(component, build, filemap)    # call Foo to apply dependencies
+
+    - I{Foo} will contain the following::
+
+        def __init__(self, configfile):
+            self.readConfig(configfile)
+
+        def __call__(self, component, buildData, fileMap):
+            uniquedata = buildData.dependencyData.get(self.__class__.__name__)
+            # next use config, uniquedata, buildData and fileMap to apply
+            # dependencies to component
+'''
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/Packaging/DependencyProcessors/additionalDependencyRules.xml	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,71 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+All rights reserved.
+This component and the accompanying materials are made available
+under the terms of "Eclipse Public License v1.0"
+which accompanies this distribution, and is available
+at the URL "http://www.eclipse.org/legal/epl-v10.html".
+
+Initial Contributors:
+Nokia Corporation - initial contribution.
+
+Contributors:
+
+Description:
+Additional dependency rules
+
+-->
+
+<deps> 
+  <dep>
+    <match> 
+      <path>^epoc32/include/(.*).rsg$</path>
+      <variantPlatform>winscw</variantPlatform> 
+    </match>
+    <package>
+      <id>exec.emul</id>
+      <depends>
+        <file>epoc32/release/$(VARIANTPLATFORM)/$(VARIANTTYPE)/z/resource/apps/$1.rsc</file>
+        <file>epoc32/release/$(VARIANTPLATFORM)/$(VARIANTTYPE)/z/resource/$1.rsc</file>
+        <file>epoc32/release/$(VARIANTPLATFORM)/$(VARIANTTYPE)/$1.rsc</file>
+        <file>epoc32/release/$(VARIANTPLATFORM)/$(VARIANTTYPE)/plugins/$1.rsc</file>
+      </depends>
+    </package>
+  </dep>
+  <dep>
+    <match> 
+      <path>^epoc32/include/(.*).rsg$</path>
+    </match>
+    <package>
+      <id>exec.$(VARIANTPLATFORM)</id>
+      <depends>
+        <file>epoc32/data/z/resource/apps/$1.rsc</file>
+      </depends>
+    </package>
+  </dep>
+  <dep>
+    <match> 
+      <path>^epoc32/include/(.*).mbg$</path>
+      <variantPlatform>winscw</variantPlatform> 
+    </match>
+    <package>
+      <id>exec.$(VARIANTPLATFORM)</id>
+      <depends>
+        <file>epoc32/release/$(VARIANTPLATFORM)/$(VARIANTTYPE)/z/resource/apps/$1.mbm</file>
+      </depends>
+    </package>
+  </dep>
+  <dep>
+    <match> 
+      <path>^epoc32/include/(.*).mbg$</path>
+    </match>
+    <package>
+      <id>exec.$(VARIANTPLATFORM)</id>
+      <depends>
+        <file>epoc32/data/z/resource/apps/$1.mbm</file>
+      </depends>
+    </package>
+  </dep>
+</deps>
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/Packaging/FileMapping.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,239 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# File mapping
+#
+
+'''
+FileMap is a filename -> owner package lookup table. It stores minimal information about
+the package owning a file. Packager compiles a single large FileMap of all files
+in a release, enabling it to resolve dependencies between packages from
+file->file or package->file dependencies.
+'''
+
+from cPickle import dump, load
+
+from Blocks.Packaging.PackageWriter import VersionInfo, FileSet, Packageable
+from Blocks.Packaging.Logging import Logging
+from Blocks.Packaging import PackagingError
+
+class MissingPackage(PackagingError):
+    ''' No such package in filemap '''
+
+class DuplicateFile(PackagingError):
+    ''' The path already exists in the filemap '''
+
+class DuplicatePackage(PackagingError):
+    ''' The package already exists in the filemap '''
+
+def externalsources(fn):
+    '''
+    A decorator to use external data sources if the filemap does not have
+    the requested data.
+    '''
+    def wrapper(self, *args):
+        out = fn(self, *args)
+        if out:
+            return out
+        for source in self._externalSources:
+            method = getattr(source, fn.func_name, None)
+            if method:
+                return method(*args)
+        return None
+    return wrapper
+
+class StubPackage(VersionInfo):
+    ''' A way to store minimal Package data '''
+    def __init__(self, package=None):
+        if package:
+            self._name = package.getName()
+            self.setIdentifier(package.getIdentifier())
+            self._apiVersion = package.getApiVersion() or None
+            self._apiFingerprint = package.getApiFingerprint() or None
+        else:
+            self._name = None
+            self._apiVersion = None
+            self._apiFingerprint = None
+
+    def getName(self):
+        return self._name
+
+    def getApiVersion(self):
+        return self._apiVersion
+
+    def getApiFingerprint(self):
+        return self._apiFingerprint
+
+    def __str__(self):
+        return "name: %s\ncomponentName: %s\napiVersion: %s\napiFingerprint: %s\nversion: %s\nbuild: %s\n" % (
+            self.getName(),
+            self.getComponentName(),
+            self.getApiVersion(),
+            self.getApiFingerprint(),
+            self.getVersion(),
+            self.getBuild())
+
+class FileMap(object):
+    ''' Contains file -> package mappings. '''
+    def __init__(self, *args):
+        '''
+        Store the package name as well as version and API data of the package
+        each file belongs to.
+
+        @param args: A component or package from which to store the filenames
+        @type args: [Packageable, FileSet or FileMap]
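+
+        Illustrative use (the path is made up)::
+
+            fm = FileMap(component)       # seed from a Packageable
+            fm.addAny(otherFileMap)       # merge another FileMap
+            owner = fm.getPackageName("epoc32/release/armv5/urel/foo.dll")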
+        '''
+        self.files = {}
+        self.stubs = {}
+        self._externalSources = []
+        for arg in args:
+            self.addAny(arg)
+
+    @staticmethod
+    def getLogger():
+        return Logging.getLogger("pfw.filemap")
+
+    def registerExternalSource(self, source):
+        self._externalSources.append(source)
+
+    def addAny(self, any):
+        '''
+        Add files to FileMap
+        @param any: Something that contains file and package information
+        @type any: Packageable, FileSet or FileMap
+        '''
+        if isinstance(any, FileSet):
+            self._addPackage(any)
+        elif isinstance(any, Packageable):
+            for package in any.getPackages():
+                self._addPackage(package)
+        elif isinstance(any, FileMap):
+            for name in any.getPackageNames():
+                if name in self.stubs:
+                    raise DuplicatePackage("Will not add from FileMap containing data for package %s, which already exists." % name)
+
+            for path in any.getPaths():
+                try:
+                    self._addPath(path, any.getPackageName(path))
+                except MissingPackage:
+                    stub = any.getPackage(path)
+                    self.stubs[stub.getName()] = stub
+                    self._addPath(path, any.getPackageName(path))
+        else:
+            raise TypeError("Unable to add this type to FileMap: '%s'" % type(any))
+
+    def _addPackage(self, package):
+        '''
+        @param package: Package to add
+        @type package: FileSet
+        '''
+        if package.getName() in self.stubs:
+            raise DuplicatePackage("Will not add Package %s, already exists." % package.getName())
+        for file in package.getFiles():
+            try:
+                self._addPath(file.path, package.getName())
+            except MissingPackage:
+                self.stubs[package.getName()] = StubPackage(package)
+                self._addPath(file.path, package.getName())
+
+    def _addPath(self, path, packageName):
+        assert isinstance(path, basestring)
+        if path in self.files:
+            self.getLogger().warning("Non-unique file %s, not adding to filemap. Already a member of %s", path, self.getPackageName(path))
+        elif packageName not in self.stubs:
+            raise MissingPackage("Add stub for package %s before adding files to it." % packageName)
+        else:
+            self.files[path] = packageName
+
+    def getPaths(self):
+        return self.files.__iter__()
+
+    def getPackageNames(self):
+        return self.stubs.__iter__()
+
+    @externalsources
+    def getPackage(self, filename):
+        return self.stubs.get(self.getPackageName(filename))
+
+    @externalsources
+    def getPackageName(self, filename):
+        '''
+        @param filename: filename
+        @type filename: String
+        @rtype: String or None
+        '''
+        return self.files.get(filename)
+
+#    @externalsources
+#    def getComponentName(self, filename):
+#        if filename in self._filemap:
+#            return self._filemap[filename].getComponentName()
+#        return None
+
+    @externalsources
+    def getPackageVersion(self, filename):
+        if filename in self.files:
+            return self.getPackage(filename).getVersion()
+
+    @externalsources
+    def getPackageBuild(self, filename):
+        if filename in self.files:
+            return self.getPackage(filename).getBuild()
+
+    @externalsources
+    def getPackageApiVersion(self, filename):
+        if filename in self.files:
+            return self.getPackage(filename).getApiVersion()
+
+    @externalsources
+    def getPackageApiFingerprint(self, filename):
+        if filename in self.files:
+            return self.getPackage(filename).getApiFingerprint()
+
+    def loadFragment(self, filename):
+        '''
+        Appends previously pickled FileMap to this.
+
+        @param filename: The filename or file-like object of a pickled FileMap
+        @type filename: String or file-like object
+        '''
+        self.addAny(FileMap.load(filename))
+
+    @staticmethod
+    def load(f):
+        '''
+        Read a previously pickled FileMap from a file.
+
+        @param f: filename or file-like object
+        @return: A FileMap loaded from f
+        @rtype: FileMap
+        '''
+        if not hasattr(f, "read"):
+            try:
+                f = open(f, "r")
+            except Exception:
+                raise ValueError("not openable: '%s'" % f)
+        f.seek(0)
+        return load(f)
+
+    def dump(self, f):
+        '''
+        Dump the FileMap using pickle
+
+        @param f: filename or file-like object
+        @type f: String or file-like object
+        '''
+        if isinstance(f, basestring):
+            f = open(f, "wb")
+        dump(self, f, 0)
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/Packaging/Logging.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,123 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Logging system
+#
+
+''' Logging '''
+
+import logging
+import os
+import multiprocessing
+
+class NullHandler(logging.Handler):
+    def __init__(self, level=None):
+        logging.Handler.__init__(self, level=level)
+
+    def emit(self, value):
+        pass
+
+    def setLevel(self, level):
+        pass
+
+class Logging(object):
+
+    STATUSNAME = "pfw-status"
+    STATUSLEVEL = logging.DEBUG
+
+    logLevel = None
+    logFormat = '%(asctime)s %(levelname)s %(name)s %(message)s'
+    logTimeFormat = '%Y-%m-%d %H:%M:%S'
+    logDir = None
+    doStatusLogs = False
+    defaultHandler = None
+    lock = None
+
+    @classmethod
+    def getLogger(cls, name="pfw", level=None, addHandler=True):
+        '''
+        Get a pre-configured logger with handler etc.
+        '''
+        l = logging.getLogger(name)
+        l.propagate = False
+        l.setLevel(level or cls.logLevel)
+
+        if addHandler:
+            if not l.handlers:
+                l.addHandler(cls.getDefaultHandler())
+        return l
+
+    @classmethod
+    def getStatusLogger(cls):
+        l = cls.getLogger(name=cls.STATUSNAME, level=cls.STATUSLEVEL, addHandler=False)
+        statusHandler = logging.StreamHandler()
+        statusHandler.setFormatter(logging.Formatter('Status: %(message)s', cls.logTimeFormat))
+        if not l.handlers:
+            l.addHandler(statusHandler)
+        if not cls.doStatusLogs:
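+            # A Filter for a logger name that is never used, so no record
+            # passes and status output is effectively silenced.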
+            statusHandler.addFilter(logging.Filter("nevergonnamatch"))
+        return l
+
+    @classmethod
+    def getDefaultHandler(cls):
+        if cls.logLevel == None:
+            # prevent "no handler" messages
+            defaultHandler = NullHandler()
+        elif cls.logDir:
+            defaultHandler = logging.FileHandler(os.path.join(cls.logDir, "pfw.log"))
+        elif cls.defaultHandler:
+            defaultHandler = cls.defaultHandler
+        else:
+            defaultHandler = logging.StreamHandler()
+        if cls.logFormat and defaultHandler.formatter is None:
+            defaultHandler.setFormatter(logging.Formatter(cls.logFormat, cls.logTimeFormat))
+
+        # Wrap handlers handle method with lock if lock is given
+        if cls.lock:
+            def lockWrapper(method):
+                def lockCall(*args, **kwargs):
+                    with cls.lock:
+                        ret = method(*args, **kwargs)
+                    return ret
+                return lockCall
+            defaultHandler.handle = lockWrapper(defaultHandler.handle)
+
+        return defaultHandler
+
+    @classmethod
+    def setupLogHandling(cls, logLevel=None, logDir=None, logFormat=None, logTimeFormat=None,
+        doStatusLogs=None, handler=None, lock=None):
+
+        if logLevel != None:
+            cls.logLevel = logLevel
+
+        if logDir != None:
+            cls.logDir = logDir
+
+        if logFormat != None:
+            cls.logFormat = logFormat
+
+        if logTimeFormat != None:
+            cls.logTimeFormat = logTimeFormat
+
+        if doStatusLogs != None:
+            cls.doStatusLogs = doStatusLogs
+
+        if handler != None:
+            cls.defaultHandler = handler
+
+        if lock is None:
+            cls.lock = multiprocessing.Lock()
+        else:
+            cls.lock = lock
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/Packaging/MultiprocessPackager.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,266 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Multiprocess packager
+#
+
+'''
+Packager reads BuildData to create PackageModel.Components;
+these are passed to a PackageWriter to create actual packages.
+
+Uses the multiprocessing module for concurrent packaging.
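+
+A typical session might look like this (an illustrative sketch mirroring the
+example in DependencyProcessors/__init__.py)::
+
+    pack = Packager(storage, packageOutputDirectory)
+    pack.addProcessor(DotDeeDependencyProcessor, {})
+    for buildData in allBuildData:
+        pack.addComponent(buildData)    # builders start asynchronously
+    pack.wait()                         # process dependencies, write packages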
+'''
+
+import multiprocessing
+import time
+import itertools
+
+from Blocks.Packaging.PackageWriter import DebWriter, ZipWriter, RpmWriter
+from Blocks.Packaging.ComponentBuilder import ComponentBuilder, BuilderException
+from Blocks.Packaging.Logging import Logging
+from Blocks.Packaging.FileMapping import FileMap
+
+class BuilderWorker(object):
+    def __init__(self, builder, storage):
+        self.builder = builder
+        self.storage = storage
+
+    def __call__(self, buildData):
+        try:
+            return self.builder.createComponent(self.storage, buildData)
+        except KeyboardInterrupt, e:
+            return e
+        except BuilderException:
+            raise
+        except Exception, e:
+            Logging.getLogger("pfw.packager.multiproc.builder").exception(e)
+            raise
+
+class WriterWorker(object):
+
+    def __init__(self, writer, writerOptions, packageOutputDirectory,
+                  storage, filemap, dependencyProcessors=None):
+        self.writer = writer
+        self.writerOptions = writerOptions
+        self.packageOutputDirectory = packageOutputDirectory
+        self.storage = storage
+        self.globalFileMap = filemap
+        self.dependencyProcessors = dependencyProcessors or []
+        self.logger = None
+        self.statusLogger = None
+
+    def getWriter(self):
+        if self.writer == "zip":
+            return ZipWriter(self.writerOptions)
+        elif self.writer == "rpm":
+            return RpmWriter(self.writerOptions)
+        elif self.writer == "deb":
+            return DebWriter(self.writerOptions)
+        else:
+            return None
+
+    def __call__(self, bdCompTuple):
+        try:
+            buildData, component = bdCompTuple
+            self.logger = Logging.getLogger("pfw.packager.multiproc.writer")
+            self.statusLogger = Logging.getStatusLogger()
+            compName = buildData.getComponentName()
+            self.statusLogger.info("Processing dependencies: %s" % compName)
+            tick = time.time()
+            for processorInitData in self.dependencyProcessors:
+                processor = processorInitData[0]
+                if len(processorInitData) == 1:
+                    p = processor()
+                else:
+                    args = processorInitData[1]
+                    p = processor(args)
+                self.logger.debug("Running processor %s on %s" % (processor.__name__, compName))
+                p(component, buildData, self.globalFileMap)
+            tock = time.time()
+            self.logger.info("Resolved dependencies for %s in %s seconds" % (compName, tock - tick))
+            w = self.getWriter()
+            if w:
+                self.statusLogger.info("Sending to writer: %s" % compName)
+                tick = time.time()
+                written = w.write(component, buildData.getTargetRoot(), self.packageOutputDirectory, buildData.getSourceRoot())
+                tock = time.time()
+                self.logger.info("%s: wrote %s bundles to %s in %.3f seconds", compName, len(written), self.packageOutputDirectory, tock - tick)
+            self.storage.writeMetaData(component)
+            self.storage.writeFileMap(component)
+            self.logger.debug("%s: writer done." % compName)
+            return component
+        except KeyboardInterrupt, e:
+            return e
+        except Exception, e:
+            self.logger.exception(e)
+            raise
+
+class Packager(object):
+    ''' Packager '''
+    def __init__(self, storage, packageOutputDirectory,
+                 maxWorkers=None, writer="deb", targetRules=None,
+                 sourceRules=None, directives=None, writerOptions=None,
+                 loadAllPreviousMetaData=True, keepGoing=True):
+        '''
+        @param storage: Storage info
+        @type storage: Blocks.Packaging.Storage.PackagerStorage
+        @param packageOutputDirectory: Where to write package files
+        @type packageOutputDirectory: String
+        @param maxWorkers: Maximum number of concurrent workers to launch. Default is number of CPUs.
+        @type maxWorkers: None or Integer
+        @param targetRules: Alternative packaging rules for target files
+        @type targetRules: String or file-like object
+        @param sourceRules: Alternative packaging rules for source files
+        @type sourceRules: String or file-like object
+        @param directives: Alternative packaging directives
+        @type directives: String or file-like object
+        @param keepGoing: Ignore errors as far as possible
+        '''
+        self.logger = Logging.getLogger("pfw.packager.multiproc")
+        self.globalFileMap = FileMap()
+        self.nonBuiltComponentNames = []    # only filemapping is loaded for these
+        self.builtComponentNames = []       # components built in this session
+        self.storage = storage
+        self.packageOutputDirectory = packageOutputDirectory
+        self.maxWorkers = maxWorkers
+        self.writer = writer
+        self.writerOptions = writerOptions
+        self.dependencyProcessors = []
+        self.loadAllPreviousMetaData = loadAllPreviousMetaData # query storage for all components
+        self.keepGoing = keepGoing
+        self.targetRulesPath = targetRules
+        self.sourceRulesPath = sourceRules
+        self.directivesPath = directives
+        self.times = [time.time()]
+        self.rawBuildData = {}  # buildData indexed by component name
+        self.components = [] # created components
+        self.written = None  # written components
+        self.builderWorker = None
+        logSetup = (Logging.logLevel, Logging.logDir, Logging.logFormat, Logging.logTimeFormat,
+                    Logging.doStatusLogs, None, Logging.lock)
+        self.workerPool = multiprocessing.Pool(maxWorkers, Logging.setupLogHandling, logSetup)
+        self.logger.info("Packager starting build #%s.", self.storage.getBuildId())
+        self.logger.info("Packager storage: %s.", self.storage)
+
+    def addProcessor(self, processorClass, arg=None):
+        '''
+        Register a dependency processor. All processors must be registered
+        before starting writer workers.
+
+        @param processorClass: The dependency processor class
+        @type processorClass: Class
+        @param arg: The init arg passed when instantiating procClass
+        @type arg: Any
+        '''
+        self.dependencyProcessors.append((processorClass, arg))
+
+    def addComponent(self, buildData):
+        '''
+        Add a component for packaging.
+
+        @param buildData: A BuildData object
+        @type buildData: BuildData
+        '''
+        if not self.builderWorker:
+            builder = ComponentBuilder(self.targetRulesPath, self.sourceRulesPath, self.directivesPath, self.keepGoing)
+            self.builderWorker = BuilderWorker(builder, self.storage)
+        self.rawBuildData[buildData.getComponentName()] = buildData
+        self.components.append(self.workerPool.apply_async(self.builderWorker, [buildData]))
+
+    def addNonBuiltComponent(self, componentName):
+        '''
+        Use the filemap of a previously created component. This is normally
+        automatic for all components found via the PackagerStorage instance.
+        Only call this method explicitly if loadAllPreviousMetaData is set to
+        False.
+
+        @param componentName: Component name
+        @type componentName: String
+        '''
+        self.nonBuiltComponentNames.append(componentName)
+
+    def mapNonBuiltComponents(self):
+        '''
+        After builders have finished (and before writing), any non-built
+        components that were not among the "normal" ones are added to the
+        filemap. Normally the storage is queried and all the latest metadata
+        is loaded - disable this by setting loadAllPreviousMetaData to False.
+        '''
+        if self.loadAllPreviousMetaData:
+            for componentName in self.storage.getComponentNames():
+                if componentName not in self.nonBuiltComponentNames:
+                    self.nonBuiltComponentNames.append(componentName)
+
+        ignoreComponents = set(itertools.chain.from_iterable(bd.previousNames for bd in self.rawBuildData.itervalues()))
+        ignoreComponents.update(self.builtComponentNames)
+        for componentName in self.nonBuiltComponentNames:
+            if componentName not in ignoreComponents:
+                self.logger.info("Mapping non-built component: %s" % componentName)
+                oldCompLookupFile = self.storage.getLastFileMapFile(componentName)
+                self.globalFileMap.loadFragment(oldCompLookupFile)
+
+    def wait(self):
+        '''
+        Do everything needed after builders have been started, dependency
+        processors have been registered and all components added.
+        '''
+        try:
+            self.logger.debug("waiting for builder workers")
+            while False in [r.ready() for r in self.components]:
+                for result in self.components:
+                    result.wait()
+                    if result.successful():
+                        self.logger.debug("success %s " % result.get().getName())
+                        if result.get().getName() not in self.builtComponentNames:
+                            self.globalFileMap.addAny(result.get())
+                            self.builtComponentNames.append(result.get().getName())
+                    else:
+                        # raise the exception
+                        result.get()
+            self.logger.debug("built all components")
+            self.times.append(time.time())
+            self.logger.info('Build components: %.3fs' % (self.times[1]-self.times[0]))
+            self.mapNonBuiltComponents()
+            self.times.append(time.time())
+            self.logger.info('Map non built components: %.3fs' % (self.times[2]-self.times[1]))
+            if not self.builtComponentNames:
+                self.logger.warning("exiting without starting writer workers - no components were built.")
+            else:
+                writerWorker = WriterWorker(self.writer, self.writerOptions, self.packageOutputDirectory,
+                                       self.storage, self.globalFileMap, self.dependencyProcessors)
+                bdCompList = [(self.rawBuildData[result.get().getName()], result.get()) for result in self.components if result.successful()]
+                self.written = self.workerPool.map_async(writerWorker, bdCompList)
+                #for result in [result.get() for result in self.components if result.successful()]:
+                #    writerWorker(self.rawBuildData[result.getName()], result)
+                self.logger.debug("waiting for writer workers")
+                self.written.wait()
+                if not self.written.successful():
+                    # raise exceptions
+                    self.written.get()
+                self.times.append(time.time())
+                self.logger.info('Create bundles: %.3fs' % (self.times[3]-self.times[2]))
+
+            self.logger.info('Execution summary:')
+            self.logger.info('Build components: %.3fs' % (self.times[1]-self.times[0]))
+            self.logger.info('Map non built components: %.3fs' % (self.times[2]-self.times[1]))
+            if len(self.times) > 3:
+                self.logger.info('Create bundles: %.3fs' % (self.times[3]-self.times[2]))
+            self.logger.info("done")
+        except KeyboardInterrupt:
+            self.logger.critical("Terminated by keyboard interrupt")
+            self.workerPool.terminate()
+            self.workerPool.join()
+        finally:
+            self.workerPool.close()
+            self.workerPool.join()
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/Packaging/PackageModel.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,702 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Package model
+#
+
+'''
+PackageModel contains a hierarchy of classes that store metadata about files
+for use by PackageWriters::
+
+    + Component [ Files that together provide some functionality are grouped as
+    |             a Component. ]
+    |
+    -- + Package [ The files in a component are sorted together by type, e.g.
+       |           sources, executables or libraries, into Packages. A Package
+       |           is stored as one archive, e.g. armv5 executables in component
+       |           foo could be written into foo.exec-arm-VERSION.deb ]
+       |
+       -- Deliverable [ A file ]
+
+A L{Packager} uses L{Blocks.Packaging.Rules} to determine which subclass of Deliverable to use
+when storing file metadata, as well as which Package it belongs to.
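+
+Illustrative construction (names are made up)::
+
+    c = Component("foo", "1.0")
+    c.addPackage(Package("foo.exec-arm"))
+    print c.getPackage("foo.exec-arm")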
+'''
+
+from cPickle import dump, load
+import os
+
+try:
+    from hashlib import md5
+except ImportError:
+    import md5
+
+from Blocks.Packaging.PackageWriter import Packageable, FileSet
+
+class Component(Packageable):
+    ''' A Component is used to store Packages and associated metadata. '''
+
+    def __init__(self, name, version, packages=None):
+        '''
+        @param name: Component name
+        @param version: Component version
+        @param packages: list of packages belonging to component
+        @type name: String
+        @type version: Version string
+        @type packages: List(Package)
+        '''
+        Packageable.__init__(self)
+        self.name = name
+        if version:
+            self.setVersion(version)
+        self.__attributes = {}
+        self.__packages = {}
+        packages = packages or []
+        for p in packages:
+            self.addPackage(p)
+
+    def __eq__(self, other):
+        if self.getName() == other.getName() \
+            and self.getSignificantAttributes() == other.getSignificantAttributes() \
+            and self.getPackages() == other.getPackages():
+            return True
+        else:
+            return False
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __str__(self):
+        out = "Component:\n\
+        name: %s\n\
+        version: %s\n\
+        buildId: %s\n\
+        filecount: %s\n\
+        packageAllFlag: %s\n\
+        attributes: %s\n\
+        packages: %s\n" % (
+            self.name,
+            self.getVersion(),
+            self.getBuild(),
+            len(self.getFiles()),
+            self.getPackageAllFlag(),
+            [str(k) + ":" + str(v) for k, v in self.getComponentAttributes().items()],
+            [p.getName() for p in self.getPackages()])
+        return out
+
+    @staticmethod
+    def load(f):
+        '''
+        Read a previously pickled component from a file.
+
+        @param f: file or file-like object
+        @return: A Component loaded from f
+        @rtype: Component
+        '''
+        if isinstance(f, basestring) and os.path.isfile(f):
+            f = open(f, "r")
+        return load(f)
+
+    def dump(self, f):
+        '''
+        Dump the Component using pickle
+
+        @param f: filename or file-like object
+        '''
+        if isinstance(f, basestring):
+            f = open(f, "wb")
+        dump(self, f, 0)
+
+    def getFiles(self):
+        '''
+        Return list of files belonging to the member packages.
+
+        @return: files
+        @rtype: List(Deliverable)
+        '''
+        allFiles = []
+        for p in self.getPackages():
+            allFiles += p.getFiles()
+        allFiles.sort()
+        return allFiles
+
+    def addPackage(self, package):
+        '''
+        @param package:
+        @type package: Package
+        '''
+        if type(package) != Package:
+            raise ValueError("Not a Package: %s" % type(package))
+        elif package.getName() in self.__packages:
+            raise ValueError("A package with that name exists already")
+        else:
+            self.__packages[package.getName()] = package
+
+    def getPackage(self, name):
+        '''Get package by name.
+
+        @param name: The package name
+        @type name: String
+        @return: Package
+        @rtype: Package
+        '''
+        if name == "":
+            raise ValueError("getPackage: name cannot be an empty string")
+        return self.__packages.get(name)
+
+    def getPackages(self):
+        '''
+        Return list of all packages.
+
+        @return: The packages that make up the component
+        @rtype: List(Package)
+        '''
+        p = self.__packages.values()
+        p.sort(lambda x, y: cmp(x.getName(), y.getName()))
+        return p
+
+    def getBinaryPackage(self):
+        for package in self.getPackages():
+            if package.getName().count(".exec-"):
+                return package
+        return None
+
+    def getTracePackage(self):
+        for package in self.getPackages():
+            if package.getName().count("trace"):
+                return package
+        return None
+
+    def getName(self):
+        '''
+        @return: name
+        @rtype: String
+        '''
+        return self.name
+
+    def getComponentAttributes(self):
+        '''
+        Get attributes in the raw, as ComponentAttributes.
+
+        @return: Attributes
+        @rtype: Dictionary(ComponentAttribute)
+        '''
+        return self.__attributes
+
+    def getSignificantAttributes(self):
+        '''
+        Get attributes that are significant for packaging purposes.
+
+        @return: Attributes
+        @rtype: Dictionary(any)
+        '''
+        ret = {}
+        for key in self.__attributes.keys():
+            if self.__attributes[key].significant:
+                ret[key] = self.__attributes[key].value
+        return ret
+
+    def getAttributes(self):
+        '''
+        Get attributes stripped of ComponentAttribute metadata.
+
+        @return: Attributes
+        @rtype: Dictionary(Any)
+        '''
+        ret = {}
+        for key in self.__attributes.keys():
+            ret[key] = self.__attributes[key].value
+        return ret
+
+    def setAttribute(self, name, value, significant=False):
+        '''
+        Any type can be passed as a value, but attributes are stored internally
+        as ComponentAttributes. When setting attributes that are significant for
+        packaging rules, i.e. the value is taken into consideration when
+        comparing the equality of two Components, you must wrap the value in a
+        ComponentAttribute or specify significant as True.
+
+        @param name: Key for attribute
+        @param value: Value for attribute
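+
+        Example (attribute name and value are made up)::
+
+            component.setAttribute("layer", "mw", significant=True)
+            component.getSignificantAttributes()   # {'layer': 'mw'}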
+        '''
+        if not isinstance(value, ComponentAttribute):
+            value = ComponentAttribute(value, significant)
+        self.__attributes[name] = value
+
+    def diff(self, other):
+        '''
+        Get details of differences between packages. For quick comparison just
+        use the normal comparison operators == and !=.
+
+        @param other: Another Component
+        @return: less, more, same, different;
+         - Names of Packages not present in self
+         - Names of Packages not present in other
+         - Names of Packages that are the same in both
+         - Names of Packages that are present in both but differ
+        @rtype: (List, List, List, List)
+        '''
+
+        if not isinstance(other, Component):
+            raise ValueError, "Other is not a Component"
+
+        less = []   # What packages does other have that I don't?
+        more = []   # What packages do I have that other doesn't?
+        same = []
+        different = []
+
+        for o in other.getPackages():
+            p = self.getPackage(o.getName())
+            if not p:
+                less.append(o.getName())
+            elif o == p:
+                same.append(o.getName())
+            else:
+                different.append(o.getName())
+
+        for p in self.getPackages():
+            if p.getName() not in less + same + different:
+                more.append(p.getName())
+
+        return (less, more, same, different)
+
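+# A minimal usage sketch for Component.diff (illustrative only; the Component
+# constructor signature and the package names are assumptions, not part of
+# this changeset):
+#
+#     a = Component("foo")
+#     b = Component("foo")
+#     a.addPackage(Package("foo.exec-armv5"))
+#     b.addPackage(Package("foo.src"))
+#     less, more, same, different = a.diff(b)
+#     # less == ["foo.src"], more == ["foo.exec-armv5"], same == different == []
+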
+class ComponentAttribute(object):
+    '''
+    Store extra metadata about Component attributes in addition to the attribute
+    value. The main motivation for using ComponentAttribute is to include a flag
+    indicating whether the attribute is relevant when comparing the equality of
+    two Components.
+    '''
+    def __init__(self, value, significant=False):
+        if type(significant) != bool:
+            raise TypeError("ComponentAttribute.significant must be a Boolean")
+        # Guard against nesting one ComponentAttribute inside another
+        if isinstance(value, ComponentAttribute):
+            raise TypeError("ComponentAttribute value may not itself be a ComponentAttribute")
+        self.value = value
+        self.significant = significant
+
+    def __eq__(self, other):
+        return isinstance(other, ComponentAttribute) and self.value == other.value
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __str__(self):
+        if self.significant:
+            return str(self.value)+"(S)"
+        else:
+            return str(self.value)
+
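+# Equality sketch for ComponentAttribute (illustrative values): __eq__ compares
+# only the value, so the significant flag never affects comparisons; it only
+# marks the attribute as relevant to packaging rules and adds "(S)" to str().
+#
+#     plain = ComponentAttribute("armv5")
+#     marked = ComponentAttribute("armv5", significant=True)
+#     plain == marked        # True - values match, flags are ignored
+#     str(marked)            # "armv5(S)"
+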
+class Package(FileSet):
+    '''
+    A Package is essentially a named list of Deliverables with associated
+    metadata.
+    '''
+    def __init__(self, name, files=None):
+        '''
+
+        @param name: Package name
+        @type name: String
+        @param files: List of Deliverables
+        @type files: List(Deliverable)
+        '''
+        FileSet.__init__(self)
+        if not name:
+            raise ValueError('Unacceptable name "%s" for a Package' % name)
+        self.name = name
+        self.files = {}
+        for f in files or []:
+            self.addFile(f)
+        self.type = None
+        self.arch = None
+        self.group = None
+        self.depends = []       # Dependencies to list the Packages this one depends on
+        self.preconditions = [] # This is packaged only if any of these packages are flagged for packaging
+        self.replaces = []
+
+    def __eq__(self, other):
+        return (isinstance(other, Package) and self.name == other.name
+                and self.getFilesSorted() == other.getFilesSorted())
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __str__(self):
+        return ("Package:\n"
+                "        name: %s\n"
+                "        filecount: %s\n"
+                "        type: %s\n"
+                "        versioninfo: %s\n"
+                "        arch: %s\n"
+                "        group: %s\n"
+                "        packageFlag: %s\n"
+                "        lastPackagedVersion: %s\n"
+                "        apiVersion: %s\n"
+                "        depends: %s\n"
+                "        preconditions: %s\n" % (
+                    self.name,
+                    len(self.getFiles()),
+                    self.type,
+                    self.getIdentifier(),
+                    self.arch,
+                    self.group,
+                    self.getPackageFlag(),
+                    self.getLastPackagedVersion(),
+                    self.getApiVersion(),
+                    [str(v) for v in self.getDependencies()],
+                    self.preconditions))
+
+    def diff(self, other):
+        '''
+        Get details of differences between packages. For quick comparison just
+        use the normal comparison operators == and !=.
+
+        @param other: Another Package
+        @return: less, more, same, different;
+         - Paths of Deliverables not present in self
+         - Paths of Deliverables not present in other
+         - Paths of Deliverables that are the same in both
+         - Paths of Deliverables that are present in both but differ
+        @rtype: (List, List, List, List)
+        '''
+
+        if not isinstance(other, Package):
+            raise ValueError("Other is not a Package")
+
+        less = []   # What does other Package have that I don't?
+        more = []   # What do I have that other doesn't?
+        same = []
+        different = []
+
+        for o in other.getFiles():
+            f = self.getFile(o.path)
+            if not f:
+                less.append(o.path)
+            elif o == f:
+                same.append(o.path)
+            else:
+                different.append(o.path)
+
+        for f in self.getFiles():
+            if f.path not in less + same + different:
+                more.append(f.path)
+
+        return (less, more, same, different)
+
+    def getFiles(self):
+        '''
+        Return list of files belonging to this package.
+
+        @return: files
+        @rtype: List(Deliverable)
+        '''
+        return self.files.values()
+
+    def getFilesSorted(self):
+        '''
+        Return list of files belonging to this package, sorted by path
+
+        @return: files
+        @rtype: List(Deliverable)
+        '''
+        keys = self.files.keys()
+        keys.sort()
+        return [self.getFile(k) for k in keys]
+
+    def getFile(self, path):
+        '''
+        Get a file by path
+        @param path: path
+        '''
+        return self.files.get(path)
+
+    def addFile(self, file):
+        '''
+        Add a new Deliverable to the package
+
+        @param file: The file to be added
+        @type file: Deliverable
+        @return: True on success, False if a Deliverable with the path already exists
+        @raise TypeError: Tried to add a non-Deliverable
+        '''
+        if isinstance(file, Deliverable):
+            if not self.getFile(file.path):
+                self.files[file.path] = file
+                return True
+            else:
+                return False
+        else:
+            raise TypeError("Will only add Deliverables to package. (%s)" % type(file))
+
+    def getName(self):
+        '''
+        @return: name
+        @rtype: String
+        '''
+        return self.name
+
+    def getDependencies(self):
+        '''
+        @return: dependencies
+        @rtype: List(Dependency)
+        '''
+        return self.depends
+
+    def addDependency(self, dependency):
+        if dependency not in self.depends and dependency.package != self.getName():
+            self.depends.append(dependency)
+            return True
+        else:
+            return False
+
+    def getApiFingerprint(self):
+        '''
+        @return: API Fingerprint, a 12-character unique identifer for the API
+        @rtype: String
+        '''
+        apis = {}
+        filenames = []
+        apiKeys = []
+        separator = "\t"
+        if hasattr(md5, "new"):
+            m = md5.new()
+        else:
+            m = md5()
+        if self.getApiVersion():
+            for f in self.getFiles():
+                if (isinstance(f, Executable) and f.api):
+                    # The assumption is that executables with the same name have
+                    # the same API, so only include one of them.
+                    apis[os.path.basename(f.path)] = f.api.interface
+            filenames = apis.keys()
+            filenames.sort()
+            for filename in filenames:
+                m.update(filename + separator)
+                apiKeys = apis[filename].keys()
+                apiKeys.sort()
+                for apiKey in apiKeys:
+                    m.update(str(apiKey) + str(apis[filename][apiKey]))
+                m.update(separator)
+            return m.hexdigest()[:12]
+        else:
+            return None
+
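+# Fingerprint sketch (illustrative; the file name and interface contents are
+# assumptions): the digest covers each executable basename and its sorted
+# interface, so packages exporting the same functions get the same fingerprint.
+#
+#     p = Package("foo.exec-armv5")
+#     p.setApiVersion("1.0")
+#     api = API("1.0", {1: "NewL", 2: "Draw"})
+#     p.addFile(Executable("epoc32/release/armv5/urel/foo.dll", "dll", api=api))
+#     p.getApiFingerprint()  # first 12 hex digits of the md5, e.g. "1a2b3c4d5e6f"
+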
+class Deliverable(object):
+    ''' A class for storing metadata about an individual file. '''
+    def __init__(self, path, type="file", size=None, checksum=None):
+        '''
+        The type can be omitted; by default it is "file".
+
+        @param path: The relative location of the file
+        @param type: The type of Deliverable
+        @param size: Size in bytes
+        @param checksum: md5 hexdigest
+        @type path: String
+        @type type: String
+        @type size: Integer
+        @type checksum: String
+        '''
+        self.path = path
+        self.type = type
+        self.size = size
+        self.checksum = checksum
+
+    def __eq__(self, other):
+        return (isinstance(other, Deliverable)
+                and self.path == other.path and self.checksum == other.checksum)
+
+    def __ne__(self, other):
+        return not self == other
+
+    # TODO: Are these extra comparison operators necessary?
+    def __lt__(self, other):
+        if self.path < other.path:
+            return True
+        elif self.path == other.path:
+            if self.checksum < other.checksum:
+                return True
+        return False
+
+    def __gt__(self, other):
+        if self.path > other.path:
+            return True
+        elif self.path == other.path:
+            if self.checksum > other.checksum:
+                return True
+        return False
+
+    def __str__(self):
+        return "Deliverable:\npath: %s\ntype: %s\nsize: %s\nchecksum: %s\n" % (self.path, self.type, self.size, self.checksum)
+
+    def getPath(self):
+        return self.path
+
+    def getChecksum(self):
+        return self.checksum
+
+    def baseName(self):
+        return os.path.basename(self.path)
+
+class PEBinary(Deliverable):
+    ''' A Deliverable with additional arch and mappath attributes. '''
+
+    def __init__(self, path, type, size=None, checksum=None):
+        '''
+        The type is mandatory
+
+        @param path: The relative location of the file
+        @param type: The type of Deliverable
+        @type path: String
+        @type type: String
+        '''
+        Deliverable.__init__(self, path, type, size, checksum)
+        self.arch = None
+        self.mappath = None
+
+    def __str__(self):
+        out = Deliverable.__str__(self).replace("Deliverable:", "PEBinary:")
+        out += """arch: %s\nmappath: %s\n""" % (self.arch, self.mappath)
+        return out
+
+class E32Binary(Deliverable):
+    ''' A Deliverable with additional arch, mappath and sympath attributes. '''
+
+    def __init__(self, path, type, size=None, checksum=None, variant=None):
+        '''
+        The type is mandatory
+
+        @param path: The relative location of the file
+        @param type: The type of Deliverable
+        @type path: String
+        @type type: String
+        '''
+        Deliverable.__init__(self, path, type, size, checksum)
+        self.variant = variant
+        self.arch = None
+        self.mappath = None
+        self.sympath = None
+
+    def __str__(self):
+        out = Deliverable.__str__(self).replace("Deliverable:", "E32Binary:")
+        out += "variant: %s\narch: %s\nmappath: %s\nsympath: %s\n" % (self.variant, self.arch, self.mappath, self.sympath)
+        return out
+
+class Resource(Deliverable):
+    ''' A Deliverable with target, targetpath and source attributes. '''
+    def __init__(self, path, type, size=None, checksum=None):
+        '''
+        @param path: The relative location of the file
+        @param type: The type of Deliverable
+        @type path: String
+        @type type: String
+        '''
+        Deliverable.__init__(self, path, type, size, checksum)
+        self.target = None
+        self.targetpath = None
+        self.source = None
+
+    def __str__(self):
+        out = Deliverable.__str__(self).replace("Deliverable:","Resource:")
+        out += "target: %s\ntargetpath: %s\nsource: %s\n" % (self.target, self.targetpath, self.source)
+        return out
+
+class Executable(E32Binary):
+    ''' An E32Binary with an optional API attribute. The API is obtained from the associated DSO file.
+
+    Examples: exe, plugin or dll
+    '''
+    def __init__(self, path, type, size=None, checksum=None, variant=None, api=None):
+        '''
+        @param path: The relative location of the file
+        @param type: The type of Executable
+        @type path: String
+        @type type: String
+        '''
+        E32Binary.__init__(self, path, type, size, checksum, variant)
+        if api and not isinstance(api, API):
+            raise ValueError("The api must be an instance of API")
+        self.api = api
+
+    def __str__(self):
+        out = E32Binary.__str__(self).replace("E32Binary:","Executable:")
+        out += "api: %s\n" % self.api
+        return out
+
+class Library(E32Binary):
+    ''' A non-executable binary.
+
+    Examples: staticlib or dso
+    '''
+    def __init__(self, path, type, size=None, checksum=None, variant=None):
+        '''
+        @param path: The relative location of the file
+        @param type: The type of Library
+        @type path: String
+        @type type: String
+        '''
+        E32Binary.__init__(self, path, type, size, checksum, variant)
+
+    def variantPlatform(self):
+        ''' The variant platform, taken from the third component of the path. '''
+        return self.path.lstrip(os.sep).split(os.sep)[2]
+
+    def variantType(self):
+        ''' The variant build type, taken from the fourth component of the path; None for plain "lib" directories. '''
+        out = self.path.lstrip(os.sep).split(os.sep)[3]
+        if out == "lib":
+            return None
+        return out
+
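+# Path sketch for the variant helpers (illustrative; assumes the usual
+# epoc32/release/<platform>/<type>/ layout with native path separators):
+#
+#     lib = Library("epoc32/release/armv5/urel/foo.dso", "dso")
+#     lib.variantPlatform()   # "armv5"
+#     lib.variantType()       # "urel"
+#     Library("epoc32/release/armv5/lib/foo.dso", "dso").variantType()  # None
+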
+class API(object):
+    ''' An API defines an interface and a version, with methods for comparison to another API. '''
+    def __init__(self, version="1.0", interface=None):
+        if type(version) != str:
+            raise TypeError("The version for an API must be a string")
+        if not version:
+            raise ValueError("API version cannot be an empty string")
+        if type(interface) != dict:
+            raise TypeError("The interface for an API must be a dictionary")
+        for k, v in interface.items():
+            if not (k and v):
+                raise ValueError("Cannot have empty keys or values in an interface")
+        self.version = version
+        self.interface = interface
+
+    def __str__(self):
+        return "API:{version: %s, interface_keycount: %s}" % (self.version, len(self.interface.keys()))
+
+    def provides(self, other):
+        ''' Check whether this API is compatible with other: this interface
+        must be identical to or a superset of other's.
+
+        @param other: Another API to compare with
+        @type other: API
+        @return: This provides same functions as other
+        @rtype: Boolean
+        '''
+        ret = True
+        for index, function in other.interface.items():
+            if index in self.interface:
+                if self.interface[index] != function:
+                    ret = False
+            else:
+                ret = False
+        return ret
+
+    def isIdentical(self, other):
+        ''' The interface exports the exact same functions as other.
+
+        @param other: Another API to compare with
+        @type other: API
+        @return: This API is identical to other
+        @rtype: Boolean
+        '''
+        return self.interface == other.interface
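+
+# provides() sketch (illustrative interface contents): a superset interface
+# satisfies a subset, but not the other way around, and a changed function
+# behind an existing ordinal breaks compatibility.
+#
+#     old = API("1.0", {1: "NewL"})
+#     new = API("1.1", {1: "NewL", 2: "Draw"})
+#     new.provides(old)   # True  - new is a superset of old
+#     old.provides(new)   # False - old lacks ordinal 2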
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/Packaging/PackageWriter.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,1032 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Package writer
+#
+
+'''
+Writer classes create packages; the other classes describe the interface for
+passing package data to the writers.
+@todo: zip and rpm writers are not thread safe.
+'''
+
+import os
+import re
+import copy
+from math import ceil
+import textwrap
+import time
+from zipfile import ZipFile, ZIP_DEFLATED
+from StringIO import StringIO
+import tarfile
+
+try:
+    from hashlib import md5
+except ImportError:
+    import md5
+
+from Blocks.Packaging.Logging import Logging
+from Blocks import arfile
+from Blocks import debfile
+
+class VersionInfo(object):
+    '''
+    Bestows the ability to set and get version and build IDs when inherited.
+
+    Used by PackageWriter when handling Packageables and Filesets.
+    '''
+    def __init__(self, version=None, buildId=None, revision=None):
+        self.__identifier = Identifier(version, buildId, revision)
+
+    def getIdentifier(self):
+        '''
+        @return: The identifier - version and build Id info
+        @rtype: Identifier
+        '''
+        return self.__identifier
+
+    def setIdentifier(self, id):
+        '''
+        @param id: A version string or Identifier instance
+        @type id: String or Identifier
+        '''
+        if isinstance(id, str):
+            self.__identifier = Identifier(id)
+        elif isinstance(id, Identifier):
+            self.__identifier = id
+        else:
+            raise TypeError, "Identifier must be a string or Identifier"
+
+    def setVersion(self, version):
+        '''
+        Convenience method
+        @param version: The version component of the identifier:
+        @type version: String
+        '''
+        self.getIdentifier().setVersion(version)
+
+    def getVersion(self):
+        '''
+        Convenience method
+        @return: The version component of the identifier
+        @rtype: String
+        '''
+        return self.getIdentifier().getVersion()
+
+    def setBuild(self, id):
+        '''
+        Convenience method
+        @param id: The build component of the identifier
+        @type id: String
+        '''
+        self.getIdentifier().setBuild(id)
+
+    def getBuild(self):
+        '''
+        Convenience method
+        @return: The build component of the identifier
+        @rtype: String
+        '''
+        return self.getIdentifier().getBuild()
+
+    def setRevision(self, revision):
+        '''
+        Convenience method
+        @param revision: The revision component of the identifier
+        @type revision: String
+        '''
+        self.getIdentifier().setRevision(revision)
+
+    def getRevision(self):
+        '''
+        Convenience method
+        @return: The revision component of the identifier
+        @rtype: String
+        '''
+        return self.getIdentifier().getRevision()
+
+    def setRelease(self, value):
+        '''
+        Convenience method
+        @param value: The release component of the identifier
+        @type value: String
+        '''
+        self.getIdentifier().setRelease(value)
+
+    def getRelease(self):
+        '''
+        Convenience method
+        @return: The release component of the identifier
+        @rtype: String
+        '''
+        return self.getIdentifier().getRelease()
+
+class Packageable(VersionInfo):
+    '''
+    The top level class in the PackageWriter interface. A collection of
+    FileSets.
+
+    Packageable, FileSet, Dependency and VersionInfo define the interface for
+    PackageWriter and implement some basic functionality.
+    '''
+    def __init__(self):
+        VersionInfo.__init__(self, "1", "1", None)
+        self.__packageAllFlag = True
+        self.__sourcePackage = None
+
+    def getPackages(self):
+        '''
+        @return: List(FileSet)
+        '''
+        raise NotImplementedError
+
+    def getPackage(self):
+        '''
+        @return: FileSet
+        '''
+        raise NotImplementedError
+
+    def getName(self):
+        '''
+        @return: the name
+        @rtype: String
+        '''
+        raise NotImplementedError
+
+    def getPackageAllFlag(self):
+        '''
+        @return: packageAll
+        @rtype: Boolean
+        '''
+        return self.__packageAllFlag
+
+    def setPackageAllFlag(self, value):
+        '''
+        @param value: Should all packages be packaged
+        @type value: Boolean
+        '''
+        if type(value) != bool:
+            raise TypeError, "packageAllFlag must be a Boolean"
+        self.__packageAllFlag = value
+
+    def getAttributes(self):
+        '''
+        Metadata excluding name and version. Can be empty.
+
+        @return: Attributes
+        @rtype: Dictionary(attribute)
+        '''
+        raise NotImplementedError
+
+    def getAttribute(self, name):
+        '''
+        A convenience method to access attributes.
+        @param name: attribute name
+        '''
+        if name in self.getAttributes():
+            return self.getAttributes()[name]
+        else:
+            return None
+
+    def setSourcePackage(self, package):
+        if not isinstance(package, FileSet):
+            raise TypeError("Source package must be a FileSet. Got '%s'" % type(package))
+        self.__sourcePackage = package
+
+    def getSourcePackage(self):
+        return self.__sourcePackage
+
+class FileSet(VersionInfo):
+    '''
+    An interface class for PackageWriter; stores filenames.
+
+    See Package for the concrete implementation.
+    '''
+    def __init__(self):
+        VersionInfo.__init__(self, "1", "1")
+        self.__packageFlag = True
+        self.__lastPackagedVersion = Identifier()
+        self.__apiVersion = None
+
+    def getFiles(self):
+        '''
+        @return: The files in the fileset
+        @rtype: List(path)
+        '''
+        raise NotImplementedError
+
+    def getName(self):
+        '''
+        @return: The name of the fileset
+        '''
+        raise NotImplementedError
+
+    def getPackageFlag(self):
+        '''
+        @return: packageFlag - Should the package be written
+        @rtype: Boolean
+        '''
+        return self.__packageFlag
+
+    def setPackageFlag(self, value):
+        '''
+        @param value: Should the package be written
+        '''
+        if type(value) != bool:
+            raise TypeError, "packageFlag must be a Boolean"
+        self.__packageFlag = value
+
+    def getDependencies(self):
+        '''
+        @return: Dependencies that describe what other filesets this depends on
+        @rtype: List(Dependency)
+        '''
+        raise NotImplementedError
+
+    def setLastPackagedVersion(self, last):
+        '''
+        Store the previously packaged version
+        @param last: A FileSet having the last packaged version, or None to
+            use the current package version, or an Identifier.
+        @type last: FileSet or None or Identifier
+        '''
+        if isinstance(last, FileSet):
+            if last is self:
+                self.__lastPackagedVersion = copy.deepcopy(self.getIdentifier())
+            else:
+                self.__lastPackagedVersion = last.getLastPackagedVersion()
+        elif last is None:
+            self.__lastPackagedVersion = copy.deepcopy(self.getIdentifier())
+        elif isinstance(last, Identifier):
+            self.__lastPackagedVersion = last
+        else:
+            raise TypeError, "setLastPackagedVersion takes a FileSet, an Identifier or None as parameter"
+
+    def getLastPackagedVersion(self):
+        '''
+        @return: version
+        @rtype: String
+        '''
+        return self.__lastPackagedVersion
+
+    def setApiVersion(self, version):
+        '''
+        Store the API version
+
+        @param version: The version of the package API
+        @type version: String
+        '''
+        if not isinstance(version, str):
+            raise TypeError("API version must be a string")
+        try:
+            major, minor = version.split(".", 1)
+            if not (major.isdigit() and minor.isdigit()
+                    and int(major) > 0 and int(minor) >= 0):
+                raise ValueError()
+        except ValueError:
+            raise ValueError("API version must be a dot-separated pair of integers")
+        self.__apiVersion = version
+
+    def getApiVersion(self):
+        '''
+        @return: API version
+        @rtype: String or None
+        '''
+        return self.__apiVersion
+
+    def getApiFingerprint(self):
+        '''
+        @return: API Fingerprint
+        @rtype: String
+        '''
+        raise NotImplementedError
+
+class Dependency(VersionInfo):
+    ''' Describes the dependency relation of a package to another '''
+    def __init__(self, package, type="strong", version=None, buildId=None, eval=None, revision=None):
+        '''
+        By default the dependency defines a strong dependency on a package. The
+        version, build and evaluator have to be explicitly set.
+
+        @param package: The name of the package required
+        @type package: String
+        @param type: Either 'strong' or 'weak'
+        @type type: String
+        @param version: The version of the required package.
+        @type version: String
+        @param buildId: The build ID of the required package
+        @type buildId: String
+        @param eval: Evaluator to define an exact version or a range of versions
+        @type eval: String
+        '''
+        VersionInfo.__init__(self, version=version, buildId=buildId, revision=revision)
+        self.package = package
+        self.setEval(eval)
+        self.setType(type)
+
+    def __str__(self):
+        if self.eval:
+            e = self.eval + " " + str(self.getIdentifier())
+        else:
+            e = ""
+        if self.type == "weak":
+            return "weak(%s %s)" % (self.package, e)
+        else:
+            return "strong(%s %s)" % (self.package, e)
+
+    def __eq__(self, other):
+        return (isinstance(other, Dependency) and
+            self.package == other.package and
+            self.eval == other.eval and
+            self.type == other.type and
+            self.getIdentifier() == other.getIdentifier())
+
+    def __ne__(self, other):
+        return not self == other
+
+    def rpmString(self):
+        ''' Return RPM-friendly "Requires" string '''
+        if self.eval:
+            if self.eval == "==":
+                e = " = " + str(self.getIdentifier())
+            else:
+                e = " %s %s" % (self.eval, self.getIdentifier().toString())
+        else:
+            e = ""
+        if self.type == "weak":
+            return "Requires(hint): %s%s" % (self.package, e)
+        else:
+            return "Requires: %s%s" % (self.package, e)
+
+    def debString(self):
+        ''' Return Deb-friendly string '''
+        if self.eval:
+            if self.eval == "==":
+                e = "="
+            elif self.eval == "<":
+                e = "<<"
+            elif self.eval == ">":
+                e = ">>"
+            else:
+                e = self.eval
+            return "%s (%s %s)" % (self.package, e, self.getIdentifier().toString())
+        else:
+            return self.package
+
+    def setType(self, type):
+        '''
+        @param type: strong|weak
+        '''
+        if type in ("strong", "weak"):
+            self.type = type
+        elif type == "":
+            self.type = "strong"
+        else:
+            raise ValueError("Not a valid dependency type: '%s'" % type)
+
+    def setEval(self, eval):
+        '''
+        @param eval: "==" | "<=" | ">=" | "<" | ">" | None
+        '''
+        if eval in ("==", "<=", ">=", "<", ">", None):
+            self.eval = eval
+        else:
+            raise ValueError("Not a valid dependency evaluator: '%s'" % eval)
+
+    def getType(self):
+        return self.type
+
+    def getPackage(self):
+        return self.package
+
+    def getEval(self):
+        return str(self.eval)
+
+class Identifier(object):
+    '''
+    A container for version and build information, replacing a simple version
+    string.
+
+    The Identifier consists of three units: version, revision and build. Version
+    and revision make up the externally assigned identifier. Normally these are
+    concatenated to make the identifier string: VERSION-REVISION.
+
+    The build ID identifies a package in the build history, and is thus internal
+    to the packaging framework. It can, however, feature in the identifier
+    string in two ways:
+        - the revision can contain "$b", which is replaced by the build ID
+        - if setUseEpoch() is called it becomes the epoch, which overrides
+          the version string in priority. In this case the identifier string
+          becomes EPOCH:VERSION-REVISION.
+
+    Epoch should only be used when the version is not guaranteed to increment in
+    subsequent builds. Epoch can only have the value of the build ID.
+    '''
+
+    VERSION_RE = re.compile(r"^\w[\w\.~+-]*$")
+    REVISION_RE = re.compile(r"^(\w|\$b|\$c)([\w\.~+]|\$b|\$c)*$")
+
+    def __init__(self, version=None, build=None, revision=None):
+        '''
+        @param version: Version
+        @type version: String
+        @param build: Build
+        @type build: String
+        '''
+        self.__version = None
+        self.__buildId = None
+        self.__revision = None
+        self.__releaseCounter = "1"
+        self.__useEpoch = False
+        self.setVersion(version)
+        self.setBuild(build)
+        self.setRevision(revision)
+
+    def getVersion(self):
+        '''
+        @return: Version
+        @rtype: String
+        '''
+        return self.__version
+
+    def setVersion(self, version):
+        '''
+        Allowed values are alphanumeric characters and .~+-. The first character
+        must be alphanumeric.
+
+        @param version: Version
+        @type version: None or String
+        '''
+        if version is not None:
+            if not isinstance(version, basestring):
+                raise TypeError("Version must be a string or None; got '%s'" % type(version))
+            if not Identifier.VERSION_RE.match(version):
+                raise ValueError("Empty version or illegal characters in version string: %s" % version)
+            if "\n" in version:
+                raise ValueError("Cannot have linebreaks in version string")
+        self.__version = version
+
+    def getBuild(self):
+        '''
+        @return: Build ID
+        @rtype: String
+        '''
+        return self.__buildId
+
+    def setBuild(self, id):
+        '''
+        Build must be a string containing digits.
+
+        @param id: Build ID
+        @type id: String
+        '''
+        if id is not None:
+            if not isinstance(id, basestring):
+                raise TypeError("buildId must be a string or None")
+            if not id.isdigit():
+                raise ValueError("buildId must be a string containing digits")
+            if not id:
+                raise ValueError("buildId cannot be an empty string")
+        self.__buildId = id
+
+    def getRevision(self):
+        '''
+        @return: Revision
+        @rtype: String
+        '''
+        if self.__revision:
+            return self.__revision.replace("$b", self.getBuild() or "").replace("$c", str(self.__releaseCounter))
+        else:
+            return self.__releaseCounter
+
+    def setRevision(self, revision):
+        '''
+        Allowed values are alphanumeric characters and .~+. The first character
+        must be alphanumeric.
+
+        The exception is "$b", which can occur anywhere and is replaced by the
+        build ID.
+
+        @param revision: Revision
+        @type revision: String
+        '''
+        if revision is not None:
+            if not isinstance(revision, basestring):
+                raise TypeError("Revision must be a string or None; got '%s'" % type(revision))
+            if not Identifier.REVISION_RE.match(revision):
+                raise ValueError("Empty version or illegal characters in revision string: %s" % revision)
+            if "\n" in revision:
+                raise ValueError("Cannot have linebreaks in revision string")
+        self.__revision = revision
+
+    def toString(self, format=None):
+        ''' Return the identifier with optional format.
+
+        If format is specified, it is returned with the following replacements::
+
+            $b -> build
+            $v -> version
+            $r -> revision
+            $c -> release counter
+
+        If format is not specified, the default format is used:
+            - normally I{version[-revision]}
+            - If build is epoch: I{[epoch:]version[-revision]}
+
+        "$b" is always replaced by the build in the revision string.
+        "$c" is always replaced by the release counter in the revision string.
+
+        @param format: Format string
+        @type format: String
+        '''
+        if format:
+            format = format.replace("$b", self.getBuild() or "")
+            format = format.replace("$v", self.getVersion() or "")
+            format = format.replace("$r", self.getRevision() or "")
+            format = format.replace("$c", self.getRelease() or "")
+            return format
+        else:
+            out = self.getVersion()
+            if self.getUseEpoch() and self.getBuild() != "0":
+                out = "%s:%s" % (self.getBuild(), out)
+            if self.getRevision():
+                out += "-%s" % self.getRevision()
+            return out
+
+    def setRelease(self, value):
+        if not (isinstance(value, basestring) and value.isdigit()):
+            raise ValueError("Release counter must be a string of digits")
+        self.__releaseCounter = value
+
+    def getRelease(self):
+        return self.__releaseCounter
+
+    def setUseEpoch(self, value=None):
+        if value != False:
+            value = True
+        self.__useEpoch = value
+
+    def getUseEpoch(self):
+        return self.__useEpoch
+
+    def getEpoch(self):
+        if self.getUseEpoch():
+            return self.getBuild()
+        else:
+            return "0"
+
+    def __eq__(self, other):
+        return (isinstance(other, Identifier) and
+            self.getBuild() == other.getBuild() and
+            self.getVersion() == other.getVersion() and
+            self.getRevision() == other.getRevision())
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __str__(self):
+        return self.toString(format="$b:$v:$r")
+
+    @staticmethod
+    def fromString(string):
+        ''' Deserialize from str(Identifier) '''
+        build, version, revision = string.split(":")
+        return Identifier(version or None, build or None, revision or None)
+
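+# Identifier sketch (illustrative values): "$b" in the revision expands to the
+# build ID, and enabling epoch prefixes the build ID to the identifier string.
+#
+#     i = Identifier(version="10.1", build="42", revision="r$b")
+#     i.toString()    # "10.1-r42"
+#     i.setUseEpoch()
+#     i.toString()    # "42:10.1-r42"
+#     str(i)          # "42:10.1:r42" - the serialized form fromString() reads
+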
+class PackageWriter(object):
+    '''
+    Abstract class.
+
+    A PackageWriter packages files from the filesystem into a set of one or more
+    packages.
+
+    The classes FileSet and Packageable define how data is passed to a
+    PackageWriter. Files are listed in FileSets, which are passed to a
+    PackageWriter in a Packageable.
+    '''
+    def __init__(self, options=None):
+        self.packageable = None
+        self.options = options
+        self.logger = Logging.getLogger("pfw.packagewriter")
+
+    def write(self, packageable, epocRoot, destinationDir, sourceRoot):
+        '''
+        Writes package file(s) from a Packageable
+
+        @param packageable: The object (usually Component) to write into bundles.
+        @type packageable: Packageable
+        @param epocRoot: The path to the directory containing epoc32.
+        @type epocRoot: String
+        @param destinationDir: The path where packages should be written.
+        @type destinationDir: String
+        @param sourceRoot: The path to the directory containing src.
+        @type sourceRoot: String
+        '''
+        written = []
+        self.setPackageable(packageable)
+        destinationDir = os.path.normpath(destinationDir)
+        if not os.path.isdir(destinationDir):
+            raise ValueError("Not a directory: %s" % destinationDir)
+
+        if sourceRoot and packageable.getSourcePackage():
+            if self.packageable.getPackageAllFlag() or packageable.getSourcePackage().getPackageFlag():
+                written.append(self.writeSinglePackage(packageable.getSourcePackage(), sourceRoot, destinationDir))
+        else:
+            self.logger.info("%s - Not writing source bundle: sourcelist or sourceroot missing"%packageable.getName())
+
+        if bool(packageable.getPackages()) ^ bool(epocRoot):
+            self.logger.info("%s - Not writing target bundles: packages or epocroot missing"%packageable.getName())
+        else:
+            for p in self.packageable.getPackages():
+                if self.packageable.getPackageAllFlag() or p.getPackageFlag():
+                    out = self.writeSinglePackage(p, epocRoot, destinationDir)
+                    if type(out) == tuple:
+                        written += out
+                    else:
+                        written.append(out)
+                else:
+                    self.logger.debug("%s - Not writing %s: unchanged." % (packageable.getName(), p.getName()))
+        return written
+
+    def setPackageable(self, packageable):
+        if not isinstance(packageable, Packageable):
+            raise TypeError("PackageWriter only handles Packageables")
+        self.packageable = packageable
+
+    def writeSinglePackage(self, package, sourceDir, destinationDir):
+        raise NotImplementedError("Cannot use abstract PackageWriter")
+
+class ZipWriter(PackageWriter):
+    ''' Write packages as compressed Zip files. '''
+    def __init__(self, options=None):
+        PackageWriter.__init__(self, options)
+
+    def writeSinglePackage(self, package, sourceDir, destinationDir):
+        pkgfile = os.path.join(destinationDir, package.getName() + ".zip")
+        out = ZipFile(pkgfile, "w", compression=ZIP_DEFLATED)
+        os.chdir(sourceDir)
+        for p in package.getFiles():
+            out.write(p.path)
+        out.close()
+        return pkgfile
+
+class RpmWriter(ZipWriter):
+    '''
+    Overview
+    ========
+        Writes the ingredients to create RPM packages from a Packageable.
+            - Zip source files
+            - Specfiles
+            - A makefile for creating the actual RPMs.
+
+    Attributes
+    ==========
+        If the metadata required by rpmbuild is not available from Packageable or
+        Fileset attributes, a warning is logged and dummy values are used.
+
+        Below are lists of attributes used. Unlisted attributes are ignored.
+
+        Mandatory Packageable attributes
+        --------------------------------
+            - summary
+            - license
+            - vendor
+            - group
+            - description
+
+        Optional Packageable attributes
+        -------------------------------
+            - url
+    '''
+
+    def __init__(self, options=None):
+        ZipWriter.__init__(self, options)
+        self.specFileName = None
+        self.specFile = None
+        self.logger = Logging.getLogger("pfw.packagewriter.rpm")
+        self.mandatoryPackageableAttributes = [
+            #PackageableAttributeName, SpecAttributeName, default
+            ("summary", "Summary", "-"),
+            ("license", "License", "-"),
+            ("vendor", "Vendor", "-"),
+            ("group", "Group", "devel")]
+        self.mandatorySectionNames = [("description", "description")]
+        self.optionalPackageableAttributes = [("url", "URL")]
+
+    def writeSinglePackage(self, package, sourceDir, destinationDir):
+        relocatePrefix = "/usr/local/src"
+        rpmSpecDir = os.path.join(destinationDir, "SPECS/")
+        rpmSrcDir = os.path.join(destinationDir, "SOURCES/")
+        if not os.path.isdir(rpmSpecDir):
+            os.makedirs(rpmSpecDir)
+        if not os.path.isdir(rpmSrcDir):
+            os.makedirs(rpmSrcDir)
+
+        zip = ZipWriter.writeSinglePackage(self, package, sourceDir, rpmSrcDir)
+
+        versionCharacters = re.compile(r"[^\.a-zA-Z0-9_]")
+        if versionCharacters.search(package.getVersion()):
+            raise ValueError("Unsuitable characters in package version %s" % package.getVersion())
+        self.specFileName = rpmSpecDir + package.getName() + ".spec"
+        self.specFile = open(self.specFileName, "wb") #we want unix linebreaks
+        self.specFile.write("%define __os_install_post %{nil}\n")
+        self.specFile.write("%define __find_requires   /bin/true\n")
+        self.specFile.write("Name: " + package.getName() + "\n")
+        self.specFile.write("Source: " + rpmSpecDir + "%{name}.zip\n")
+        self.specFile.write("Buildroot: %{_tmppath}/%{name}-buildroot\n")
+        self.specFile.write("BuildArch: noarch\n")
+        self.specFile.write("Version: %s\n" % package.getVersion())
+        self.specFile.write("Release: %s\n" % package.getRevision())
+        #self.specFile.write("Epoch: %s\n"%package.getBuild())
+        self.specFile.write("Prefix: %s\n" % relocatePrefix)
+
+        for n, s, d in self.mandatoryPackageableAttributes:
+            if n in self.packageable.getAttributes():
+                self.specFile.write(s + ": " + str(self.packageable.getAttribute(n)) + "\n")
+            else:
+                self.logger.info("Component attribute '%s' is not defined, using '%s'", n, d)
+                self.specFile.write("%s: %s\n" % (s, d))
+
+        for k, v in self.optionalPackageableAttributes:
+            if k in self.packageable.getAttributes():
+                self.specFile.write(v + ": " + str(self.packageable.getAttribute(k)) + "\n")
+
+        for d in package.getDependencies():
+            self.specFile.write(d.rpmString()+"\n")
+
+        if self.packageable.getAttribute("description"):
+            description = self.packageable.getAttribute("description")
+        else:
+            description = "NO DESCRIPTION"
+        self.specFile.write("\n%description\n")
+        self.specFile.write("%s\n\n"%description)
+
+        self.specFile.write("%prep\n")
+        self.specFile.write("%setup -c -n %{name}\n")
+
+        self.specFile.write("\n%install\n")
+        self.specFile.write("rm -rf $RPM_BUILD_ROOT%s\n"%relocatePrefix)
+        self.specFile.write("mkdir -p $RPM_BUILD_ROOT%s\n"%relocatePrefix)
+        self.specFile.write("cp -r * $RPM_BUILD_ROOT%s/\n"%relocatePrefix)
+
+        self.specFile.write("\n%files\n%defattr(644,root,root)\n")
+        for f in package.getFiles():
+            the_path = relocatePrefix + "/" + f.getPath()
+            self.specFile.write(the_path + "\n")
+
+        self.specFile.close()
+        return (zip, self.specFileName)
+
+class DebWriter(PackageWriter):
+    '''
+    Overview
+    ========
+       Writes deb archives from Packageables.
+
+    Attributes
+    ==========
+        Each Deliverable belonging to the Packageable must have a size.
+
+        Recommended Packageable attributes that should be set are:
+            - group
+            - summary
+            - description
+
+        The summary and description are used to create the deb Description, and
+        as such the summary should be brief - it is truncated to 60 characters.
+        The description is meant to be a longer text.
+
+        Unlisted attributes are also included. The attribute name is
+        capitalized and a prefix "X-" is added if necessary.
+    '''
+
+    DEP_MAPPINGS = ("Depends", "Enhances", "Recommends", "Suggests", None)
+
+    def __init__(self, options=None):
+        PackageWriter.__init__(self, options)
+        self.logger = Logging.getLogger("pfw.packagewriter.deb")
+
+    @staticmethod
+    def getFileTarInfo(name, file):
+        '''
+        Create TarInfo for file-like object
+
+        @param name: Name for tar member file
+        @param file: file-like object
+        '''
+        ti = tarfile.TarInfo()
+        ti.name = name
+        if hasattr(file, "len"):
+            try:
+                int(file.len)
+                ti.size = file.len
+            except ValueError:
+                ti.size = 0
+        else:
+            try:
+                ti.size = len(file)
+            except TypeError:
+                pass
+        ti.mtime = long(time.time())
+        ti.mode = int(0644)
+        ti.uname = "user"
+        ti.gname = "group"
+        ti.uid = 0
+        ti.gid = 0
+        return ti
+
+    @staticmethod
+    def getDirTarInfo(path):
+        '''
+        For adding arbitrary directory paths to a tarfile
+        @param path: Directory path to represent in the tar archive
+        @type path: String
+        '''
+        ti = tarfile.TarInfo()
+        ti.name = path
+        ti.type = tarfile.DIRTYPE
+        ti.mtime = long(time.time())
+        ti.mode = int(0755)
+        ti.uname = "user"
+        ti.gname = "group"
+        ti.uid = 0
+        ti.gid = 0
+        return ti
+
+    def writeSinglePackage(self, package, sourceDir, destinationDir):
+        revision = ""
+        if package.getRevision():
+            revision = "-%s" % package.getRevision()
+        debFileName = "%s_%s%s.deb" % (package.getName(), package.getVersion(), revision)
+
+        # copyright
+        copyright_file = StringIO()
+        if self.packageable.getAttribute("vendor"):
+            v = "Vendor: " + self.packageable.getAttribute("vendor").strip()
+            copyright_file.write(v[:80] + "\n")
+        if "license" in self.packageable.getAttributes():
+            l = "License: " + self.packageable.getAttribute("license")
+            wrap = textwrap.TextWrapper(subsequent_indent=" ", width=80)
+            copyright_file.write("\n .\n".join([wrap.fill(line) for line in l.split("\n")]))
+            copyright_file.write("\n")
+        copyright_file.write("\n")
+        copyright_file.seek(0)
+
+        # md5sum
+        md5sum_file = StringIO()
+        for p in package.getFiles():
+            if hasattr(md5, "new"):
+                m = md5.new()
+            else:
+                m = md5()
+            f = open(os.path.join(sourceDir, p.path), "rb")
+            m.update(f.read())
+            f.close()
+            md5sum_file.write(m.hexdigest() + "  " + p.path + "\n")
+        md5sum_file.seek(0)
+
+        # control
+        control_file = StringIO(self.__getControl(package))
+        deb = debfile.DebFile(os.path.join(destinationDir, debFileName), mode="w")
+        deb.addfile(arfile.ArInfo("debian-binary"))
+        deb.write("2.0\n")
+        deb.addfile(arfile.ArInfo("control.tar.gz"))
+        f = tarfile.TarFile.open(mode="w:gz", fileobj=deb)
+        f.addfile(self.getFileTarInfo("control", control_file), control_file)
+        f.addfile(self.getFileTarInfo("md5sum", md5sum_file), md5sum_file)
+        f.addfile(self.getFileTarInfo("copyright", copyright_file), copyright_file)
+        f.close()
+
+        # data
+        deb.addfile(arfile.ArInfo("data.tar.gz"))
+        d = tarfile.TarFile.open(mode="w:gz", fileobj=deb)
+        seenDirs = []
+        for p in package.getFiles():
+            absPath = os.path.join(sourceDir, p.path)
+            parentDir = None
+            nextDir = os.path.dirname(p.path)
+            # dpkg appears to require that each directory is listed in the tarball
+            while os.path.dirname(p.path) not in seenDirs:
+                parentDir = os.path.split(nextDir)[0]
+                if parentDir in seenDirs or parentDir == "":
+                    seenDirs.append(nextDir)
+                    if nextDir != "":
+                        d.addfile(self.getDirTarInfo(nextDir))
+                    nextDir = os.path.dirname(p.path)
+                else:
+                    nextDir = parentDir
+            # add file to tarfile without sourceDir
+            tInfo = d.gettarinfo(absPath, p.path)
+            tInfo.uname = "user"
+            tInfo.gname = "group"
+            tInfo.uid = 0
+            tInfo.gid = 0
+            if tInfo.mode & 0100:
+                tInfo.mode = int(0755)
+            else:
+                tInfo.mode = int(0644)
+            thefile = open(absPath, "rb")
+            d.addfile(tInfo, thefile)
+            thefile.close()
+        d.close()
+        if self.options:
+            if "SIGN_KEYRING" in self.options and os.path.isdir(self.options.get("SIGN_KEYRING")):
+                signType = self.options.get("SIGN_TYPE", "origin")
+                try:
+                    deb.addSignature(signType, self.options.get("SIGN_KEYRING"), True)
+                except debfile.DebError, e:
+                    self.logger.warning(str(e))
+        deb.close()
+        return debFileName
+
+    def __getControl(self, fileset):
+        '''Create a control file from Packageable and FileSet information
+
+        @param fileset: The FileSet we are creating a Deb from
+        @type fileset: Fileset
+        @return: Text for control file
+        @rtype: String
+        '''
+        installedSize = 0
+        for p in fileset.getFiles():
+            installedSize += int(p.size)
+
+        # Installed-Size is given in kB; round the byte total up to the nearest kB
+        if installedSize > 0:
+            installedSize = int(ceil(installedSize / 1024.0))
+
+        control = ("Package: %s\n"
+                   "Version: %s\n"
+                   "Priority: optional\n"
+                   "Installed-Size: %s\n"
+                   "Architecture: all\n") % (fileset.getName(), fileset.getIdentifier().toString(), installedSize)
+
+        section = fileset.group or self.packageable.getAttribute("group") or None
+        if section:
+            control += "Section: %s\n" % section
+
+        strongDep = "Depends"
+        weakDep = "Recommends"
+        if self.options:
+            # None is a valid value, so the default has to be something else
+            option = self.options.get("STRONG_DEP_MAPPING", "illegal")
+            if option in self.DEP_MAPPINGS:
+                strongDep = option
+            option = self.options.get("WEAK_DEP_MAPPING", "illegal")
+            if option in self.DEP_MAPPINGS:
+                weakDep = option
+
+        # If it's None we don't want any deps listed
+        if strongDep:
+            strongDepList = ", ".join([d.debString().strip() for d in fileset.getDependencies() if d.getType() == "strong"])
+            if strongDepList:
+                control += "%s: %s\n" % (strongDep, strongDepList)
+
+        if weakDep:
+            weakDepList = ", ".join([d.debString().strip() for d in fileset.getDependencies() if d.getType() == "weak"])
+            if weakDepList:
+                control += "%s: %s\n" % (weakDep, weakDepList)
+
+        # TODO: This is actually package. Change name
+        if fileset.replaces:
+            packages = ", ".join(fileset.replaces)
+            for field in ("Conflicts", "Replaces", "Provides"):
+                control += "%s: %s\n" % (field, packages)
+
+        for k, v in self.packageable.getAttributes().items():
+            if k not in ("summary", "description", "group", "vendor", "license"):
+                if k.startswith("X-"):
+                    control += "%s: %s\n" % (k.capitalize(), v)
+                else:
+                    control += "X-%s: %s\n" % (k.capitalize(), v)
+
+        description = ""
+        if self.packageable.getAttribute("summary"):
+            description += self.packageable.getAttribute("summary")[:60] + "\n"
+        if self.packageable.getAttribute("description"):
+            if not description:
+                description = "\n"
+            wrap = textwrap.TextWrapper(initial_indent=" ", subsequent_indent=" ", width=80)
+            description += "\n .\n".join([wrap.fill(line) for line in self.packageable.getAttribute("description").split("\n")])
+        if not description:
+            description = "n/a"
+        control += "Description: " + description.rstrip()
+        control += "\n\n"
+        return control
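+
+# Sketch of a resulting control stanza (illustrative values only):
+#
+#     Package: foo.exec-armv5
+#     Version: 1.2-1
+#     Priority: optional
+#     Installed-Size: 12
+#     Architecture: all
+#     Section: devel
+#     Depends: foo.resource (= 1.2-1)
+#     Description: Foo runtime binaries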
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/Packaging/Rules/Rules.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,47 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Default rules
+#
+
+'''
+The default rules that determine how files are assigned to packages, the naming
+of packages and metadata such as intra-component dependencies.
+'''
+
+import os
+
+def packageDirectives():
+    '''
+    Rules used by PackageBuilder when creating Packages.
+    @return: path to rules configuration file
+    @rtype: String
+    '''
+    return os.path.join(os.path.dirname(__file__), "packageDirectives.xml")
+
+def sourceRules():
+    '''
+    Rules used by PackageBuilder when creating files (Deliverables) for source packages
+    @return: path to rules configuration file
+    @rtype: String
+    '''
+    return os.path.join(os.path.dirname(__file__), "sourceRules.xml")
+
+def targetRules():
+    '''
+    Rules used by PackageBuilder when creating files (Deliverables) for binary packages
+    @return: path to rules configuration file
+    @rtype: String
+    '''
+    return os.path.join(os.path.dirname(__file__), "targetRules.xml")
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/Packaging/Rules/__init__.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,18 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Rules
+#
+
+from Blocks.Packaging.Rules.Rules import *
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/Packaging/Rules/packageDirectives.xml	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,128 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+All rights reserved.
+This component and the accompanying materials are made available
+under the terms of "Eclipse Public License v1.0"
+which accompanies this distribution, and is available
+at the URL "http://www.eclipse.org/legal/epl-v10.html".
+
+Initial Contributors:
+Nokia Corporation - initial contribution.
+
+Contributors:
+
+Description:
+Packaging directives
+
+-->
+
+<rules>
+  <rule>
+    <match>
+      <package>^dev\.noarch$</package>
+    </match>
+    <arch>noarch</arch>
+    <suffix>.dev</suffix>
+  </rule>
+  <rule>
+    <match>
+      <package>^doc\.noarch$</package>
+    </match>
+    <arch>noarch</arch>
+    <suffix>.doc</suffix>
+  </rule>
+  <rule>
+    <match>
+      <package>^dev\.(.+)$</package>
+    </match>
+    <arch>$1</arch>
+    <suffix>.dev-$1</suffix>
+    <depends>dev.noarch</depends>
+  </rule>
+  <rule>
+    <match>
+      <package>^resource\.noarch$</package>
+    </match>
+    <arch>noarch</arch>
+    <suffix>.resource</suffix>
+  </rule>
+  <rule>
+    <match>
+      <package>^resource-l10n\.noarch$</package>
+    </match>
+    <arch>noarch</arch>
+    <suffix>.resource-l10n</suffix>
+  </rule>
+  <rule>
+    <match>
+      <package>^resource-l10n\.(.+)$</package>
+    </match>
+    <arch>$1</arch>
+    <suffix>.resource-l10n-$1</suffix>
+  </rule>
+  <rule>
+    <match>
+      <package>^exec\.(.+)$</package>
+    </match>
+    <arch>$1</arch>
+    <suffix>.exec-$1</suffix>
+    <depends>resource.noarch</depends>
+    <depends>resource.$1</depends>
+  </rule>
+  <rule>
+    <match>
+      <package>^exec-trace\.(.+)$</package>
+    </match>
+    <arch>$1</arch>
+    <suffix>.trace-$1</suffix>
+    <depends>exec.$1</depends>
+    <precondition>exec.$1</precondition>
+  </rule>
+  <rule>
+    <match>
+      <package>^resource\.(.+)$</package>
+    </match>
+    <arch>$1</arch>
+    <suffix>.resource-$1</suffix>
+    <depends>resource.noarch</depends>
+  </rule>
+  <rule>
+    <match>
+      <package>^tools\.noarch$</package>
+    </match>
+    <arch>noarch</arch>
+    <suffix>.tools</suffix>
+  </rule>
+  <rule>
+    <match>
+      <package>^tools\.(.+)$</package>
+    </match>
+    <arch>$1</arch>
+    <suffix>.tools-$1</suffix>
+    <depends>tools.noarch</depends>
+  </rule>
+  <rule>
+    <match>
+      <package>^src\.noarch$</package>
+    </match>
+    <arch>noarch</arch>
+    <suffix>.src</suffix>
+  </rule>
+<!-- Catch known and unexpected problems -->
+  <rule>
+    <match>
+      <package>^legacy$</package>
+    </match>
+    <arch>noarch</arch>
+    <suffix>.legacy</suffix>
+  </rule>
+  <rule>
+    <match>
+      <package>^extras$</package>
+    </match>
+    <arch>noarch</arch>
+    <suffix>.extras</suffix>
+  </rule>
+</rules>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/Packaging/Rules/sourceRules.xml	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+All rights reserved.
+This component and the accompanying materials are made available
+under the terms of "Eclipse Public License v1.0"
+which accompanies this distribution, and is available
+at the URL "http://www.eclipse.org/legal/epl-v10.html".
+
+Initial Contributors:
+Nokia Corporation - initial contribution.
+
+Contributors:
+
+Description:
+Source rules
+
+-->
+
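+<!-- A single catch-all rule: every source file is packaged, as a plain
+     file, into the src.noarch package. -->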
+<rules>
+  <rule>
+    <match>
+      <path>.*</path>
+    </match>
+    <type>file</type>
+    <package>src.noarch</package>
+  </rule>
+</rules>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/Packaging/Rules/targetRules.xml	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,475 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+All rights reserved.
+This component and the accompanying materials are made available
+under the terms of "Eclipse Public License v1.0"
+which accompanies this distribution, and is available
+at the URL "http://www.eclipse.org/legal/epl-v10.html".
+
+Initial Contributors:
+Nokia Corporation - initial contribution.
+
+Contributors:
+
+Description:
+Target rules
+
+-->
+
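+<!-- Rules appear to be evaluated in order with the first match winning; note
+     the catch-all "extras" rule at the end. The optional uid elements match
+     the first words of the file, e.g. uid1 464c457f is the ELF magic used
+     below to route Linux tool binaries into tools.linux packages. -->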
+<rules>
+  <rule>
+    <match>
+      <path>^epoc32/(include|sdk_special|stdapis)/.*</path>
+    </match>
+    <type>file</type>
+    <package>dev.noarch</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/cshlpcmp_template/.*</path>
+    </match>
+    <type>file</type>
+    <package>doc.noarch</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/engdoc/.*</path>
+    </match>
+    <type>file</type>
+    <package>doc.noarch</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/rom/tools/.*\.(bat|cmd|dll|exe)$</path>
+    </match>
+    <type>file</type>
+    <package>tools.win</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/rom/tools/.*</path>
+<!-- identify ELF32 binaries -->
+      <uid1>464c457f</uid1>
+      <uid2>00010101</uid2>
+      <uid3>00000000</uid3>
+    </match>
+    <type>file</type>
+    <package>tools.linux</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/rom/tools/.*</path>
+<!-- identify ELF64 binaries -->
+      <uid1>464c457f</uid1>
+      <uid2>00010102</uid2>
+      <uid3>00000000</uid3>
+    </match>
+    <type>file</type>
+    <package>tools.linux</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/rom/tools/.*</path>
+    </match>
+    <type>file</type>
+    <package>tools.noarch</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/(rom|rombuild|s60)/.*</path>
+    </match>
+    <type>file</type>
+    <package>resource.noarch</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/data/.*\.r[0-9][0-9]$</path>
+    </match>
+    <type>file</type>
+    <package>resource-l10n.noarch</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/data/.*</path>
+    </match>
+    <type>file</type>
+    <package>resource.noarch</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/winscw/.*</path>
+    </match>
+    <type>file</type>
+    <package>resource.emul</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/winscw/(deb|rel|udeb|urel)/z/.*\.r[0-9][0-9]$</path>
+    </match>
+    <type>file</type>
+    <package>resource-l10n.emul</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/winscw/(deb|rel|udeb|urel)/z/.*</path>
+    </match>
+    <type>file</type>
+    <package>resource.emul</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/winscw/(deb|rel|udeb|urel)/.*\.map$</path>
+    </match>
+    <type>file</type>
+    <package>exec-trace.emul</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/winscw/(deb|rel|udeb|urel)/.*\.(agt|csy|dll|drv|exe|ext|esy|fsy|ldd|pdd|loc|msy|nif|pdl|prt|tsy|wsy|pxy)$</path>
+    </match>
+    <type>intel_pe</type>
+    <package>exec.emul</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/winscw/(deb|rel|udeb|urel)/.*\.lib$</path>
+    </match>
+    <type>intel</type>
+    <package>exec.emul</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/winscw/(deb|rel|udeb|urel)/.*</path>
+    </match>
+    <type>file</type>
+    <package>exec.emul</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/armv[5-7](smp)?(\.([0-9a-zA-Z_-]+))?/(udeb|urel)/z/.*</path>
+    </match>
+    <type>file</type>
+    <package>resource.arm</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/(armv[5-7](smp)?)/(urel|udeb)/.*\.lib$</path>
+    </match>
+    <type>staticlib</type>
+    <package>dev.arm</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/(armv[5-7](smp)?)\.([0-9a-zA-Z_-]+)/(urel|udeb)/.*\.lib$</path>
+    </match>
+    <type>staticlib</type>
+    <variant>$3</variant>
+    <package>dev.arm</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/(armv[5-7](smp)?)/(urel|udeb)/(.*)\.exe$</path>
+    </match>
+    <type>exe</type>
+    <package>exec.arm</package>
+    <extras>
+      <optional>epoc32/release/$1/$3/$4.exe.sym</optional>
+    </extras>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/(armv[5-7](smp)?)\.([0-9a-zA-Z_-]+)/(urel|udeb)/(.*)\.exe$</path>
+    </match>
+    <type>exe</type>
+    <variant>$3</variant>
+    <package>exec.arm</package>
+    <extras>
+      <optional>epoc32/release/$1.$3/$4/$5.exe.sym</optional>
+    </extras>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/(armv[5-7](smp)?)/(urel|udeb)/(.*)\.dll$</path>
+      <uid2>10009d8d</uid2>
+    </match>
+    <type>plugin</type>
+    <package>exec.arm</package>
+    <extras>
+      <optional>epoc32/release/$1/$3/$4.dll.sym</optional>
+    </extras>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/(armv[5-7](smp)?)\.([0-9a-zA-Z_-]+)/(urel|udeb)/(.*)\.dll$</path>
+      <uid2>10009d8d</uid2>
+    </match>
+    <type>plugin</type>
+    <package>exec.arm</package>
+    <variant>$3</variant>
+    <extras>
+      <optional>epoc32/release/$1.$3/$4/$5.dll.sym</optional>
+    </extras>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/(armv[5-7](smp)?)/(urel|udeb)/(.*)\.vmap$</path>
+    </match>
+    <type>file</type>
+    <package>exec.arm</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/(armv[5-7](smp)?)\.([0-9a-zA-Z_-]+)/(urel|udeb)/(.*)\.vmap$</path>
+    </match>
+    <type>file</type>
+    <package>exec.arm</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/(armv[5-7](smp)?)/(urel|udeb)/(.*)\.(agt|csy|dll|drv|ext|esy|fsy|ldd|pdd|loc|msy|nif|pdl|prt|tsy|wsy|pxy)$</path>
+    </match>
+    <type>dll</type>
+    <package>exec.arm</package>
+    <api>epoc32/release/armv5/lib/$4.dso</api>
+    <extras>
+      <optional>epoc32/release/$1/$3/$4.$5.sym</optional>
+    </extras>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/(armv[5-7](smp)?)\.([0-9a-zA-Z_-]+)/(urel|udeb)/(.*)\.(agt|csy|dll|drv|ext|esy|fsy|ldd|pdd|loc|msy|nif|pdl|prt|tsy|wsy|pxy)$</path>
+    </match>
+    <type>dll</type>
+    <package>exec.arm</package>
+    <variant>$3</variant>
+    <api>epoc32/release/armv5/lib/$5.dso</api>
+    <extras>
+      <optional>epoc32/release/$1.$3/$4/$5.$6.sym</optional>
+    </extras>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/(armv[5-7](smp)?)/[^/]+\.bin$</path>
+    </match>
+    <type>file</type>
+    <package>exec.arm</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/(armv[5-7](smp)?)\.([0-9a-zA-Z_-]+)/[^/]+\.bin$</path>
+    </match>
+    <type>file</type>
+    <package>exec.arm</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/armv[5-7](smp)?/(urel|udeb)/[^/]+\.(bin|fxt|cpm|sc|[0-9][0-9])$</path>
+    </match>
+    <type>exe</type>
+    <package>exec.arm</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/armv[5-7](smp)?\.([0-9a-zA-Z_-]+)/(urel|udeb)/[^/]+\.(bin|fxt|cpm|sc|[0-9][0-9])$</path>
+    </match>
+    <type>exe</type>
+    <package>exec.arm</package>
+    <variant>$2</variant>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/armv[5-7](smp)?(\.([0-9a-zA-Z_-]+))?/(urel|udeb)/.*\.sym$</path>
+    </match>
+    <type>file</type>
+    <package>exec-trace.arm</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/armv[5-7](smp)?(\.([0-9a-zA-Z_-]+))?/(urel|udeb)/.*\.map$</path>
+    </match>
+    <type>file</type>
+    <package>exec-trace.arm</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/armv[5-7](smp)?(\.([0-9a-zA-Z_-]+))?/lib/.*\.dso$</path>
+    </match>
+    <type>dso</type>
+    <package>dev.arm</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/armv[5-7](smp)?(\.([0-9a-zA-Z_-]+))?/lib/.*\.lib$</path>
+    </match>
+    <type>dso</type>
+    <package>dev.arm</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/armv[5-7](smp)?(\.([0-9a-zA-Z_-]+))?/[^/]+\.def$</path>
+    </match>
+    <type>file</type>
+    <package>dev.arm</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/localisation/group/.*\.info$</path>
+    </match>
+    <type>file</type>
+    <package>resource.noarch</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/localisation/.*/rsc/.*\.rpp$</path>
+    </match>
+    <type>file</type>
+    <package>resource.noarch</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/localisation/.*</path>
+    </match>
+    <type>file</type>
+    <package>resource.noarch</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/engineeringtools/.*</path>
+    </match>
+    <type>file</type>
+    <package>tools.win</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/gcc/.*</path>
+    </match>
+    <type>file</type>
+    <package>tools.win</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/gcc_mingw/.*</path>
+    </match>
+    <type>file</type>
+    <package>tools.win</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/release/winscw/(udeb|urel)/z/.*</path>
+    </match>
+    <type>file</type>
+    <package>resource.emul</package>
+  </rule>
+<!-- tools -->
+  <rule>
+    <match>
+      <path>^epoc32/release/(tools|tools2)/.*</path>
+    </match>
+    <type>file</type>
+    <package>dev.tools</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/tools/.*\.(bat|cmd|dll|exe)$</path>
+    </match>
+    <type>file</type>
+    <package>tools.win</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/tools/.*</path>
+<!-- identify ELF32 binaries -->
+      <uid1>464c457f</uid1>
+      <uid2>00010101</uid2>
+      <uid3>00000000</uid3>
+    </match>
+    <type>file</type>
+    <package>tools.linux</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/tools/.*</path>
+<!-- identify ELF64 binaries -->
+      <uid1>464c457f</uid1>
+      <uid2>00010102</uid2>
+      <uid3>00000000</uid3>
+    </match>
+    <type>file</type>
+    <package>tools.linux</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/sbs_config/.*</path>
+    </match>
+    <type>file</type>
+    <package>tools.noarch</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/(tools|ost_dictionaries)/.*</path>
+    </match>
+    <type>file</type>
+    <package>tools.noarch</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/stubs/tools/.*\.(bat|cmd|dll|exe)$</path>
+    </match>
+    <type>file</type>
+    <package>tools.win</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/stubs/tools/.*</path>
+<!-- identify ELF32 binaries -->
+      <uid1>464c457f</uid1>
+      <uid2>00010101</uid2>
+      <uid3>00000000</uid3>
+    </match>
+    <type>file</type>
+    <package>tools.linux</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/stubs/tools/.*</path>
+<!-- identify ELF64 binaries -->
+      <uid1>464c457f</uid1>
+      <uid2>00010102</uid2>
+      <uid3>00000000</uid3>
+    </match>
+    <type>file</type>
+    <package>tools.linux</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/stubs/tools/.*</path>
+    </match>
+    <type>file</type>
+    <package>tools.noarch</package>
+  </rule>
+<!-- Legacy -->
+  <rule>
+    <match>
+      <path>^epoc32/release/(thumb|armi|arm5|arm9e|marm|arm4|eabi|gcce|wins|winc)/.*</path>
+    </match>
+    <type>file</type>
+    <package>legacy</package>
+  </rule>
+  <rule>
+    <match>
+      <path>^epoc32/wins/.*</path>
+    </match>
+    <type>file</type>
+    <package>legacy</package>
+  </rule>
+<!-- Catch all -->
+  <rule>
+    <match>
+      <path>.*</path>
+    </match>
+    <type>file</type>
+    <package>extras</package>
+  </rule>
+</rules>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/Packaging/Storage.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,396 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Component metadata storage
+#
+
+''' Component metadata storage '''
+
+import time
+import os
+
+from Blocks.Packaging.FileMapping import FileMap
+from Blocks.Packaging import PackagingError
+from Blocks.singleinstance import SingleInstance
+
+class IncompleteData(PackagingError):
+    ''' There was a previous build but the requested data was not saved. '''
+
+class NoPreviousBuild(PackagingError):
+    ''' There have been no builds for the component. '''
+
+class InternalError(PackagingError):
+    ''' Internal problem '''
+
+class PackagerStorage(object):
+    '''Interface for providing the paths to metadata files required by a Packager.
+
+    When a storage instance is created, the build ID is incremented (if it is stored across uses).
+    '''
+
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def writeFileMap(self, component):
+        '''Write filemap to storage.
+
+        @param component: A component
+        @type component: Blocks.Packaging.PackageModel.Component
+        '''
+        raise NotImplementedError
+
+    def writeMetaData(self, component):
+        '''Write metadata to storage.
+
+        @param component: A component
+        @type component: Blocks.Packaging.PackageModel.Component
+        '''
+        raise NotImplementedError
+
+    def getLastFileMapFile(self, componentName):
+        '''Get path to file map file of most recent build.
+
+        @param componentName: Name of the component
+        @type componentName: String
+        @return: Path
+        @rtype: String
+        '''
+        raise NotImplementedError
+
+    def getLastMetaDataFile(self, componentName):
+        '''Get path to metadata file of most recent build.
+
+        @param componentName: Name of the component
+        @type componentName: String
+        @return: Path
+        @rtype: String
+        '''
+        raise NotImplementedError
+
+    def getBuildId(self):
+        '''Return the ID of the current build.
+
+        @return: The ID
+        @rtype: String
+        '''
+        raise NotImplementedError
+
+    def getComponentNames(self):
+        '''Return a list of components whose metadata is found in this storage
+        instance; this can be metadata, a file-to-package mapping, or both.
+
+        @return: Names of components
+        @rtype: List(String)
+        '''
+        raise NotImplementedError
+
+    def __str__(self):
+        return self.__class__.__name__
+
+class DefaultPackagerStorage(PackagerStorage):
+    '''
+    Store metadata in a directory hierarchy::
+
+        + Basedir
+        |
+        -- + Component
+           |
+           -- Version
+
+    A running build ID is maintained, but it can be overridden.
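+
+    A minimal usage sketch (the path and the component object are
+    illustrative, not part of this module)::
+
+        storage = DefaultPackagerStorage("/var/blocks/meta")
+        storage.writeMetaData(component)  # <basedir>/<component>/<build>/meta.dat
+        storage.writeFileMap(component)   # <basedir>/<component>/<build>/file2pkg.dat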
+    '''
+
+    def __init__(self, basedir, build=None, lock=False):
+        '''
+        @param basedir: The directory containing a directory for each component's metadata
+        @type basedir: string
+        @param build: Overrides the incrementing build ID
+        @type build: int
+        @param lock: Lock the storage for exclusive access by this instance.
+        The lock remains until the instance is deleted or release() is called.
+        @type lock: bool
+        '''
+        if not os.path.isdir(basedir):
+            raise ValueError("Base directory '%s' does not exist. Cannot continue." % basedir)
+
+        self._baseDir = basedir
+        self._idFile = os.path.join(basedir, "build_id")
+        self._lockDir = os.path.join(basedir, ".lock")
+
+        try:
+            self._lastBuild = self._readId()
+        except NoPreviousBuild:
+            self._lastBuild = 0
+
+        if build:
+            if int(build) < int(self._lastBuild):
+                raise ValueError("Build ID (%s) must be equal or greater than the previous one (%s)" % (build, self._lastBuild))
+            self._build = str(build)
+            self._lastBuild = str(int(self._build) - 1)
+        else:
+            self._build = str(int(self._lastBuild) + 1)
+
+        if lock:
+            si = SingleInstance(basedir, True, ".si-lock")
+            si.waitRelease()
+
+    def writeFileMap(self, component):
+        _mkdir(os.path.join(self._baseDir, component.getName()))
+        if _mkdir(os.path.join(self._baseDir, component.getName(), self._build)):
+            self._writeId()
+        fMap = FileMap(component)
+        fMap.dump(os.path.join(self._baseDir, component.getName(), self._build, "file2pkg.dat"))
+
+    def writeMetaData(self, component):
+        _mkdir(os.path.join(self._baseDir, component.getName()))
+        if _mkdir(os.path.join(self._baseDir, component.getName(), self._build)):
+            self._writeId()
+        component.dump(os.path.join(self._baseDir, component.getName(), self._build, "meta.dat"))
+
+    def getLastFileMapFile(self, componentName):
+        if self._lastBuild == 0:
+            raise NoPreviousBuild("No previous builds")
+        last = os.path.join(self._baseDir, componentName, self._latestBuildFor(componentName), "file2pkg.dat")
+        if not os.path.isfile(last):
+            raise IncompleteData("No file mapping data was stored for the last build of the component; the expected file %s is does not exist." % last)
+        else:
+            return last
+
+    def getLastMetaDataFile(self, componentName):
+        if self._lastBuild == 0:
+            raise NoPreviousBuild("No previous builds")
+        last = os.path.join(self._baseDir, componentName, self._latestBuildFor(componentName, maxId=self._lastBuild), "meta.dat")
+        if not os.path.isfile(last):
+            raise IncompleteData("No metadata was stored for the last build of the component; the expected file %s is does not exist." % last)
+        else:
+            return last
+
+    def getBuildId(self):
+        return self._build
+
+    def getComponentNames(self):
+        return [f for f in os.listdir(self._baseDir) if f not in ("build_id", ".lock", ".si-lock")]
+
+    def _writeId(self, buildId=None):
+        '''
+        Use a lock directory to make sure only one instance updates the ID
+        file at a time. This is needed by the multiprocess packager; it is not
+        for running multiple Packager / PFW instances in parallel.
+        '''
+        buildId = buildId or self._build
+        maxTries = 50
+        wait = 0.01
+        for _ in range(maxTries):
+            try:
+                os.mkdir(self._lockDir)
+                try:
+                    try:
+                        f = open(self._idFile, "wb")
+                        f.write(buildId)
+                        f.close()
+                        return True
+                    except Exception, e:
+                        raise InternalError("Failed to update %s to ID %s: %s" % (self._idFile, buildId, e))
+                finally:
+                    os.rmdir(self._lockDir)
+            except OSError, e:
+                if e.errno != 17: # 17 == EEXIST: another process holds the lock
+                    raise
+                time.sleep(wait)
+        # Timed out; assume a stale lock was left behind by an earlier failure
+        # and force the update, removing the stale lock directory afterwards.
+        try:
+            f = open(self._idFile, "wb")
+            f.write(buildId)
+            f.flush()
+            f.close()
+            return True
+        except Exception:
+            pass
+        finally:
+            os.rmdir(self._lockDir)
+
+    def _readId(self):
+        ''' Get the last build from idFile, or raise NoPreviousBuild. '''
+        maxTries = 50
+        wait = 0.01
+        for tries in range(maxTries):
+            try:
+                f = open(self._idFile)
+                i = f.read().strip()
+                f.close()
+                assert i.isdigit(), "Corrupt ID file %s" % self._idFile
+            except IOError, e:
+                # build_id does not exist (yet?)
+                # give it a chance
+                if e.errno == 2:
+                    time.sleep(wait)
+                    continue
+                else:
+                    raise NoPreviousBuild, str(e)
+            except Exception, e:
+                raise NoPreviousBuild, str(e)
+            return i
+        raise NoPreviousBuild
+
+    def _latestBuildFor(self, componentName, maxId=None):
+        '''Find the directory containing the latest metadata for componentName.
+
+        @param componentName: Component name
+        @type componentName: String
+        @param maxId: The ID shall not be greater than maxId
+        @type maxId: Integer
+        @return: Path to the metadata directory
+        @rtype: String
+        '''
+        compDir = os.path.join(self._baseDir, componentName)
+        if not os.path.isdir(compDir):
+            raise NoPreviousBuild, "No previous builds for %s" % componentName
+        dirs = [d for d in os.listdir(compDir)
+                if d.isdigit() and os.path.isdir(os.path.join(compDir, d))]
+        if not dirs:
+            raise NoPreviousBuild("No metadata in build history for %s" % componentName)
+        dirs.sort(key=int)
+        latest = dirs.pop()
+        if maxId and int(latest) > int(maxId):
+            if dirs:
+                return dirs.pop()
+            else:
+                raise NoPreviousBuild("Build history for %s only has entries newer than %s" % (componentName, maxId))
+        else:
+            return latest
+
+    def __str__(self):
+        return "%s at %s" % (self.__class__.__name__, self._baseDir)
+
+class OneoffStorage(PackagerStorage):
+    '''
+    OneoffStorage is for a one-off build without a directory structure for
+    consecutive builds. Previously created metadata can be used, although
+    only for acquiring metadata and not for calculating the build ID.
+    '''
+    def __init__(self, basedir=None, build=1, previousMetaDir=None, flat=False):
+        '''
+        Basedir is the output directory, previousMetaDir contains meta.dat of
+        previous build (optional)
+
+        A subdirectory is created in basedir for each component, unless flat is
+        True. It's not advisable to use the flat option if metadata or filemap
+        for more than one component is being written.
+
+        Note also that when using the flat option the metadata is not easily
+        accessible using OneoffStorage later on.
+
+        @param basedir:         Output directory - if None nothing is written.
+        @type basedir:          String
+        @param build:           Mandatory for OneoffStorage
+        @param previousMetaDir: Set this path to the basedir of the last build
+                                to read old metadata. Assumes flat was not used,
+                                i.e. directories named after components are
+                                found in previousMetaDir.
+        @type previousMetaDir:  String
+        @param flat:            Set to True to put metadata files directly in
+                                the base directory; normally they are stored in
+                                a directory named after the component.
+        @type flat:             Boolean
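+
+        A sketch of the flat layout (paths illustrative)::
+
+            storage = OneoffStorage(basedir="/tmp/out", build=1, flat=True)
+            storage.writeMetaData(component)  # writes /tmp/out/meta.dat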
+        '''
+        if not build:
+            raise AttributeError("Must have a build ID for OneOffStorage")
+        if previousMetaDir and not os.path.isdir(previousMetaDir):
+            raise ValueError("Old metadata path %s is not a directory" % previousMetaDir)
+        PackagerStorage.__init__(self)
+        self._baseDir = basedir
+        self._previousMetaDir = previousMetaDir
+        self._build = str(build)
+        self._flat = flat
+
+    def __str__(self):
+        return "%s with %s metadata directory" % (self.__class__.__name__, self._baseDir or "no")
+
+    def writeFileMap(self, component):
+        if self._baseDir:
+            if self._flat:
+                outDir = self._baseDir
+            else:
+                outDir = os.path.join(self._baseDir, component.getName())
+                _mkdir(outDir)
+            fMap = FileMap(component)
+            fMap.dump(os.path.join(outDir, "file2pkg.dat"))
+
+    def writeMetaData(self, component):
+        if self._baseDir:
+            if self._flat:
+                outDir = self._baseDir
+            else:
+                outDir = os.path.join(self._baseDir, component.getName())
+                _mkdir(outDir)
+            component.dump(os.path.join(outDir, "meta.dat"))
+
+    def getLastMetaDataFile(self, componentName):
+        ''' Get last metadata file '''
+        if not self._previousMetaDir:
+            raise NoPreviousBuild("Cannot get last metadata - no previous build directory was given to OneOffStorage")
+        if self._flat:
+            componentName = ""
+        path = os.path.join(self._previousMetaDir, componentName, "meta.dat")
+        if os.path.isfile(path):
+            return path
+        else:
+            if not self._flat and os.path.isdir(os.path.join(self._previousMetaDir, componentName)):
+                raise IncompleteData("No metadata was stored for the last build; the expected file %s is does not exist." % path)
+            else:
+                raise NoPreviousBuild("The metadata file %s does not exist" % path)
+
+    def getLastFileMapFile(self, componentName):
+        ''' Get last map file '''
+        if not self._previousMetaDir:
+            raise NoPreviousBuild("Cannot get last filemap - no previous build directory was given to OneOffStorage")
+        if self._flat:
+            componentName = ""
+        path = os.path.join(self._previousMetaDir, componentName, "file2pkg.dat")
+        if os.path.isfile(path):
+            return path
+        else:
+            if not self._flat and os.path.isdir(os.path.join(self._previousMetaDir, componentName)):
+                raise IncompleteData("No filemapping was stored for the last build; the expected file %s is does not exist." % path)
+            else:
+                raise NoPreviousBuild("The filemap file %s does not exist" % path)
+
+    def getBuildId(self):
+        return self._build
+
+    def getComponentNames(self):
+        '''
+        Only returns anything if a previousMetaDir was provided. Any
+        subdirectory containing metadata or a file-to-package mapping is
+        assumed to be named after a component.
+        '''
+        if self._previousMetaDir:
+            return [f for f in os.listdir(self._previousMetaDir)
+                    if os.path.isfile(os.path.join(self._previousMetaDir, f, "meta.dat"))
+                    or os.path.isfile(os.path.join(self._previousMetaDir, f, "file2pkg.dat"))]
+        else:
+            return []
+
+def _mkdir(path):
+    ''' Create path; return True if created, False if it already existed. '''
+    try:
+        os.mkdir(path)
+        return True
+    except OSError, e:
+        if e.errno != 17: # 17 == EEXIST
+            raise
+        return False
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/Packaging/__init__.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,131 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Packaging framework main package
+#
+
+'''
+A Packaging framework
+=====================
+
+Blocks is a framework for packaging files into easily distributed and managed
+units. When a codebase changes, a correctly functioning packaging framework
+provides a way to distribute only the units that have changed as a result. The
+units are referred to as I{bundles}, rather than packages, in Blocks terminology
+to avoid confusion with other uses of the term package.
+
+Blocks is agnostic in terms of bundle formats, though the default Deb format
+is the best supported.
+
+Blocks also provides separate end user tools for installing, upgrading and
+removing packages created by the packaging framework.
+
+Packaging framework usage - overview
+====================================
+
+L{Packager} provides the main interface for using the packager framework. The
+following inputs are required for a Packager:
+    - A L{PackagerStorage} object to provide paths to metadata files and build
+    ID data to the Packager. Two implementations for different scenarios are
+    provided:
+        - L{DefaultPackagerStorage} maintains a I{component/build}
+        directory hierarchy to store metadata and provides automatic build
+        ID incrementation.
+        - L{OneoffStorage} is used when a full build history is not required.
+    - One or more L{BuildData} objects, each containing the necessary data to
+    create a packageable component:
+        - Component name, version
+        - Lists of source and target files
+        - Dependencies on other files
+    The L{DataSources} module contains classes for creating BuildData from
+    various types of input.
+    - The output directory for written bundles.
+
+Optional inputs include:
+    - One or more names of previously packaged components, the metadata of which
+    we wish to use for calculating dependencies.
+    - L{DependencyProcessors} for creating dependencies between bundles.
+
+Packaging can be tweaked by changing Packager variables, including specifying
+an alternative L{PackageWriter} or packaging L{Rules} used to:
+    - classify and allocate target and source files to bundles
+    - name package files
+    - create dependencies between packages within a component
+
+Packaging framework usage - example
+===================================
+Import modules::
+    import Blocks.Packaging
+
+    # for reading Raptor output
+    from Blocks.Packaging.DataSources.WhatLog import *
+    from Ft.Xml.Xslt import Transform
+    import xml.sax
+
+    # dependency handling
+    from Blocks.Packaging.DependencyProcessors import RaptorDependencyProcessor
+    from Blocks.Packaging.DependencyProcessors.RomPatchProcessor import *
+
+Read Raptor output from file I{input.xml}::
+    parser = xml.sax.make_parser()
+    reader = GenericLogReader()
+    parser.setContentHandler(reader)
+    parser.feed(Transform("input.xml", GenericLogReader.defaultStyleSheet))
+    parser.close()
+
+Use GenericLogReader to create BuildData from a list of files associated with
+I{src/common/generic/my/group/bld.inf}::
+    myBuildData = reader.createBuildData(
+        infs=["/path/to/sourceroot/src/common/generic/my/group/bld.inf"],
+        name="myComponent",
+        version="1",
+        )
+
+    # add source data not provided by GenericLogReader
+    myBuildData.addSourceFiles(["src/common/generic/my/group/bld.inf"])
+    myBuildData.setSourceRoot("/path/to/sourceroot")
+
+Create storage instance for storing package files and metadata in I{/tmp/myComponent}::
+    # Use metadata from previous build, found in "/tmp/oldbuild/"
+    storage = Blocks.Packaging.OneoffStorage(basedir="/tmp", build=2, previousMetaDir="/tmp/oldbuild/")
+
+Create the packager::
+    myPackager = Blocks.Packaging.Packager(storage, "/path/for/writing/packages")
+
+Add dependency processors; indirect dependency rules are found at I{/foo/indirect.rules}::
+    myPackager.addProcessor(RomPatchDependencyProcessor, None)
+    myPackager.addProcessor(DotDeeDependencyProcessor, {})
+    myPackager.addProcessor(Blocks.Packaging.BuildDataDependencyProcessor, "/foo/indirect.rules")
+
+Read metadata of component I{otherComponent}, to which there may be dependencies::
+    myPackager.addNonBuiltComponent("otherComponent")
+
+Write build metadata and packages that have changed since the last build::
+    myPackager.addComponent(myBuildData)
+    myPackager.wait()
+'''
+
+class PackagingError(Exception):
+    ''' Parent class for packaging framework exceptions '''
+
+# Flat namespace with default classes
+
+from Storage import DefaultPackagerStorage, OneoffStorage
+from PackageModel import *
+from BuildData import PlainBuildData, BdFile
+from MultiprocessPackager import Packager
+from Logging import Logging
+from DependencyProcessors.DefaultProcessors import *
+from PackageWriter import PackageWriter
+import Rules
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/__init__.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,18 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Blocks framework main module
+#
+
+''' Blocks '''
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/arfile.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,383 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Ar-file manager
+#
+
+import os
+import struct
+import stat
+import time
+
+_GLOBAL_HEADER = "!<arch>\n"
+_FILE_HEADER_STRING = "16s 12s 6s 6s 8s 10s 2s"
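+# Field widths: name(16) mtime(12) uid(6) gid(6) mode(8) size(10) magic(2)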
+_FILE_HEADER_STRUCT = struct.Struct(_FILE_HEADER_STRING)
+_FILE_MAGIC = "\140\012"
+
+class ArError(Exception):
+    ''' Ar-archive error '''
+    def __init__(self, error):
+        Exception.__init__(self, "Ar-archive error: %s" % error)
+        self.error = error
+
+class ArInfo(object):
+    ''' Ar-file information '''
+    __slots__ = ("name", "size", "mtime", "uid", "gid", "mode", "offset")
+
+    def __init__(self, *args, **kwargs):
+        '''
+        @param name: Filename
+        @type name:  String
+        @param path: Path of file, use to set values
+        @type path:  String
+
+        If both name and path are given, the name overrides the name from path.
+
+        Use case 1, new ArInfo::
+            a = ArInfo()                                  # empty name
+            a = ArInfo("myFile")
+            a = ArInfo(name="myFile")
+
+        Use case 2, read from file::
+            a = ArInfo(path="/path/to/file")
+            a = ArInfo("myFile", path="/path/to/file")    # name is myFile rather than file
+            a = ArInfo("myFile", "/path/to/file")         # name is myFile rather than file
+
+        Use case 3, set all values from a list, as read from a raw ar header::
+            a = ArInfo(list)
+        '''
+        path = None
+        self.name = ""          # file name
+        self.size = 0           # size in bytes
+        self.mtime = int(time.time())          # modification time
+        self.uid = 0            # user ID
+        self.gid = 0            # group ID
+        self.mode = 100644      # permissions
+        self.offset = 0         # offset in archive
+
+        if len(args) == 1:
+            if isinstance(args[0], basestring): # name only
+                self.name = args[0]
+            elif isinstance(args[0], list):
+                assert len(args[0]) == 8 # from raw headers, ignore magic
+                (self.name, self.mtime, self.uid,
+                self.gid, self.mode, self.size,
+                ignoreMagic, self.offset) = args[0]
+                self.size = int(self.size)
+        elif len(args) == 2:
+            self.name = args[0]
+            path = args[1]
+
+        self.name = kwargs.get("name", self.name)
+        path = kwargs.get("path", path)
+
+        if path:
+            try:
+                statinfo = os.stat(path)
+            except EnvironmentError, ex:
+                raise ArError("Cannot stat '%s': %s" % (path, ex))
+
+            self.name = self.name or os.path.basename(path) # + "/" # trailing slash is GNU way to allow spaces in name
+            self.size = str(statinfo.st_size)
+            self.mtime = str(statinfo.st_mtime)
+            self.uid = str(statinfo.st_uid)
+            self.gid = str(statinfo.st_gid)
+            self.mode = str(oct(stat.S_IMODE(statinfo.st_mode)))
+
+    def getHeader(self):
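+        # struct.pack NUL-pads the fixed-width fields, but ar headers are
+        # space-padded ASCII, hence the replace() below.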
+        return _FILE_HEADER_STRUCT.pack(self.name,# + "/", # trailing slash is GNU way to allow spaces in name
+                                        str(self.mtime),
+                                        str(self.uid),
+                                        str(self.gid),
+                                        str(self.mode),
+                                        str(self.size),
+                                        _FILE_MAGIC).replace("\x00", " ")
+
+    def __str__(self):
+        return ", ".join(["%s: %s" % (a, str(getattr(self, a))) for a in self.__slots__])
+
+class ArFile(object):
+    '''
+    Creating a new archive::
+
+        a = ArFile("new.ar", "w")
+        a.add("/some/file")
+        a.close()
+
+    Appending to an existing archive::
+
+        a = ArFile("old.ar", "a")
+        i = ArInfo("myfile.tar.gz")
+        a.addfile(i)
+        t = TarFile.open(mode="w|gz", fileobj=a)
+        t.close()
+        a.close()
+
+    Reading a member file directly (no tempfiles)::
+
+        a = ArFile("old.ar")
+        a.extractfile("my_member.file")
+        a.read()
+
+    '''
+
+    MODES = ("r", "w", "a")
+
+    def __init__(self, name=None, mode="r", fileobj=None):
+        if mode not in ArFile.MODES:
+            raise ArError("Mode is %s. Mode must be one of '%s'." % (mode, ", ".join(ArFile.MODES)))
+        self.files = {}
+        self.archive = None
+        self.mode = mode
+        self.lastOpen = None            # archive size before last addition
+        self.startSize = None
+        self.filesToRead = []           # names of files to read()
+        self.opened = False
+        if name or fileobj:
+            self.open(name, fileobj)
+
+    def open(self, name=None, fileobj=None):
+        if fileobj and hasattr(fileobj, "read"):
+            self.archive = fileobj
+        else:
+            try:
+                self.archive = open(name, {"r": "rb", "w": "w+b", "a": "r+b"}[self.mode])
+                self.opened = True
+            except IOError:
+                raise ArError("File '%s' could not be opened" % name)
+        if self.mode == "w":
+            self.archive.write(_GLOBAL_HEADER)
+        else:
+            self._readHeaders()
+
+    def close(self):
+        if self.filesToRead:
+            self.filesToRead = None
+        self._endFile()
+        if self.opened:
+            self.archive.close()
+
+    def add(self, name, arcname=None):
+        info = ArInfo(arcname, name)
+        with open(name, "rb") as f:
+            self.addfile(info, f)
+
+    def remove(self, name):
+        fileheader = self.files.get(name)
+        if not fileheader:
+            raise ArError("File '%s' not found from archive" % name)
+        lastFileOffset = max(self.files[n].offset for n in self.files.iterkeys())
+        if fileheader.offset == lastFileOffset: # last file
+            self.archive.truncate(fileheader.offset - _FILE_HEADER_STRUCT.size)
+        else:
+            archiveWrite = open(self.archive.name, "r+b")
+            archiveWrite.seek(fileheader.offset - _FILE_HEADER_STRUCT.size)
+            # members are padded to even length; skip the pad byte if present
+            size = int(fileheader.size)
+            nextFileOffset = fileheader.offset + size + (size % 2)
+            self.archive.seek(nextFileOffset)
+            self._copyFileData(self.archive, archiveWrite)
+            archiveWrite.truncate()
+            archiveWrite.close()
+
+        del self.files[name]
+
+    def addfile(self, arinfo, fileobj=None):
+        if self.mode == "r":
+            raise ArError("Cannot add files in read mode")
+        if not fileobj and not hasattr(self.archive, "seek"):
+            raise ArError("Direct writing requires a target with seek()")
+        if len(arinfo.name) > 16:
+            raise ArError("Long filenames are not supported")
+        if arinfo.name in self.files:
+            raise ArError("Cannot add file '%s' because it already exists" % arinfo.name)
+        self._endFile()
+
+        self.archive.seek(0, os.SEEK_END)
+        here = self.archive.tell()
+        self.archive.write(arinfo.getHeader())
+        dataOffset = self.archive.tell()
+        if fileobj:
+            self._copyFileData(fileobj, self.archive, arinfo.size)
+            if int(arinfo.size) % 2 == 1:
+                self.archive.write("\n")
+        else:
+            # allow user to write() directly, just leave a signal for
+            # _endFile to clean up afterwards
+            self.lastOpen = here
+
+        arinfo.offset = dataOffset
+        self.files[arinfo.name] = arinfo
+
+    def write(self, data):
+        self.archive.write(data)
+
+    def _endFile(self):
+        ''' Overwrite correct size to last header '''
+        if self.lastOpen:
+            end = self.archive.tell()
+            self.archive.seek(self.lastOpen)
+            hdata = [field.strip() for field in _FILE_HEADER_STRUCT.unpack(self.archive.read(_FILE_HEADER_STRUCT.size))]
+            fileheader = ArInfo(list(hdata) + [self.archive.tell()])
+            fileheader.size = end - fileheader.offset
+            self.archive.seek(self.lastOpen)
+            self.archive.write(fileheader.getHeader())
+            self.archive.seek(end)
+            if int(fileheader.size) % 2 == 1:
+                self.archive.write("\n")
+            self.lastOpen = None
+            self.files[fileheader.name] = fileheader
+
+    def _readHeaders(self):
+        '''
+        @TODO: use name record file
+        '''
+        if self.archive.read(len(_GLOBAL_HEADER)) != _GLOBAL_HEADER:
+            raise ArError("File is not an ar-archive: global header not matching")
+
+        headerdata = self.archive.read(_FILE_HEADER_STRUCT.size)
+        if not headerdata:
+            raise ArError("File corrupted: file header not found")
+        while headerdata:
+            hdata = [field.strip() for field in _FILE_HEADER_STRUCT.unpack(headerdata)]
+            fileheader = ArInfo(hdata + [self.archive.tell()])
+            if fileheader.name.startswith("/"):
+                raise ArError("Long filenames are not supported")
+            self.files[fileheader.name] = fileheader
+
+            skip = int(fileheader.size)
+            if skip % 2 == 1:
+                skip += 1
+            self.archive.seek(skip, os.SEEK_CUR)
+            headerdata = self.archive.read(_FILE_HEADER_STRUCT.size)
+
+    def getNames(self):
+        ''' Returns list of names in archive '''
+        return self.files.keys()
+
+    def extract(self, filename, path_or_fileobj=""):
+        fileheader = self.files.get(filename)
+        if not fileheader:
+            raise ArError("File '%s' not found from archive" % filename)
+        self._writeFile(fileheader, path_or_fileobj)
+
+    def extractall(self, path=""):
+        '''
+        Extract all members to directory I{path}
+        @param path: Directory
+        @type path: String
+        '''
+        assert os.path.isdir(path), "%s is not a directory" % path
+        for header in self.files.itervalues():
+            self._writeFile(header, path)
+
+    def _writeFile(self, fileheader, path_or_fileobj):
+        self.archive.seek(fileheader.offset)
+        if isinstance(path_or_fileobj, basestring):
+            with open(os.path.join(path_or_fileobj, fileheader.name), "wb") as dstFile:
+                self._copyFileData(self.archive, dstFile, fileheader.size)
+        else:
+            self._copyFileData(self.archive, path_or_fileobj, fileheader.size)
+
+    @staticmethod
+    def _copyFileData(src, dst, size=None, blocksize=32*1024):
+        ''' Copy data from source file to destination file '''
+        bytesread = 0
+        while size is None or bytesread < size:
+            if size and (bytesread + blocksize) >= size:
+                blocksize = size - bytesread
+            buf = src.read(blocksize)
+            bytesread += blocksize
+            if not buf:
+                break
+            dst.write(buf)
+
+    def extractfile(self, *filenames):
+        '''
+        Read member file(s) through a file-like interface. Mimics tarfile's
+        extractfile() by returning a handle, which is in fact just self.
+
+        @param filenames: Member files to read
+        @type filenames: String
+        @return: File-like object for reading
+        '''
+        try:
+            self.filesToRead = [self.files[f] for f in filenames]
+        except KeyError:
+            raise ArError("Cannot extractfile, no such archive member(s): '%s'" % ', '.join(filenames))
+        self.filesToRead.sort(key=lambda member: member.offset)
+        self._endFile()
+        self.seek(0)
+        return self
+
+    def seek(self, offset=0, whence=0):
+        if not self.filesToRead:
+            raise ArError("seek() supported only when reading files, use extractfile()")
+        if whence == 0:
+            if offset == 0:
+                self.archive.seek(self.filesToRead[0].offset)
+            else:
+                i = 0
+                while offset > self.filesToRead[i].size:
+                    if i+1 >= len(self.filesToRead):
+                        break
+                    else:
+                        offset = offset - self.filesToRead[i].size
+                        i = i + 1
+                self.archive.seek(self.filesToRead[i].offset + offset)
+        elif whence == 1:
+            self.seek(self.tell() + offset, 0)
+        elif whence == 2:
+            self.seek(sum([member.size for member in self.filesToRead]) + offset, 0)
+        else:
+            raise ArError("seek() got invalid value for whence")
+
+    def _tellCurrentMember(self):
+        if not self.filesToRead:
+            raise ArError("No files to read. Use extractfile()")
+        i = 0
+        while i+1 < len(self.filesToRead):
+            if self.archive.tell() < self.filesToRead[i].offset:
+                # Position is outside data sections
+                raise ArError("Internal error, invalid position in archive")
+            elif self.archive.tell() < self.filesToRead[i].offset + self.filesToRead[i].size:
+                break
+            else:
+                i = i + 1
+        return i
+
+    def tell(self):
+        i = self._tellCurrentMember()
+        return sum([member.size for member in self.filesToRead[:i]]) + self.archive.tell() - self.filesToRead[i].offset
+
+    def read(self, size=32*1024):
+        '''
+        Read member files sequentially in I{size} byte chunks. This can be done
+        after calling extractfile()
+
+        @param size: Bytes to read
+        @type size: Integer
+        '''
+        i = self._tellCurrentMember()
+        end = self.filesToRead[i].offset + self.filesToRead[i].size
+        # End of a member file
+        if self.archive.tell() + size >= end:
+            remainder = end - self.archive.tell()
+            leftOver = size - remainder
+            buf = self.archive.read(remainder)
+            if i+1 < len(self.filesToRead) and leftOver:
+                self.archive.seek(self.filesToRead[i+1].offset)
+                buf += self.read(leftOver)
+        # Middle of a file
+        else:
+            buf = self.archive.read(size)
+        return buf
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/debfile.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,248 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Deb-file manager
+#
+
+import os
+import tarfile
+import tempfile
+from collections import namedtuple
+import shutil
+
+from Blocks.arfile import ArFile, ArInfo, ArError, _FILE_HEADER_STRUCT
+import Blocks.gpg as gpg
+
+class DebError(Exception):
+    ''' Debian package error '''
+    def __init__(self, error):
+        Exception.__init__(self, "Debian package error: %s" % error)
+        self.error = error
+
+MetaFiles = namedtuple("ControlFiles", "control, copyright, md5sum")
+Diff = namedtuple("Diff", "new, removed, changed")
+VerifySignStatus = namedtuple("VerifySignStatus", "type, status")
+
+class DebFile(ArFile):
+    ''' Manage debian package files (.deb) '''
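+    # A minimal usage sketch (the file name is illustrative):
+    #   deb = DebFile("foo.deb")
+    #   meta = deb.metadata   # parsed control/copyright fields as a dict
+    #   sums = deb.md5sums    # file name -> md5 mapping
+    #   deb.close()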
+    def __init__(self, name=None, mode="r", fileobj=None):
+        try:
+            ArFile.__init__(self, name, mode, fileobj)
+        except ArError, ex:
+            raise DebError(ex.error)
+        self.datafiles = None
+        self._metadata = {}
+        self.md5sums = {}
+
+    def _initMetaData(self):
+        if not self._metadata:
+            with tempfile.SpooledTemporaryFile(1024*1024, prefix="debfile") as debtemp:
+                self.extract("control.tar.gz", debtemp)
+                debtemp.seek(0)
+                tar = tarfile.open(fileobj=debtemp)
+                metafiles = MetaFiles(tar.extractfile("control"), tar.extractfile("copyright"), tar.extractfile("md5sum"))
+                self._parseMetaFile(metafiles.control)
+                self._parseMetaFile(metafiles.copyright)
+                self.md5sums = dict(reversed(line.rstrip().split("  ", 1)) for line in metafiles.md5sum)
+
+    def _parseMetaFile(self, fileobj):
+        line = fileobj.readline()
+        part = line.partition(":")
+        metaContent = part[2].strip()
+        self._metadata[part[0]] = metaContent
+        for line in fileobj:
+            if line[0] in (" ", "\t"):
+                metaContent += "\n" + line.strip()
+            else:
+                self._metadata[part[0]] = metaContent
+                part = line.partition(":")
+                metaContent = part[2].strip()
+        # store the final field, including any trailing continuation lines
+        self._metadata[part[0]] = metaContent
+
+    @property
+    def metadata(self):
+        self._initMetaData()
+        return self._metadata.copy()
+
+    def compareMetaData(self, other, doNotCompare=None):
+        self._initMetaData()
+        other._initMetaData()
+
+        if doNotCompare is None:
+            doNotCompare = ["Installed-Size"]
+        keys = set(self._metadata.keys())
+        otherkeys = set(other.metadata.keys())
+        new = otherkeys - keys
+        removed = keys - otherkeys
+        same = otherkeys - new
+        changed = tuple(key for key in same
+                        if key not in doNotCompare and
+                        self._metadata[key] != other.metadata[key])
+        return Diff(tuple(new), tuple(removed), changed)
+
+    def compareFiles(self, other):
+        self._initMetaData()
+        other._initMetaData()
+
+        files = set(self.md5sums.keys())
+        otherfiles = set(other.md5sums.keys())
+        new = otherfiles - files
+        removed = files - otherfiles
+        same = otherfiles - new
+        changed = tuple(fname for fname in same if self.md5sums[fname] != other.md5sums[fname])
+        return Diff(tuple(new), tuple(removed), changed)
+
+    def compare(self, other):
+        ''' Returns tuple (metadiff, filediff) '''
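+        # Each Diff is a (new, removed, changed) tuple of names; e.g.
+        # metadiff.changed holds metadata keys whose values differ, and
+        # filediff.new holds files present only in `other`.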
+        metadiff = self.compareMetaData(other)
+        filediff = self.compareFiles(other)
+        return (metadiff, filediff)
+
+    def getDataPackageName(self):
+        '''
+        @return: Name of data package
+        '''
+        try:
+            datafile = [name for name in self.getNames() if name.startswith("data.tar")][0]
+        except IndexError:
+            raise DebError("No data file found")
+        formats = ("tar", "tar.gz", "tar.bz2")
+        if not datafile.endswith(formats):
+            raise DebError("Data file not in supported%s format. Format: %s" % (formats, datafile))
+        return datafile
+
+    def getControlPackageName(self):
+        '''
+        @return: Name of control package
+        '''
+        try:
+            controlfile = [name for name in self.getNames() if name.startswith("control.tar")][0]
+        except IndexError:
+            raise DebError("No data file found")
+        formats = ("tar", "tar.gz", "tar.bz2")
+        if not controlfile.endswith(formats):
+            raise DebError("Control file not in supported%s format. Format: %s" % (formats, controlfile))
+        return controlfile
+
+    def extractDataPackage(self, fileObj):
+        '''
+        Extracts data package containing all install files
+        fileObj can be either path or file like object
+        '''
+        self.extract(self.getDataPackageName(), fileObj)
+
+    def extractData(self, filename, path):
+        if not self.datafiles:
+            debtemp = tempfile.SpooledTemporaryFile(1024*1024, prefix="debfile")
+            self.extractDataPackage(debtemp)
+            debtemp.seek(0)
+            self.datafiles = tarfile.open(fileobj=debtemp)
+        self.datafiles.extract(filename, path)
+
+    def getControl(self):
+        '''
+        Return the contents of the control tarball members as a dictionary.
+        File names are keys.
+        '''
+        ret = {}
+        self.extractfile(self.getControlPackageName())
+        t = tarfile.open("r:gz", fileobj=self)
+        for member in ("control", "copyright", "md5sum"):
+            c = t.extractfile(member)
+            ret[member] = ""
+            while True:
+                buf = c.read()
+                if not buf:
+                    break
+                ret[member] = ret[member] + buf
+        t.close()
+        return ret
+
+    def signatureExists(self, signtype):
+        signfilename = "_gpg" + signtype
+        return signfilename in self.getNames()
+
+    def addSignature(self, signtype, gpgHome=None, gpgBatchMode=False, gpgPassfile=None):
+        self._endFile()
+
+        if self.signatureExists(signtype):
+            raise DebError("Signature type '%s' already exists" % signtype)
+
+        self.extractfile("debian-binary", "control.tar.gz", self.getDataPackageName())
+        try:
+            sig = gpg.sign(self, None, gpgHome, gpgBatchMode, gpgPassfile)
+        except gpg.GpgError, ex:
+            raise DebError(ex.output)
+        signfilename = "_gpg" + signtype
+        self.addfile(ArInfo(signfilename))
+        self.write(sig)
+
+    def removeSignature(self, signtype=None):
+        '''
+        Remove signature(s)
+        If signtype is None all signatures are removed
+        Assumes that signatures are at the end of the file
+        '''
+        gpgFileNames = [n for n in self.files.iterkeys() if n.startswith("_gpg")]
+        if signtype:
+            try:
+                self.remove("_gpg" + signtype)
+            except ArError:
+                raise DebError("Sign type '%s' not found" % signtype)
+        elif gpgFileNames:
+            signOffsets = [self.files[n].offset for n in gpgFileNames]
+            self.archive.truncate(min(signOffsets) - _FILE_HEADER_STRUCT.size)
+            for name in gpgFileNames:
+                del self.files[name]
+        else:
+            raise DebError("No signatures to remove")
+
+    def getSignatures(self):
+        return [n[4:] for n in self.getNames() if n.startswith("_gpg")]
+
+    def verifySignature(self, signtype=None, homedir=None):
+        ''' Verify signature(s) '''
+        verifyDataPath = None
+        temppath = None
+        status = []
+        try:
+            if signtype:
+                signfile = "_gpg" + signtype
+                signfiles = [signfile] if signfile in self.getNames() else []
+            else:
+                signfiles = [n for n in self.getNames() if n.startswith("_gpg")]
+            if not signfiles:
+                if signtype:
+                    raise DebError("Signature type '%s' not found." % signtype)
+                else:
+                    raise DebError("Signatures not found")
+
+            with tempfile.NamedTemporaryFile(prefix="deb_sign", delete=False) as debtemp:
+                for name in ("debian-binary", "control.tar.gz"):
+                    self.extract(name, debtemp)
+                self.extractDataPackage(debtemp)
+                verifyDataPath = debtemp.name
+            temppath = tempfile.mkdtemp(prefix="deb_sign_gpg")
+            for signname in signfiles:
+                typename = signname[4:]
+                self.extract(signname, temppath)
+                signFilePath = os.path.join(temppath, signname)
+                verifystatus = gpg.verify(signFilePath, verifyDataPath, homedir)
+                status.append(VerifySignStatus(typename, verifystatus))
+        finally:
+            if verifyDataPath:
+                os.remove(verifyDataPath)
+            if temppath:
+                shutil.rmtree(temppath)
+        return status
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/filelock.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,136 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Implements file locking mechanism
+#
+
+# TODO: Prevent lock file deletion
+
+import os
+import time
+import errno
+import logging
+import platform
+import tempfile
+import atexit
+
+if platform.system() == "Windows":
+    import win32api
+    import win32process
+
+class AcquireError(Exception):
+    ''' Acquire Error '''
+
+class FileLock(object):
+    ''' File lock '''
+    POLL_INTERVAL = 0.1
+
+    def __init__(self, path):
+        logging.log(logging.DEBUG, "FileLock path: %s", path)
+        self.path = path
+        self.locked = False
+        self.creatorPid = os.getpid()
+
+    def acquire(self, timeout=0):
+        '''Acquires lock on file
+
+        Timeout in milliseconds; 0 attempts once, None waits indefinitely
+        Returns True if successful, otherwise False
+        '''
+
+        if self.locked:
+            return True
+
+        sleepcount = 0
+        tmpname = atomicCreateTempFile(str(os.getpid()))
+        logging.log(logging.DEBUG, "FileLock acquire temp file name: %s", tmpname)
+        try:
+            while not self.locked:
+                try:
+                    # Create file with pid atomically
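+                    # NOTE: assumes the temp file and lock path share a
+                    # filesystem; on POSIX, rename over an existing file
+                    # silently succeeds, so the EEXIST branch below is
+                    # effectively Windows-only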
+                    os.rename(tmpname, self.path)
+                except OSError, ex:
+                    # Lock in use?
+                    if ex.errno == errno.EEXIST:
+                        with open(self.path) as lockfile:
+                            pid = int(lockfile.read())
+                        if processRunning(pid):
+                            timeElapsed = sleepcount * (self.POLL_INTERVAL * 1000)
+                            timeoutElapsed = timeout > 0 and timeElapsed >= timeout
+                            if timeout == 0 or timeoutElapsed:
+                                break
+                            else:
+                                time.sleep(self.POLL_INTERVAL)
+                                sleepcount += 1
+                        else:
+                            logging.info("Stale lock file detected with pid %s. Removing...", pid)
+                            os.remove(self.path)
+                        continue
+                    raise
+                self.locked = True
+                # Call release on exit
+                atexit.register(self.release)
+            return self.locked
+        finally:
+            if not self.locked:
+                os.remove(tmpname)
+
+    def release(self):
+        if self.locked and os.getpid() == self.creatorPid:
+            if os.path.exists(self.path):
+                os.remove(self.path)
+            self.locked = False
+
+PROCESS_QUERY_INFORMATION = 0x0400
+STILL_ACTIVE = 259
+def processRunning(pid):
+    if platform.system() == "Windows":
+        running = False
+        try:
+            handle = win32api.OpenProcess(PROCESS_QUERY_INFORMATION, True, pid)
+        except win32api.error:
+            pass
+        else:
+            # GetExitCodeProcess returns STILL_ACTIVE (259) while running
+            code = win32process.GetExitCodeProcess(handle)
+            win32api.CloseHandle(handle)
+            running = code == STILL_ACTIVE
+        return running
+    else:
+        try:
+            os.kill(pid, 0)
+        except OSError:
+            return False
+        return True
+
+def atomicCreateTempFile(data):
+    tmpfile = tempfile.NamedTemporaryFile(bufsize=0, delete=False)
+    try:
+        tmpfile.write(data)
+        tmpfile.flush()
+        os.fsync(tmpfile.fileno())
+        tmpfile.close()
+        return tmpfile.name
+    except Exception: # cleanup
+        tmpfile.close()
+        os.remove(tmpfile.name)
+        raise
+
+if __name__ == "__main__":
+    fl = FileLock(r"c:\temp\test123")
+    print "Acquiring..."
+    print fl.acquire(5000)
+    raw_input("press enter")
+    fl.release()
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/gpg.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,132 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Wrapper for gpg
+#
+
+''' gpg command wrapper '''
+
+import sys
+from subprocess import Popen, PIPE
+from collections import namedtuple
+import logging
+
+_GNUPGPREFIX = "[GNUPG:] "
+_GNUPGGOODSIG = "GOODSIG"
+_GNUPGBADSIG = "BADSIG"
+_GNUPGNOPUBKEY = "NO_PUBKEY"
+_GNUPGKEYEXPIRED = "KEYEXPIRED"
+_GNUPGREVKEYSIG = "REVKEYSIG"
+
+class GpgStatusCode(object):
+    VERIFIED, BADSIG, NO_PUBKEY, KEYEXPIRED, REVKEYSIG = range(5)
+
+VerifyInfo = namedtuple("VerifyInfo", "name")
+VerifyStatus = namedtuple("VerifyStatus", "code, info")
+
+# Status lines not handled here: VALIDSIG, NODATA
+
+class GpgError(Exception):
+    """ Gpg exited with error """
+    def __init__(self, errorcode, output):
+        Exception.__init__(self, "Gpg failed with error code %s" % errorcode)
+        self.errorcode = errorcode
+        self.output = output.strip()
+
+def sign(sourcePath, outputPath, homedir=None, batch=False, passfile=None):
+    '''
+    Create a gpg signature of a file.
+
+    sign() has two modes: file and pipe. File: Specify sourcePath and outputPath
+    as strings to create signature of file sourcePath in file outputPath. Pipe:
+    Specify sourcePath as readable object and outputPath as None. The signature
+    is the return value.
+
+    Use a combination of batch, homedir and passfile to eliminate the need for
+    interaction.
+
+    File mode::
+        gpg.sign("/my/file", "/my/file.gpg")
+
+    Pipe mode without interaction::
+        f = open("/my/file", "rb")
+        key = gpg.sign(f, None, "/my/passwordless/keydir", True)
+        f.close()
+
+    @param sourcePath: Path of the file to sign, or pipe to read the file from
+    @type sourcePath: String or file-like object
+    @param outputPath: Path to write signature to, or None in pipe mode
+    @type outputPath: String or None
+    @param homedir: Directory to read keyfile from
+    @type homedir: String
+    @param batch: Whether to use I{--batch} with gpg command
+    @type batch: Boolean
+    @param passfile: Optional passphrase file to use with the key
+    @type passfile: String
+    '''
+    cmdstr = "gpg -abs"
+    if homedir:
+        cmdstr += ' --homedir "%s"' % homedir
+    if batch:
+        cmdstr += ' --batch'
+        pStdin = None
+    else:
+        pStdin = sys.stdin
+    if passfile:
+        cmdstr += ' --passphrase-file "%s"' % passfile
+
+    if isinstance(outputPath, basestring) and isinstance(sourcePath, basestring):
+        cmdstr += ' -o "%s" "%s"' % (outputPath, sourcePath)
+        p = Popen(cmdstr, shell=True, stdin=pStdin, stdout=PIPE, stderr=PIPE)
+    else:
+        assert (sourcePath and hasattr(sourcePath, "read")), "sourcePath not file-like object!"
+        p = Popen(cmdstr, shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE)
+        blockSize = 32*1024
+        buf = sourcePath.read(blockSize)
+        while buf:
+            try:
+                p.stdin.write(buf)
+            except IOError:
+                break
+            buf = sourcePath.read(blockSize)
+    (stdoutput, stderror) = p.communicate()
+    if stderror is None:
+        stderror = ""
+    if p.returncode != 0:
+        raise GpgError(p.returncode, stderror)
+    return stdoutput
+
+def verify(signFilePath, signedFilePath, homedir=None):
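+    '''
+    Verify a detached signature using gpgv.
+
+    Returns a VerifyStatus for the first recognized gpgv status line, or
+    None if gpgv exits successfully without emitting one.
+    '''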
+    cmdstr = ('gpgv --keyring pubring.gpg --ignore-time-conflict --status-fd 2 %s "%s" "%s"' %
+        ('--homedir "%s"' % homedir if homedir else "", signFilePath, signedFilePath))
+    logging.debug("GPG running: %s", cmdstr)
+    p = Popen(cmdstr, shell=True, stdin=sys.stdin, stdout=PIPE, stderr=PIPE)
+    (stdoutput, stderror) = p.communicate()
+    logging.debug("GPG stdout: %s", stdoutput)
+    logging.debug("GPG stderror (status): %s", stderror)
+    for line in stderror.splitlines():
+        if line.startswith(_GNUPGPREFIX):
+            line = line.replace(_GNUPGPREFIX, "", 1)
+            (statusString, _, info) = line.partition(" ")
+            status = {_GNUPGGOODSIG: GpgStatusCode.VERIFIED,
+                      _GNUPGBADSIG: GpgStatusCode.BADSIG,
+                      _GNUPGNOPUBKEY: GpgStatusCode.NO_PUBKEY,
+                      _GNUPGKEYEXPIRED: GpgStatusCode.KEYEXPIRED,
+                      _GNUPGREVKEYSIG: GpgStatusCode.REVKEYSIG}.get(statusString)
+            if status is not None:
+                name = info.partition(" ")[2]
+                return VerifyStatus(status, VerifyInfo(name))
+    if p.returncode != 0:
+        raise GpgError(p.returncode, stderror)
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/Blocks/singleinstance.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,84 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Create only a single instance of an application/process
+#
+
+import os
+import errno
+import tempfile
+import logging
+import platform
+
+if platform.system() == "Windows":
+    from win32event import CreateMutex, WaitForSingleObject, ReleaseMutex, INFINITE
+    from win32api import CloseHandle, GetLastError
+    from winerror import ERROR_ALREADY_EXISTS
+
+from Blocks.filelock import FileLock
+
+class SingleInstanceWindows(object):
+    ''' Single instance implemented with mutex '''
+
+    def __init__(self, identifier, path=False, lockFileName=None):
+        ''' lockFileName not used in this implementation '''
+        if path:
+            identifier = identifier.replace("\\", "/")
+        self._identifier = "Global\\" + identifier
+        logging.log(logging.DEBUG, "SingleInstanceWindows mutex identifier: %s", self._identifier)
+        self._mutex = CreateMutex(None, True, self._identifier)
+        self.mutexError = GetLastError()
+
+    def alreadyRunning(self):
+        return (self.mutexError == ERROR_ALREADY_EXISTS)
+
+    def waitRelease(self, timeout=None):
+        timeout = INFINITE if timeout is None else timeout
+        if self.alreadyRunning():
+            WaitForSingleObject(self._mutex, timeout)
+
+    def release(self):
+        if self._mutex:
+            ReleaseMutex(self._mutex)
+            CloseHandle(self._mutex)
+
+class SingleInstanceFilelock(FileLock):
+    ''' Single instance implemented with file locking '''
+    def __init__(self, identifier, path=False, lockFileName=".lock"):
+        if path:
+            try:
+                os.makedirs(identifier)
+            except OSError, ex:
+                # Only ignore "directory already exists"
+                if ex.errno != errno.EEXIST:
+                    raise
+            lockfile = os.path.join(identifier, lockFileName)
+        else:
+            lockfile = os.path.join(tempfile.gettempdir(), identifier)
+        FileLock.__init__(self, lockfile)
+
+    def alreadyRunning(self):
+        return not self.acquire()
+
+    def waitRelease(self, timeout=None):
+        self.acquire(timeout)
+
+if platform.system() == "Windows":
+    SingleInstance = SingleInstanceWindows
+else:
+    SingleInstance = SingleInstanceFilelock
+
+if __name__ == "__main__":
+    si = SingleInstance("c:\\temp", True)
+    print "Running:", si.alreadyRunning()
+    raw_input("Enter to release/start waiting")
+    si.waitRelease()
+    raw_input("Enter to quit")
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/SymbianUtils/Evalid.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,293 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Generate checksums for different types of files
+#
+
+import sys
+import os
+import re
+import struct
+import subprocess
+import platform
+try:
+    from hashlib import md5
+except ImportError:
+    import md5
+
+import SymbianUtils
+
+class Evalid(object):
+    '''
+    Provide some of the functionality found in epoc32/tools/EvalidCompare.pm
+
+    generateSignature() - use appropriate method to calculate checksum for a file
+    getUid() - extract Uid from file
+    '''
+
+    class ExternalError(SymbianUtils.SymbianUtilsError):
+        """ An external utility exited with an error """
+
+    ext = ".exe" if platform.system() == "Windows" else ""
+    binPath = os.path.normpath(os.path.join(os.path.dirname(__file__), "bin"))
+    NM = os.path.join(binPath, "nm" + ext)
+    ELFDUMP = os.path.join(binPath, "elfdump" + ext)
+    ELF2E32 = os.path.join(binPath, "elf2e32" + ext)
+    PE_DUMP = os.path.join(binPath, "pe_dump" + ext)
+
+    ELF_DLL_NAME = re.compile(r"#<DLL>(\S+\.\S+)#<\\DLL>")
+    ELF_DEBUG = re.compile(r"^\.(rel\.)?debug_")
+    ELF_P_HEAD = re.compile(r"^\tProgram header offset.*$")
+    ELF_S_HEAD = re.compile(r"^\tSection header offset.*$")
+    PRECOMP_IGNORE = re.compile(r'^# \d+ ".*"( \d)?$')
+    E32_EMPTY = re.compile("Time Stamp:|E32ImageFile|Header CRC:")
+    E32_LOWER = re.compile("imports from")
+    INTEL_OBJECTPATH_WIN  = re.compile(r"\.\.\\[^(]*\\")
+    INTEL_OBJECTPATH_NIX  = re.compile("\.\.\/[^(]*\/")
+    INTEL_DLLTOOL = re.compile("^(.+ (_head|_))\w+_(EPOC32_\w+(_LIB|_iname))$", re.I)
+
+    @classmethod
+    def typeLookup(cls, type):
+        '''
+        Return the internally used identifier string for the type
+        @todo: Warning
+        @param type: The type
+        @type type: String
+        @return: Internally used type identifier
+        @rtype: String
+        '''
+        if type in ("e32", "default", "elf", "preprocessed_text", "intel", "intel_pe"):
+            return type
+        elif type in ("file", "symbol"):
+            return "default"
+        elif type in ("staticlib", "dso"):
+            return "elf"
+        elif type in ("exe", "plugin", "dll"):
+            return "e32"
+        else:
+            #sys.stderr.write("warning - unknown hashtype %s.\n"%type)
+            return "default"
+
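+    # Examples: typeLookup("dll") -> "e32", typeLookup("dso") -> "elf",
+    # typeLookup("symbol") -> "default"
+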
+    @classmethod
+    def generateSignature(cls, path, fileType):
+        '''
+        Generic dispatcher method for file types. Use the appropriate method for
+        I{type} to generate the signature for file at I{path}.
+
+        @param path: The path where the file is located
+        @type path: String
+        @return: checksum
+        @rtype: String
+        '''
+        if not isinstance(path, basestring):
+            raise TypeError, "path must be a string"
+        if not path:
+            raise ValueError, "path must not be zero length"
+        path = os.path.normpath(path)
+        fileType = cls.typeLookup(fileType)
+        methodName = "sig_" + fileType
+        if hasattr(cls, methodName):
+            method = getattr(cls, methodName)
+            return method(path)
+        else:
+            raise NotImplementedError("No signature generator for type %s" % fileType)
+
+    @staticmethod
+    def getUid(index, file):
+        '''Get UID of file
+
+        The three UIDs are the first three little-endian longs of the file
+        header: UID1 in bytes 0-3, UID2 in 4-7, UID3 in 8-11.
+
+        @param index: Which UID (1, 2 or 3)
+        @param file: Absolute path
+        @return: UID, or None if the file is too short to contain UIDs
+        @rtype: Integer
+        '''
+        if index not in (1, 2, 3):
+            raise ValueError("Index can only be one of 1, 2 or 3")
+        if os.path.getsize(file) < 12:
+            return None
+        start = (index-1) * 4
+        finish = start + 4
+        f = open(file, "rb")
+        head = f.read(12)
+        f.close()
+        return struct.unpack("<l", head[start:finish])[0]
+
+    @staticmethod
+    def getMd5():
+        '''A convenience method to use the appropriate library regardless of Python
+        version. Maintain compatibility while using hashlib whenever possible.
+
+        @return: md5 object
+        @rtype: md5
+        '''
+        if hasattr(md5, "new"):
+            return md5.new()
+        else:
+            return md5()
+
+    # Signatures for various formats
+
+    @classmethod
+    def sig_e32(cls, path):
+        '''
+        Return the checksum of significant parts using elf2e32
+
+        @param path: The absolute path
+        @type path: String
+        @return: checksum
+        @rtype: String
+        '''
+        bin = cls.ELF2E32 + " --dump --e32input="
+        m = cls.getMd5()
+        fo = os.popen(bin+path, "r", -1)
+        for line in fo:
+            if cls.E32_EMPTY.search(line):
+                line = ""
+            if cls.E32_LOWER.search(line):
+                line = line.lower()
+            m.update(line)
+        if fo.close():
+            raise cls.ExternalError("elf2e32 failed at %s" % path)
+        return m.hexdigest()
+
+    @classmethod
+    def sig_default(cls, path):
+        '''
+        Calculate the checksum of the file without filtering.
+
+        @param path: The absolute path
+        @type path: String
+        @return: checksum
+        @rtype: String
+        '''
+        m = cls.getMd5()
+        f = open(path, "rb")
+        while True:
+            buf = f.read(32*1024)
+            if not buf:
+                break
+            m.update(buf)
+        f.close()
+        return m.hexdigest()
+
+    @classmethod
+    def sig_elf(cls, path):
+        '''
+        Return the checksum of significant parts using elfdump
+
+        @param path: The absolute path
+        @type path: String
+        @return: checksum
+        @rtype: String
+        '''
+        bin = cls.ELFDUMP + " -i "
+        def firstGroupToLc(match):
+            return ("#<DLL>" + match.group(1).lower() + "#<\\DLL>")
+        m = cls.getMd5()
+        fo = os.popen(bin+path, "r", -1)
+        for line in fo:
+            if cls.ELF_P_HEAD.match(line):
+                line = "Program header offset\n"
+            if cls.ELF_S_HEAD.match(line):
+                line = "Section header offset\n"
+            line = cls.ELF_DLL_NAME.sub(firstGroupToLc, line)
+            if cls.ELF_DEBUG.match(line):
+                line = ""
+            #sys.stderr.write(line)
+            m.update(line)
+        if fo.close():
+            raise cls.ExternalError("elfdump failed at %s" % path)
+        return m.hexdigest()
+
+    @classmethod
+    def sig_preprocessed_text(cls, path):
+        '''
+        Return the checksum of significant parts of preprocessed text
+
+        @param path: The absolute path
+        @type path: String
+        @return: checksum
+        @rtype: String
+        '''
+        m = cls.getMd5()
+        f = open(path, "rb")
+        for line in f:
+            line = line.replace("\r\n", "\n")
+            if cls.PRECOMP_IGNORE.search(line):
+                line = "\n"
+            m.update(line)
+        f.close()
+        return m.hexdigest()
+
+    @classmethod
+    def sig_intel_pe(cls, path):
+        '''
+        Return the checksum of significant parts of pe_dump output
+
+        @param path: The absolute path
+        @type path: String
+        @return: checksum
+        @rtype: String
+        '''
+        m = cls.getMd5()
+        fo = os.popen("%s %s" % (cls.PE_DUMP, path), "r", -1)
+        for line in fo:
+            m.update(line)
+        if fo.close():
+            raise cls.ExternalError("pe_dump failed at %s" % path)
+        return m.hexdigest()
+
+
+    @classmethod
+    def sig_intel(cls, path):
+        '''
+        Return the checksum of significant parts using nm
+
+        @param path: The absolute path
+        @type path: String
+        @return: checksum
+        @rtype: String
+        '''
+        m = cls.getMd5()
+        try:
+            s = subprocess.Popen([cls.NM, "--no-sort", path], env=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+            out, err = s.communicate()
+        except OSError, e:
+            raise cls.ExternalError, "nm failed at %s: %s" % (path, str(e))
+        if s.returncode != 0:
+            raise cls.ExternalError, "nm failed at %s: %s" % (path, err)
+        # keepends=True so the ":\n" test below matches and the digest agrees
+        # with the line-by-line file reads used by the other sig_* methods
+        for line in out.splitlines(True):
+            # no need for regexps here
+            if line.endswith(":\n") \
+                or line.startswith("BFD: ") \
+                or cls.INTEL_OBJECTPATH_WIN.search(line) \
+                or cls.INTEL_OBJECTPATH_NIX.search(line):
+                line = "\n"
+            match = cls.INTEL_DLLTOOL.search(line)
+            if match:
+                line = "%s_..._%s" % (match.groups()[0], match.groups()[2])
+                line = line.upper()
+            m.update(line)
+
+        return m.hexdigest()
+
+def main():
+    path = sys.argv[1]
+    ftype = sys.argv[2]
+    print Evalid.generateSignature(path, ftype)
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/SymbianUtils/Readelf.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,63 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Extract library API from ELF-files
+#
+
+import os
+import sys
+import re
+
+from SymbianUtils import SymbianUtilsError
+
+class Readelf(object):
+    '''
+    Methods for extracting a library API. Blocks.Packaging.ComponentBuilder.Rule
+    and Blocks.Packaging.PackageModel.API make use of Readelf.
+    '''
+    class ConfigurationError(SymbianUtilsError):
+        ''' Missing required binaries '''
+
+    class Failed(SymbianUtilsError):
+        ''' The external binary returned an error '''
+
+    if sys.platform == "win32":
+        readelf = os.path.normpath(os.path.join(os.path.dirname(__file__), "bin", "readelf.exe"))
+    elif "linux" in sys.platform:
+        readelf = "/usr/bin/readelf"
+
+    if not os.path.isfile(readelf):
+        raise ConfigurationError, "Cannot find readelf command (%s)" % readelf
+
+    @classmethod
+    def getInterface(cls, path):
+        '''
+
+        @param path: The file to inspect
+        @return: The interface - relevant parts from the symbol table as read from file by I{readelf}
+        @rtype: Dictionary
+        '''
+        path = os.path.normpath(path)
+        api = {}
+        binary = cls.readelf + " -Ws " + path + " 2>&1"
+        rex = re.compile(r'^\s*(\d+):\s+\d+\s+\d+\s+FUNC\s+GLOBAL\s+DEFAULT\s+\d+\s+(.*)@@.*')
+        fo = os.popen(binary, "r", -1)
+        for line in fo:
+            match = rex.search(line)
+            if match:
+                api[match.group(1)] = match.group(2)
+        if fo.close():
+            # "Not an ELF file", "No such file", etc.
+            raise cls.Failed("readelf unable to get interface for '%s': %s" % (path, line.strip()))
+        return api
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/SymbianUtils/__init__.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,25 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Symbian utils
+#
+
+import os
+import sys
+
+class SymbianUtilsError(Exception):
+    ''' Parent class for SymbianUtils exceptions '''
+
+from SymbianUtils.Evalid import Evalid
+from SymbianUtils.Readelf import Readelf
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/SymbianUtils/bin/README.txt	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,19 @@
+Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+All rights reserved.
+This component and the accompanying materials are made available
+under the terms of "Eclipse Public License v1.0"
+which accompanies this distribution, and is available
+at the URL "http://www.eclipse.org/legal/epl-v10.html".
+
+
+Where to Get Needed Binaries
+----------------------------
+
+You need to copy these files here:
+
+elf2e32.exe, elfdump.exe and pe_dump.exe:
+From Symbian OS Build Package (http://developer.symbian.org/wiki/index.php/Build)
+
+readelf.exe and nm.exe:
+From binutils for Windows, available at
+http://sourceforge.net/projects/mingw/files
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/src/plugins/filter_blocks.py	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,318 @@
+#
+# Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Nokia Corporation - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Blocks plugin for raptor
+#
+
+""" Extract linker output to link-<time>.log """
+
+import os
+import sys
+import time
+import re
+import xml.sax
+import xml.sax.saxutils
+from getopt import getopt, GetoptError
+
+import generic_path
+import filter_interface
+from raptor_meta import BldInfFile
+
+class LinkInfoFilter(filter_interface.Filter):
+    ''' Use of LinkInfoReader to collect output. '''
+
+    def __init__(self):
+        self.epocroot = str(generic_path.Path(os.environ.get("EPOCROOT", "")))
+        assert self.epocroot, "Cannot proceed without EPOCROOT"
+        assert os.path.isdir(self.epocroot), "EPOCROOT %s is not a directory" % self.epocroot
+
+    def open(self, raptor_instance):
+        """Open a log file for the various I/O methods to write to."""
+        self.logDir = str(generic_path.Path(os.environ.get("FILTER_OUT_DIR", "")))
+        if not (self.logDir and os.path.isdir(self.logDir)):
+            self.logDir = str(generic_path.Path(os.environ.get("SBS_BUILD_DIR", "")))
+        if not (self.logDir and os.path.isdir(self.logDir)):
+            if os.path.isdir(os.path.join(self.epocroot, "epoc32", "build")):
+                self.logDir = str(generic_path.Path(os.path.join(self.epocroot, "epoc32", "build")))
+            else:
+                self.logDir = str(generic_path.Path(self.epocroot))
+        self._linkTimeStamp = time.strftime("%Y-%m-%d-%H-%M-%S")
+        self._linkFile = os.path.join(self.logDir, "link." + self._linkTimeStamp + ".log")
+        self._linkOut = open(self._linkFile, "wb")
+        self._linkParser = xml.sax.make_parser()
+        self._linkInfoReader = LinkInfoReader(self.epocroot)
+        self._linkParser.setContentHandler(self._linkInfoReader)
+        return True
+
+    def write(self, text):
+        """Write text into the log file"""
+        try:
+            self._linkParser.feed(text)
+        except Exception, e:
+            sys.stderr.write("LinkInfoReader failed: %s. Last: %s" % (e, text))
+            return False
+        return True
+
+    def summary(self):
+        """Write Summary"""
+        sys.stdout.write("Writing linker data to %s\n"%self._linkFile)
+        return True
+
+    def close(self):
+        """Close the log file"""
+        try:
+            self._linkParser.close()
+            self._linkInfoReader.writeXml(out=self._linkOut)
+            self._linkOut.close()
+        except Exception:
+            return False
+        return True
+
+class ArmlinkCmd(object):
+    """
+    Read SBS output log to extract link info
+
+    Example::
+        /opt/rvct-2.2.616/bin/armlink
+            --diag_suppress 6331
+            --bpabi
+            --reloc
+            --no_scanlib
+            --datacompressor=off
+            --debug
+            --dll
+            --split
+            --rw-base 0x400000
+            --symver_soname
+            --soname 'balclient{000a0000}[101fb5dd].dll'
+            --entry _E32Dll '/data/build/sos94_09wk04/epoc32/release/armv5/udeb/edll.lib(uc_dll_.o)'
+            -o /data/build/sos94_09wk04/epoc32/release/armv5/udeb/balclient.dll.sym
+            --symbols
+            --list /data/build/sos94_09wk04/epoc32/release/armv5/udeb/balclient.dll.map
+            --via /data/build/sos94_09wk04/epoc32/build/framework/c_1b392bd557300014/balclient_dll/armv5/udeb/balclient_udeb_objects.via
+            /data/build/sos94_09wk04/epoc32/release/armv5/udeb/edllstub.lib
+            /data/build/sos94_09wk04/epoc32/release/armv5/udeb/usrt2_2.lib
+            /data/build/sos94_09wk04/epoc32/release/armv5/lib/euser.dso
+            /data/build/sos94_09wk04/epoc32/release/armv5/lib/drtaeabi.dso
+            /data/build/sos94_09wk04/epoc32/release/armv5/lib/dfpaeabi.dso
+            /data/build/sos94_09wk04/epoc32/release/armv5/lib/dfprvct2_2.dso
+            /data/build/sos94_09wk04/epoc32/release/armv5/lib/drtrvct2_2.dso
+            '/opt/rvct-2.2.616/lib/armlib/h_t__uf.l(switch8.o)'
+    """
+
+    @staticmethod
+    def getOpts(command):
+        if not command:
+            raise ValueError("Not a command: '%s'" % command)
+        if not os.path.basename(command.split()[0]) in ("armlink", "armlink.exe"):
+            raise ValueError("Not an armlink command '%s'" % command)
+        try:
+            # annoying space-separated value for --entry
+            parts = command.strip().split()[1:]
+            for i in range(len(parts)):
+                if parts[i] == "--entry":
+                    parts[i+1] = "%s %s" % (parts[i+1], parts[i+2])
+                    del parts[i+2]
+                    break
+            opts, leftovers = getopt(parts,
+                                "ho:",
+                                ["help",
+                                 "output=",
+                                 "via=",
+                                 "partial",
+                                 "scatter=",
+                                 "ro-base=",
+                                 "rw-base=",
+                                 "bestdebug",
+                                 "datacompressor=",
+                                 "nodebug",
+                                 "debug",
+                                 "entry=",
+                                 "libpath",
+                                 "userlibpath",
+                                 "nolocals",
+                                 "noremove",
+                                 "callgraph",
+                                 "feedback=",
+                                 "info=",
+                                 "map",
+                                 "symbols",
+                                 "xref",
+                                 # these are not listed in -h but found in commands
+                                 "diag_suppress=",
+                                 "bpabi",
+                                 "reloc",
+                                 "no_scanlib",
+                                 "split",
+                                 "dll",
+                                 "symver_soname",
+                                 "soname=",
+                                 "list="
+                                 ])
+            return (opts, leftovers)
+        except GetoptError:
+            return None
+
+    @staticmethod
+    def getLibraries(command):
+        '''
+        Yield libraries from armlink command line string.
+        @param command: Armlink command
+        @type command: String
+        '''
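+        # For the armlink command shown in the class docstring this yields
+        # paths like "epoc32/release/armv5/lib/euser.dso"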
+        candidates = []
+        # discard recognized options, keeping positional arguments
+        parts = ArmlinkCmd.getOpts(command)
+        if parts:
+            candidates = parts[1]
+        else:
+            # fall back
+            candidates = [part for part in command.split()[1:] if not part.startswith("-")]
+        wanted = [re.compile(r"epoc32/release/armv5/lib/.+.dso$", re.IGNORECASE)]
+        for file in candidates:
+            for regex in wanted:
+                match = regex.search(file)
+                if match:
+                    yield match.group()
+
+    @staticmethod
+    def getTarget(command):
+        '''
+        Get target (output) file from command line string.
+        @param command: Armlink command
+        @type command: String
+        '''
+        parts = ArmlinkCmd.getOpts(command)
+        if parts:
+            opts = parts[0]
+            for k, v in opts:
+                if k in ("-o", "--output"):
+                    regex = re.compile(r"epoc32/release/armv5/[^/]+/.+.sym$", re.IGNORECASE)
+                    match = regex.search(v)
+                    if match:
+                        return match.group()
+
+
+
+class LinkInfoReader(xml.sax.ContentHandler):
+    '''
+    Read Raptor output and collect infs->targets->library dependencies and other
+    associated bits.
+
+    Link info can be spewed as XML using writeXml().
+
+    Stores link info in a dictionary::
+        self.infTargets = {"path/to.inf": [
+                {'component': u'foo',
+                 'platform': u'armv5',
+                 'path': u'epoc32/foo/foo.dll',
+                 'libraries': [u'epoc32/release/armv5/lib/foo.dso', u'epoc32/release/armv5/lib/bar.dso']
+                }
+            ]
+        }
+    '''
+
+    def __init__(self, epocroot=None):
+        self.epocroot = epocroot
+        self.cdata = []
+        self.infTargets = {}    # where the data ends up.
+                                # keys inf paths, values target lists
+        self.isLinkRecipe = False
+        self.currentInfPath = ""
+        self.currentTarget = ""
+        self.currentImportLib = ""
+
+    def characters(self, data):
+        self.cdata.append(data)
+
+    def startElement(self, tag, attributes):
+        # The last tag was a link recipe, now expect cdata to contain the link
+        # command string.
+        if self.isLinkRecipe:
+            cdString = "".join(self.cdata).strip()
+            for line in [ line for line in cdString.split("\n") if line.startswith("+") ]:
+                line = line.lstrip(" +")
+                # Lots of junk could be included, for our purposes it can be ignored
+                try:
+                    for dso in ArmlinkCmd.getLibraries(line):
+                        self.currentTarget["libraries"].append(dso)
+                except ValueError:
+                    pass
+#                    self.logger.debug("Line in link recipe could not be parsed by ArmlinkCmd: %s" % line)
+            self.infTargets[self.currentInfPath].append(self.currentTarget)
+            self.isLinkRecipe = False
+            self.currentInfPath = ""
+            self.currentTarget = None
+        self.cdata = []
+        if tag == "recipe":
+            recipeName = attributes.get("name")
+            if recipeName == "linkandpostlink":
+                self.isLinkRecipe = True
+                self.currentInfPath = attributes.get("bldinf")
+                target = attributes.get("target")
+                # INF path is absolute, all others are relative to epocroot
+                if self.currentInfPath not in self.infTargets:
+                    self.infTargets[self.currentInfPath] = []
+                if self.epocroot:
+                    rootIndex = len(self.epocroot)
+                else:
+                    rootIndex = target.find("epoc32")
+                    if rootIndex == -1:
+                        raise ValueError, "Cannot guess epocroot from a path without 'epoc32': '%s'" % attributes.get("target")
+                    self.epocroot = target[:rootIndex]
+                # lstrip required - self.epocroot from init arg may not have trailing separator
+                # Raptor always outputs Unix style paths -> hard coded separator in lstrip.
+                path = target[rootIndex:].lstrip("/")
+                if path.endswith(".sym"):
+                    path = path[:-4]
+
+                targetFileRoot = os.path.splitext(os.path.basename(target))[0]
+                libFileRoot = os.path.splitext(os.path.basename(self.currentImportLib))[0]
+                if targetFileRoot == libFileRoot:
+                    self.currentImportLib = ""
+
+                self.currentTarget = {
+                                      "path": path,
+                                      "component": attributes.get("component"),
+                                      "platform": attributes.get("platform"),
+                                      "libraries": [],
+                                      "dso": self.currentImportLib[rootIndex:].lstrip("/")
+                                      }
+            elif recipeName == "importlibtarget":
+                self.currentImportLib = attributes.get("target")
+
+    def writeXml(self, out=sys.stdout, encoding="iso-8859-1"):
+        generator = xml.sax.saxutils.XMLGenerator
+
+        class LinkWriter(generator):
+            def writeLinkData(self, infsAndTargets, epocroot):
+                generator.startElement(self, u"linkdata", {"epocroot": epocroot})
+                for inf, targets in infsAndTargets.items():
+                    generator.startElement(self, u"inf", {"path": inf, "ddir": BldInfFile.outputPathFragment(inf)})
+                    for target in targets:
+                        generator.startElement(self, u"target", {"path": target["path"],
+                                                                 "platform": target["platform"],
+                                                                 "component": target["component"],
+                                                                 "dso": target.get("dso", "")})
+                        generator.startElement(self, u"libraries", {})
+                        for lib in target["libraries"]:
+                            generator.startElement(self, u"lib", {})
+                            generator.characters(self, lib)
+                            generator.endElement(self, u"lib")
+                        generator.endElement(self, u"libraries")
+                        generator.endElement(self, u"target")
+                    generator.endElement(self, u"inf")
+                generator.endElement(self, u"linkdata")
+        l = LinkWriter(out, encoding)
+        l.writeLinkData(self.infTargets, self.epocroot)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/releasing/blocks/framework/symbian-version	Thu Sep 02 15:02:14 2010 +0800
@@ -0,0 +1,1 @@
+0.5.2
\ No newline at end of file