# HG changeset patch # User MattD # Date 1268742484 0 # Node ID f6ae410bd493eceb90808372b44d40872d76f50d # Parent 4e09c8ccae86e0402b6664b6b326fa34d088c8e1# Parent 842a773e65f2d62d7306b406cfe738ea8e84fcd6 Catchup merge. diff -r 842a773e65f2 -r f6ae410bd493 .hgtags --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/.hgtags Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,8 @@ +bd0e6fdb18f45af981b6b1014a7d27ec1f76e58e PDK_2.0.0 +62971d19bb3360ed37e47c76706ffeb3560a0944 PDK_3.0.a +2193253638157ec46566dcfa9d060ded112d627a PDK_3.0.b +718b119bed63ae5b12b9beda8e02576232eaa131 PDK_3.0.c +718b119bed63ae5b12b9beda8e02576232eaa131 PDK_2.0.1 +61b66a9de9154bf58b1fc75210d07bc5c5c1c678 PDK_2.0.2 +c5817fd289eca8bf8f86c29d77c175c6392d440a PDK_3.0.d +c63eca238256f2129b2416f7023930bb18ca5fec PDK_3.0.e diff -r 842a773e65f2 -r f6ae410bd493 bzcsv2mw/bzcsv2mw.pl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/bzcsv2mw/bzcsv2mw.pl Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,108 @@ +#!perl -w + +# Copyright (c) 2009 Symbian Foundation Ltd +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Symbian Foundation Ltd - initial contribution. 
+# +# Contributors: +# +# Description: bzcsv2mw.pl - simple script for converting CSV report files from Bugzilla to MediaWiKi text files +# + +use strict; +use warnings; +use Text::CSV; +use Getopt::Long; + +sub Usage($) + { + my ($msg) = @_; + + print "$msg\n\n" if ($msg ne ""); + + print <<'EOF'; + + bzcsv2mw.pl - simple script for converting CSV report files from Bugzilla to MediaWiki text files + + Options: + + -csv CSV file generated by Bugzilla + -h|-help print this help information + +EOF + exit (1); + } + +my $file = ""; +my $help = 0; +my $count_nb_total_bugs=0; + +if (!GetOptions( + "csv=s" => \$file, + "h|help" => \$help, + )) + { + Usage("Invalid argument"); + } + +Usage("Too few arguments....use -csv") if ($file eq ""); +Usage("") if ($help); + +#my $file = $ARGV[0]; +my $csv = Text::CSV->new(); +my $mwtxt = $file.".mw.txt"; + +open (CSV, "<", $file) or die $!; +open (MWTXT,">$mwtxt"); +print MWTXT "{|\n"; + +my %headermap = ("bug_id"=>"ID","bug_severity"=>"Severity","reporter"=>"Reporter","bug_status"=>"Status","product"=>"Package", + "short_desc"=>"Title","priority"=>"Priority","assigned_to"=>"Assigned To","resolution"=>"Resolution","op_sys"=>"OS",); + +my $header=0; +while () { + if ($csv->parse($_)) + { + my @columns = $csv->fields(); + + if(!$header) + { + $header=1; + foreach (@columns) + { + #my $val = $_; + #if(defined $headermap{$val}){$val = $headermap{$val};} + print MWTXT "!".$headermap{$_}."\n"; + } + } + else + { + if ($columns[0] =~ m/(\d+)/) + { + $columns[0] = "[http://developer.symbian.org/bugs/show_bug.cgi?id=$columns[0] Bug$columns[0]]"; + } + foreach (@columns) + { + print MWTXT "|$_\n"; + } + $count_nb_total_bugs++; + } + } + else + { + my $err = $csv->error_input; + print "Failed to parse line: $err"; + } + + print MWTXT "|----\n"; +} + +close CSV; +print MWTXT "|}\n"; +close MWTXT; +print "\nThe number of bugs is: $count_nb_total_bugs\n"; diff -r 842a773e65f2 -r f6ae410bd493 clone_packages/clone_all_packages.pl --- /dev/null Thu 
Jan 01 00:00:00 1970 +0000 +++ b/clone_packages/clone_all_packages.pl Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,395 @@ +#! perl + +# Copyright (c) 2009 Symbian Foundation Ltd +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Symbian Foundation Ltd - initial contribution. +# +# Contributors: +# +# Description: +# Perl script to clone or update all of the Foundation MCL repositories + +use strict; +use Getopt::Long; +use File::Basename; + +sub Usage($) + { + my ($msg) = @_; + + print "$msg\n\n" if ($msg ne ""); + + print <<'EOF'; +clone_all_repositories - simple script for cloning Symbian repository tree + +This script will clone repositories, or pull changes into a previously +cloned repository. The script will prompt for your username and +password, which will be needed to access the SFL repositories, or you can +supply them with command line arguments. + +The list of packages can be supplied in a text file using the -packagelist +option, which is capable of reading the build-info.xml files supplied with +Symbian PDKs. Supplying a build-info.xml file will cause the clone or update +operation to use the exact revision for each of the relevant repositories. + +Important: + This script uses https access to the repositories, so the username and + password will be stored as cleartext in the .hg/hgrc file for each repository. + +Used with the "-mirror" option, the script will copy both MCL and FCL +repositories into the same directory layout as the Symbian website, and will +use the Mercurial "--noupdate" option when cloning. 
+ +Options: + +-username username at the Symbian website +-password password to go with username +-mirror create a "mirror" of the Symbian repository tree +-packagelist file containing the URLs for the packages to be processed +-retries number of times to retry a failed operation (default 1) +-verbose print the underlying "hg" commands before executing them +-n do nothing - don't actually execute the commands +-help print this help information +-exec execute command on each repository +-filter only process repository paths matching regular expression +-dummyrun Dummy Run, don't execute any Mercurial commands. +-webhost Web Mercurial host (defaults to developer.symbian.org) + +The -exec option processes the rest of the command line, treating it as +a command to apply to each repository in turn. Some keywords are expanded +to repository-specific values, and "hg" is always expanded to "hg -R %REPO%" + +%REPO% relative path to the repository +%WREPO% relative path to repository, with Windows path separators +%HREPO% path to the repository on the server +%WHREPO% path to the repository on the server, with Windows separators +%URL% URL of the master repository +%PUSHURL% URL suitable for pushing (always includes username & password) +%REV% revision associated with the repository (defaults to "tip") + +It's often useful to use "--" to separate the exec command from the options +to this script, e.g. 
"-exec -- hg update -C tip" + +EOF + exit (1); + } + +my @clone_options = (); # use ("--noupdate") to clone without extracting the source +my @pull_options = (); # use ("--rebase") to rebase your changes when pulling +my $hostname = "developer.symbian.org"; + +my $username = ""; +my $password = ""; +my $mirror = 0; # set to 1 if you want to mirror the repository structure +my $retries = 1; # number of times to retry problem repos +my $verbose = 0; # turn on more tracing +my $do_nothing = 0; # print the hg commands, don't actually do them +my $help = 0; +my $exec = 0; +my $filter = ""; +my @packagelist_files = (); + +# Analyse the rest of command-line parameters +if (!GetOptions( + "u|username=s" => \$username, + "p|password=s" => \$password, + "m|mirror" => \$mirror, + "r|retries=i" => \$retries, + "v|verbose" => \$verbose, + "n" => \$do_nothing, + "h|help" => \$help, + "e|exec" => \$exec, + "f|filter=s" => \$filter, + "l|packagelist=s" => \@packagelist_files, + "d|dummyrun" => \$do_nothing, + "w|webhost=s" => \$hostname, + )) + { + Usage("Invalid argument"); + } + +Usage("Too many arguments") if (scalar @ARGV > 0 && !$exec); +Usage("Too few arguments for -exec") if (scalar @ARGV == 0 && $exec); +Usage("") if ($help); + +# Important: This script uses http access to the repositories, so +# the username and password will be stored as cleartext in the +# .hg/hgrc file in each repository. 
+ +my $needs_id = 1; # assumed necessary for clone/pull + +my @exec_cmd = @ARGV; +if ($exec) + { + if ($exec_cmd[0] eq "hg") + { + shift @exec_cmd; + unshift @exec_cmd, "hg", "-R", "%REPO%"; + } + if ($verbose) + { + print "* Exec template = >", join("<,>", @exec_cmd), "<\n"; + } + $needs_id = grep /URL%/,@exec_cmd; # only need id if using %URL% or %PUSHURL% + } + +if ($needs_id && $username eq "" ) + { + print "Username: "; + $username = ; + chomp $username; + } +if ($needs_id && $password eq "" ) + { + print "Password: "; + $password = ; + chomp $password; + } + +my %export_control_special_case = ( + "oss/MCL/sf/os/security" => 1, + "oss/FCL/sf/os/security" => 1, + ); + +sub do_system(@) + { + my (@cmd) = @_; + + if ($verbose) + { + print "* ", join(" ", @cmd), "\n"; + } + return 0 if ($do_nothing); + + return system(@cmd); + } + +my %revisions; + +sub process_one_repo($) + { + my ($package) = @_; + my @dirs = split /\//, $package; + my $license = shift @dirs; + my $repotree = shift @dirs; # remove the MCL or FCL repo tree information + my $destdir = pop @dirs; # ignore the package name, because Mercurial will create that + + if ($mirror) + { + # Mirror the full directory structure, so put back the license & repotree dirs + unshift @dirs, $repotree; + unshift @dirs, $license; + } + + # Ensure the directories already exist as far as the parent of the repository + my $path = ""; + foreach my $dir (@dirs) + { + $path = ($path eq "") ? 
$dir : "$path/$dir"; + if (!-d $path) + { + mkdir $path; + } + } + + $path .= "/$destdir"; # this is where the repository will go + + my $repo_url = "https://$username:$password\@$hostname/$package/"; + my $repo_push_url =$repo_url; + if ($license ne "sfl" && !$export_control_special_case{$package}) + { + # user registration is not required for reading public package repositories + $repo_url = "http://$hostname/$package/"; + } + + my @rev_options = (); + my $revision = $revisions{$package}; + if (defined($revision)) + { + @rev_options = ("--rev", $revision); + } + else + { + $revision = "tip"; + # and leave the rev_options list empty + } + + my $ret; + if ($exec) + { + # iteration functionality - process the keywords + my $wpath = $path; + my $wpackage = $package; + $wpath =~ s/\//\\/g; # win32 path separator + $wpackage =~ s/\//\\/g; # win32 path separator + my @repo_cmd = (); + foreach my $origcmd (@exec_cmd) + { + my $cmd = $origcmd; # avoid altering the original + $cmd =~ s/%REPO%/$path/; + $cmd =~ s/%WREPO%/$wpath/; + $cmd =~ s/%HREPO%/$package/; + $cmd =~ s/%WHREPO%/$wpackage/; + $cmd =~ s/%URL%/$repo_url/; + $cmd =~ s/%PUSHURL%/$repo_push_url/; + $cmd =~ s/%REV%/$revision/; + push @repo_cmd, $cmd; + } + print "Processing $path...\n"; + $ret = do_system(@repo_cmd); + } + elsif (-d "$path/.hg") + { + # The repository already exists, so just do an update + + print "Updating $destdir from $package...\n"; + $ret = do_system("hg", "pull", @pull_options, @rev_options, "-R", $path, $repo_url); + if ($ret == 0 && ! 
$mirror) + { + $ret = do_system("hg", "update", "-R", $path, @rev_options) + } + } + else + { + # Clone the repository + + print "Cloning $destdir from $package...\n"; + $ret = do_system("hg", "clone", @clone_options, @rev_options, $repo_url, $path); + } + + $ret = $ret >> 8; # extract the exit status + print "* Exit status $ret for $path\n\n" if ($verbose); + return $ret; + } + +my $add_implied_FCL_repos = 0; +if (scalar @packagelist_files == 0) + { + # Read the package list files alongside the script itself + + # Extract the path location of the program and locate package list files + my ($program_name,$program_path) = &File::Basename::fileparse($0); + + foreach my $file ("sf_oss_mcl_packages.txt", "sftools_oss_mcl_packages.txt", "other_packages.txt") + { + if (! -e $program_path.$file) + { + print "Cannot find implied packagelist $program_path$file\n"; + next; + } + push @packagelist_files, $program_path.$file; + } + $add_implied_FCL_repos = 1; # lists only contain the MCL repo locations + } + +my @all_packages = (); + +foreach my $file (@packagelist_files) + { + print "* reading package information from $file...\n" if ($verbose); + open PKG_LIST, "<$file" or die "Can't open $file: $!\n"; + foreach my $line () + { + chomp($line); + + $line =~ s/\015//g; # remove CR, in case we are processing Windows text files on Linux + + my $revision; # set when processing build-info listings + + # build-info.xml format + # //v800008/Builds01/mercurial_master_prod/sfl/MCL/sf/adaptation/stubs/#7:e086c7f635d5 + # //v800008/Builds01/mercurial_master_prod/sfl/MCL/sf/adaptation/stubs/#:e086c7f635d5 + # //v800008/Builds01/mercurial_master_prod/sfl/MCL/sf/adaptation/stubs/#e086c7f635d5 + if ($line =~ /(.*)#(\d*:)?([0-9a-fA-F]+)<\/baseline>/i) + { + $line = $1; # discard the wrapping + $revision = $3; + } + + # Look for the oss/MCL/ prefix to a path e.g. 
+ # https://developer.symbian.org/oss/FCL/interim/contrib/WidgetExamples + if ($line =~ /((oss|sfl)\/(FCL|MCL)\/.*)\s*$/) + { + my $repo_path = $1; + $repo_path =~ s/\/$//; # remove trailing slash, if any + + push @all_packages, $repo_path; + $revisions{$repo_path} = $revision if (defined $revision); + next; + } + } + close PKG_LIST; + } + +if ($mirror) + { + push @clone_options, "--noupdate"; + + if ($add_implied_FCL_repos) + { + # Assume that every MCL has a matching FCL. As we are mirroring, + # we can process both without them overlapping in the local filesystem + my @list_with_fcls = (); + foreach my $package (@all_packages) + { + push @list_with_fcls, $package; + if ($package =~ /MCL/) + { + $package =~ s/MCL/FCL/; + push @list_with_fcls, $package; + } + } + @all_packages = @list_with_fcls; + } + } + +my @problem_packages = (); +my $total_packages = 0; + +foreach my $package (@all_packages) + { + if ($filter && $package !~ /$filter/) + { + next; # skip repos which don't match the filter + } + my $err = process_one_repo($package); + $total_packages++; + push @problem_packages, $package if ($err < 0 || $err > 127); + } + +# retry problem packages + +my $attempt = 0; +while ($attempt < $retries && scalar @problem_packages) + { + $attempt++; + printf "\n\n------------\nRetry attempt %d on %d packages\n", + $attempt, scalar @problem_packages; + print join("\n", @problem_packages, ""), "\n"; + + my @list = @problem_packages; + @problem_packages = (); + foreach my $package (@list) + { + my $err = process_one_repo($package); + push @problem_packages, $package if ($err < 0 || $err > 127); + } + } + +printf "\n------------\nProcessed %d packages, of which %d reported errors\n", + $total_packages, scalar @problem_packages; +if (scalar @problem_packages) + { + print join("\n", @problem_packages, ""); + exit(1); + } + else + { + exit(0); + } + \ No newline at end of file diff -r 842a773e65f2 -r f6ae410bd493 clone_packages/other_packages.txt --- /dev/null Thu Jan 01 
00:00:00 1970 +0000 +++ b/clone_packages/other_packages.txt Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,9 @@ +https://developer.symbian.org/oss/API_REF/SHAI/ +https://developer.symbian.org/oss/FCL/examples/app/NPR/ +https://developer.symbian.org/oss/MCL/utilities +https://developer.symbian.org/sfl/API_REF/Public_API/epoc32/ +https://developer.symbian.org/oss/FCL/interim/auxiliary_tools/AgileBrowser +https://developer.symbian.org/oss/FCL/interim/auxiliary_tools/EUserHL +https://developer.symbian.org/oss/FCL/interim/auxiliary_tools/route_generator +https://developer.symbian.org/oss/FCL/interim/auxiliary_tools/simulation_PSY +https://developer.symbian.org/sfl/FCL/interim/desktopsw diff -r 842a773e65f2 -r f6ae410bd493 clone_packages/parse_clone_all.pl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/clone_packages/parse_clone_all.pl Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,87 @@ +#! perl + +# Copyright (c) 2009 Symbian Foundation Ltd +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Symbian Foundation Ltd - initial contribution. +# +# Contributors: +# +# Description: +# Perl script to summarise output from clone_all_package.pl + + +@all = <>; + +my $repo; +my $newrepo = 0; +my $errors = 0; +my $summary = 0; +my $retries = 0; +foreach my $line (@all) +{ + if($summary) + { + # if we are in the summary section then just echo all lines out + # this should be a list of all the packages with errors + print "$line\n"; + } + #save package name + # e.g new package "Cloning compatanaapps from sfl/MCL/sftools/ana/compatanaapps..." + # e.g. existing package "Updating helix from sfl/MCL/sf/mw/helix..." + # e.g. with -exec option "Processing sfl/FCL/interim/auxiliary_tools/AgileBrowser." 
+ elsif ($line =~ m/Cloning (.*?)from(.*)$/) + { + $repo = $2; + $newrepo = 1; + $retries =0; + } + elsif ($line =~ m/Updating (.*?)from(.*)$/) + { + $repo = $2; + $newrepo = 0; + $retries =0; + } + + # + # Capture number of changes, should be line like one of the following + # e.g. "added 4 changesets with 718 changes to 690 files" + # e.g. "no changes found" + elsif ($line =~ m/added (.*?)changesets with(.*)$/) + { + print "\n$repo\t added $1 chamgesets"; + print "\t retries $retries"; + print "\t** NEW" if ($newrepo); + } + + if($line =~ m/abort:/) + { + $retries++; + } + + # Process the summary section + # e.g. "------------" + # e.g. "Processed 22 packages, of which 0 reported errors" + if ($line =~ m/Processed (.*?)packages, of which(.*?)reported errors/) + { + print "\n-------------------------------\n"; + print "\n Summary: Processed $1 : Errors $2\n"; + $errors= $2; + $summary = 1; + } + +} +if ($errors > 0) +{ + print "\nexit with error\n"; + exit 1; +} +else +{ + print "\nexit success\n"; + exit 0; +} diff -r 842a773e65f2 -r f6ae410bd493 clone_packages/patch_hgrc.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/clone_packages/patch_hgrc.py Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,93 @@ +#! /usr/bin/python +# Copyright (c) 2009 Symbian Foundation Ltd +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Symbian Foundation Ltd - initial contribution. +# +# Contributors: +# +# Description: +# Python script to manipulate the hgrc files + +from ConfigParser import * +import optparse +import os +import sys +import re + +verbose = False; +credentials= re.compile(r"//.*?@") + +def strip_credentials(hgrc): + """ Remove the user credentials from the default path in hgrc file""" + # e.g. 
+ # before http://user:pass@prod.foundationhost.org/sfl/MCL/sf/os/boardsupport/ + # after http://prod.foundationhost.org/sfl/MCL/sf/os/boardsupport/ + if hgrc.has_section('paths'): + if (verbose): print hgrc.items('paths') + defpath = hgrc.get('paths', 'default') + newpath = credentials.sub(r"//",defpath) + #print "new path ", newpath + hgrc.set('paths', 'default',newpath) + elif (verbose): + if (verbose): print "No [paths] section\n" + +def add_hooks(hgrc): + if (hgrc.has_section('hooks')): + # unpdate + if (verbose) : print 'updating existing hooks section' + else: + if (verbose) : print 'adding hooks section' + hgrc.add_section('hooks') + # add example (windows only) hook to block local commit to the repo + hgrc.set('hooks', 'pretxncommit.abort', 'exit /b 1') + hgrc.set('hooks', 'pretxncommit.message', 'ERROR: This is a read only repo') + + +def write_hgrcfile(hgrc,fout): + fnewini = file(fout,'w') + hgrc.write(fnewini) + fnewini.close() + +def main(): + global verbose + usage = "usage: %prog [options]" + try: + parser = optparse.OptionParser(usage) + parser.set_defaults(filename=".hg/hgrc") + parser.add_option("-f","--file", dest="filename", default=".hg/hgrc",metavar="FILE" , help='file to be patched') + parser.add_option("-v", action="store_true",dest="verbose",default=False, help='Verbose trace information') + (options, args) = parser.parse_args() + except: + parser.print_help() + sys.exit(1) + + f = os.path.abspath(options.filename) + if(options.verbose): + verbose = True + print f + if(os.path.isfile(f)): + try: + #conff = file(f,'w') #open file f for read/write + hgrcfile = RawConfigParser() + hgrcfile.read(f) + if (verbose): + print hgrcfile.sections() + except: + print 'Something failed opening the configuration file' + sys.exit(2) + else: + print "Configuration file does not exist? 
",f + sys.exit(2) + + strip_credentials(hgrcfile) + add_hooks(hgrcfile) + write_hgrcfile(hgrcfile,f) + + +if __name__ == "__main__": + main() diff -r 842a773e65f2 -r f6ae410bd493 clone_packages/sf_oss_fcl_packages.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/clone_packages/sf_oss_fcl_packages.txt Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,134 @@ +https://developer.symbian.org/oss/FCL/sf/adaptation/beagleboard +https://developer.symbian.org/oss/FCL/sf/adaptation/qemu +https://developer.symbian.org/oss/FCL/sf/adaptation/stubs +https://developer.symbian.org/oss/FCL/sf/app/camera +https://developer.symbian.org/oss/FCL/sf/app/commonemail +https://developer.symbian.org/oss/FCL/sf/app/conntools +https://developer.symbian.org/oss/FCL/sf/app/contacts +https://developer.symbian.org/oss/FCL/sf/app/contentcontrol +https://developer.symbian.org/oss/FCL/sf/app/conversations +https://developer.symbian.org/oss/FCL/sf/app/devicecontrol +https://developer.symbian.org/oss/FCL/sf/app/dictionary +https://developer.symbian.org/oss/FCL/sf/app/files +https://developer.symbian.org/oss/FCL/sf/app/graphicsuis +https://developer.symbian.org/oss/FCL/sf/app/helps +https://developer.symbian.org/oss/FCL/sf/app/homescreen +https://developer.symbian.org/oss/FCL/sf/app/homescreentools +https://developer.symbian.org/oss/FCL/sf/app/im +https://developer.symbian.org/oss/FCL/sf/app/imgeditor +https://developer.symbian.org/oss/FCL/sf/app/iptelephony +https://developer.symbian.org/oss/FCL/sf/app/jrt +https://developer.symbian.org/oss/FCL/sf/app/location +https://developer.symbian.org/oss/FCL/sf/app/messaging +https://developer.symbian.org/oss/FCL/sf/app/mmsharinguis +https://developer.symbian.org/oss/FCL/sf/app/musicplayer +https://developer.symbian.org/oss/FCL/sf/app/organizer +https://developer.symbian.org/oss/FCL/sf/app/phone +https://developer.symbian.org/oss/FCL/sf/app/photos +https://developer.symbian.org/oss/FCL/sf/app/podcatcher +https://developer.symbian.org/oss/FCL/sf/app/printing 
+https://developer.symbian.org/oss/FCL/sf/app/profile +https://developer.symbian.org/oss/FCL/sf/app/radio +https://developer.symbian.org/oss/FCL/sf/app/rndtools +https://developer.symbian.org/oss/FCL/sf/app/screensaver +https://developer.symbian.org/oss/FCL/sf/app/settingsuis +https://developer.symbian.org/oss/FCL/sf/app/speechsrv +https://developer.symbian.org/oss/FCL/sf/app/techview +https://developer.symbian.org/oss/FCL/sf/app/utils +https://developer.symbian.org/oss/FCL/sf/app/videoeditor +https://developer.symbian.org/oss/FCL/sf/app/videoplayer +https://developer.symbian.org/oss/FCL/sf/app/videotelephony +https://developer.symbian.org/oss/FCL/sf/app/voicerec +https://developer.symbian.org/oss/FCL/sf/app/webuis +https://developer.symbian.org/oss/FCL/sf/incubator/modemadaptation +https://developer.symbian.org/oss/FCL/sf/incubator/socialmobilefw +https://developer.symbian.org/oss/FCL/sf/mw/accesssec +https://developer.symbian.org/oss/FCL/sf/mw/appinstall +https://developer.symbian.org/oss/FCL/sf/mw/appsupport +https://developer.symbian.org/oss/FCL/sf/mw/btservices +https://developer.symbian.org/oss/FCL/sf/mw/camerasrv +https://developer.symbian.org/oss/FCL/sf/mw/classicui +https://developer.symbian.org/oss/FCL/sf/mw/dlnasrv +https://developer.symbian.org/oss/FCL/sf/mw/drm +https://developer.symbian.org/oss/FCL/sf/mw/gsprofilesrv +https://developer.symbian.org/oss/FCL/sf/mw/gstreamer +https://developer.symbian.org/oss/FCL/sf/mw/hapticsservices +https://developer.symbian.org/oss/FCL/sf/mw/helix +https://developer.symbian.org/oss/FCL/sf/mw/homescreensrv +https://developer.symbian.org/oss/FCL/sf/mw/imghandling +https://developer.symbian.org/oss/FCL/sf/mw/imsrv +https://developer.symbian.org/oss/FCL/sf/mw/inputmethods +https://developer.symbian.org/oss/FCL/sf/mw/ipappprotocols +https://developer.symbian.org/oss/FCL/sf/mw/ipappsrv +https://developer.symbian.org/oss/FCL/sf/mw/ipconnmgmt +https://developer.symbian.org/oss/FCL/sf/mw/legacypresence 
+https://developer.symbian.org/oss/FCL/sf/mw/locationsrv +https://developer.symbian.org/oss/FCL/sf/mw/mds +https://developer.symbian.org/oss/FCL/sf/mw/messagingmw +https://developer.symbian.org/oss/FCL/sf/mw/metadatasrv +https://developer.symbian.org/oss/FCL/sf/mw/mmappfw +https://developer.symbian.org/oss/FCL/sf/mw/mmmw +https://developer.symbian.org/oss/FCL/sf/mw/mmuifw +https://developer.symbian.org/oss/FCL/sf/mw/netprotocols +https://developer.symbian.org/oss/FCL/sf/mw/networkingdm +https://developer.symbian.org/oss/FCL/sf/mw/opensrv +https://developer.symbian.org/oss/FCL/sf/mw/phonesrv +https://developer.symbian.org/oss/FCL/sf/mw/platformtools +https://developer.symbian.org/oss/FCL/sf/mw/qt +https://developer.symbian.org/oss/FCL/sf/mw/remoteconn +https://developer.symbian.org/oss/FCL/sf/mw/remotemgmt +https://developer.symbian.org/oss/FCL/sf/mw/remotestorage +https://developer.symbian.org/oss/FCL/sf/mw/securitysrv +https://developer.symbian.org/oss/FCL/sf/mw/serviceapi +https://developer.symbian.org/oss/FCL/sf/mw/serviceapifw +https://developer.symbian.org/oss/FCL/sf/mw/shortlinkconn +https://developer.symbian.org/oss/FCL/sf/mw/srvdiscovery +https://developer.symbian.org/oss/FCL/sf/mw/svgt +https://developer.symbian.org/oss/FCL/sf/mw/uiaccelerator +https://developer.symbian.org/oss/FCL/sf/mw/uiresources +https://developer.symbian.org/oss/FCL/sf/mw/uitools +https://developer.symbian.org/oss/FCL/sf/mw/usbservices +https://developer.symbian.org/oss/FCL/sf/mw/videoutils +https://developer.symbian.org/oss/FCL/sf/mw/vpnclient +https://developer.symbian.org/oss/FCL/sf/mw/web +https://developer.symbian.org/oss/FCL/sf/mw/websrv +https://developer.symbian.org/oss/FCL/sf/mw/wirelessacc +https://developer.symbian.org/oss/FCL/sf/os/boardsupport +https://developer.symbian.org/oss/FCL/sf/os/bt +https://developer.symbian.org/oss/FCL/sf/os/buildtools +https://developer.symbian.org/oss/FCL/sf/os/cellularsrv +https://developer.symbian.org/oss/FCL/sf/os/commsfw 
+https://developer.symbian.org/oss/FCL/sf/os/deviceplatformrelease +https://developer.symbian.org/oss/FCL/sf/os/devicesrv +https://developer.symbian.org/oss/FCL/sf/os/graphics +https://developer.symbian.org/oss/FCL/sf/os/imagingext +https://developer.symbian.org/oss/FCL/sf/os/kernelhwsrv +https://developer.symbian.org/oss/FCL/sf/os/lbs +https://developer.symbian.org/oss/FCL/sf/os/mm +https://developer.symbian.org/oss/FCL/sf/os/networkingsrv +https://developer.symbian.org/oss/FCL/sf/os/osrndtools +https://developer.symbian.org/oss/FCL/sf/os/ossrv +https://developer.symbian.org/oss/FCL/sf/os/persistentdata +https://developer.symbian.org/oss/FCL/sf/os/security +https://developer.symbian.org/oss/FCL/sf/os/textandloc +https://developer.symbian.org/oss/FCL/sf/os/usb +https://developer.symbian.org/oss/FCL/sf/os/wlan +https://developer.symbian.org/oss/FCL/sf/os/xmlsrv +https://developer.symbian.org/oss/FCL/sf/ostools/osrndtools +https://developer.symbian.org/oss/FCL/sf/tools/build_s60 +https://developer.symbian.org/oss/FCL/sf/tools/buildplatforms +https://developer.symbian.org/oss/FCL/sf/tools/makefile_templates +https://developer.symbian.org/oss/FCL/sf/tools/platformtools +https://developer.symbian.org/oss/FCL/sf/tools/rndtools +https://developer.symbian.org/oss/FCL/sf/tools/swconfigtools +https://developer.symbian.org/oss/FCL/interim/QEMU +https://developer.symbian.org/oss/FCL/interim/contrib/QtExamples +https://developer.symbian.org/oss/FCL/interim/contrib/WidgetExamples +https://developer.symbian.org/oss/FCL/interim/fbf/bootstrap +https://developer.symbian.org/oss/FCL/interim/fbf/configs/default +https://developer.symbian.org/oss/FCL/interim/fbf/configs/pkgbuild +https://developer.symbian.org/oss/FCL/interim/fbf/hudson +https://developer.symbian.org/oss/FCL/interim/fbf/projects/packages +https://developer.symbian.org/oss/FCL/interim/fbf/projects/platforms +https://developer.symbian.org/oss/FCL/interim/sf-test/platform/smoketest + diff -r 842a773e65f2 -r f6ae410bd493 
clone_packages/sf_oss_mcl_packages.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/clone_packages/sf_oss_mcl_packages.txt Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,121 @@ +https://developer.symbian.org/oss/MCL/sf/adaptation/beagleboard +https://developer.symbian.org/oss/MCL/sf/adaptation/qemu +https://developer.symbian.org/oss/MCL/sf/adaptation/stubs +https://developer.symbian.org/oss/MCL/sf/app/camera +https://developer.symbian.org/oss/MCL/sf/app/commonemail +https://developer.symbian.org/oss/MCL/sf/app/conntools +https://developer.symbian.org/oss/MCL/sf/app/contacts +https://developer.symbian.org/oss/MCL/sf/app/contentcontrol +https://developer.symbian.org/oss/MCL/sf/app/conversations +https://developer.symbian.org/oss/MCL/sf/app/devicecontrol +https://developer.symbian.org/oss/MCL/sf/app/dictionary +https://developer.symbian.org/oss/MCL/sf/app/files +https://developer.symbian.org/oss/MCL/sf/app/graphicsuis +https://developer.symbian.org/oss/MCL/sf/app/helps +https://developer.symbian.org/oss/MCL/sf/app/homescreen +https://developer.symbian.org/oss/MCL/sf/app/homescreentools +https://developer.symbian.org/oss/MCL/sf/app/im +https://developer.symbian.org/oss/MCL/sf/app/imgeditor +https://developer.symbian.org/oss/MCL/sf/app/iptelephony +https://developer.symbian.org/oss/MCL/sf/app/jrt +https://developer.symbian.org/oss/MCL/sf/app/location +https://developer.symbian.org/oss/MCL/sf/app/messaging +https://developer.symbian.org/oss/MCL/sf/app/mmsharinguis +https://developer.symbian.org/oss/MCL/sf/app/musicplayer +https://developer.symbian.org/oss/MCL/sf/app/organizer +https://developer.symbian.org/oss/MCL/sf/app/phone +https://developer.symbian.org/oss/MCL/sf/app/photos +https://developer.symbian.org/oss/MCL/sf/app/podcatcher +https://developer.symbian.org/oss/MCL/sf/app/printing +https://developer.symbian.org/oss/MCL/sf/app/profile +https://developer.symbian.org/oss/MCL/sf/app/radio +https://developer.symbian.org/oss/MCL/sf/app/rndtools 
+https://developer.symbian.org/oss/MCL/sf/app/screensaver +https://developer.symbian.org/oss/MCL/sf/app/settingsuis +https://developer.symbian.org/oss/MCL/sf/app/speechsrv +https://developer.symbian.org/oss/MCL/sf/app/techview +https://developer.symbian.org/oss/MCL/sf/app/utils +https://developer.symbian.org/oss/MCL/sf/app/videoeditor +https://developer.symbian.org/oss/MCL/sf/app/videoplayer +https://developer.symbian.org/oss/MCL/sf/app/videotelephony +https://developer.symbian.org/oss/MCL/sf/app/voicerec +https://developer.symbian.org/oss/MCL/sf/app/webuis +https://developer.symbian.org/oss/MCL/sf/mw/accesssec +https://developer.symbian.org/oss/MCL/sf/mw/appinstall +https://developer.symbian.org/oss/MCL/sf/mw/appsupport +https://developer.symbian.org/oss/MCL/sf/mw/btservices +https://developer.symbian.org/oss/MCL/sf/mw/camerasrv +https://developer.symbian.org/oss/MCL/sf/mw/classicui +https://developer.symbian.org/oss/MCL/sf/mw/dlnasrv +https://developer.symbian.org/oss/MCL/sf/mw/drm +https://developer.symbian.org/oss/MCL/sf/mw/gsprofilesrv +https://developer.symbian.org/oss/MCL/sf/mw/gstreamer +https://developer.symbian.org/oss/MCL/sf/mw/hapticsservices +https://developer.symbian.org/oss/MCL/sf/mw/helix +https://developer.symbian.org/oss/MCL/sf/mw/homescreensrv +https://developer.symbian.org/oss/MCL/sf/mw/imghandling +https://developer.symbian.org/oss/MCL/sf/mw/imsrv +https://developer.symbian.org/oss/MCL/sf/mw/inputmethods +https://developer.symbian.org/oss/MCL/sf/mw/ipappprotocols +https://developer.symbian.org/oss/MCL/sf/mw/ipappsrv +https://developer.symbian.org/oss/MCL/sf/mw/ipconnmgmt +https://developer.symbian.org/oss/MCL/sf/mw/legacypresence +https://developer.symbian.org/oss/MCL/sf/mw/locationsrv +https://developer.symbian.org/oss/MCL/sf/mw/mds +https://developer.symbian.org/oss/MCL/sf/mw/messagingmw +https://developer.symbian.org/oss/MCL/sf/mw/metadatasrv +https://developer.symbian.org/oss/MCL/sf/mw/mmappfw 
+https://developer.symbian.org/oss/MCL/sf/mw/mmmw +https://developer.symbian.org/oss/MCL/sf/mw/mmuifw +https://developer.symbian.org/oss/MCL/sf/mw/netprotocols +https://developer.symbian.org/oss/MCL/sf/mw/networkingdm +https://developer.symbian.org/oss/MCL/sf/mw/opensrv +https://developer.symbian.org/oss/MCL/sf/mw/phonesrv +https://developer.symbian.org/oss/MCL/sf/mw/platformtools +https://developer.symbian.org/oss/MCL/sf/mw/qt +https://developer.symbian.org/oss/MCL/sf/mw/remoteconn +https://developer.symbian.org/oss/MCL/sf/mw/remotemgmt +https://developer.symbian.org/oss/MCL/sf/mw/remotestorage +https://developer.symbian.org/oss/MCL/sf/mw/securitysrv +https://developer.symbian.org/oss/MCL/sf/mw/serviceapi +https://developer.symbian.org/oss/MCL/sf/mw/serviceapifw +https://developer.symbian.org/oss/MCL/sf/mw/shortlinkconn +https://developer.symbian.org/oss/MCL/sf/mw/srvdiscovery +https://developer.symbian.org/oss/MCL/sf/mw/svgt +https://developer.symbian.org/oss/MCL/sf/mw/uiaccelerator +https://developer.symbian.org/oss/MCL/sf/mw/uiresources +https://developer.symbian.org/oss/MCL/sf/mw/uitools +https://developer.symbian.org/oss/MCL/sf/mw/usbservices +https://developer.symbian.org/oss/MCL/sf/mw/videoutils +https://developer.symbian.org/oss/MCL/sf/mw/vpnclient +https://developer.symbian.org/oss/MCL/sf/mw/web +https://developer.symbian.org/oss/MCL/sf/mw/websrv +https://developer.symbian.org/oss/MCL/sf/mw/wirelessacc +https://developer.symbian.org/oss/MCL/sf/os/boardsupport +https://developer.symbian.org/oss/MCL/sf/os/bt +https://developer.symbian.org/oss/MCL/sf/os/buildtools +https://developer.symbian.org/oss/MCL/sf/os/cellularsrv +https://developer.symbian.org/oss/MCL/sf/os/commsfw +https://developer.symbian.org/oss/MCL/sf/os/deviceplatformrelease +https://developer.symbian.org/oss/MCL/sf/os/devicesrv +https://developer.symbian.org/oss/MCL/sf/os/graphics +https://developer.symbian.org/oss/MCL/sf/os/imagingext +https://developer.symbian.org/oss/MCL/sf/os/kernelhwsrv 
+https://developer.symbian.org/oss/MCL/sf/os/lbs +https://developer.symbian.org/oss/MCL/sf/os/mm +https://developer.symbian.org/oss/MCL/sf/os/networkingsrv +https://developer.symbian.org/oss/MCL/sf/os/osrndtools +https://developer.symbian.org/oss/MCL/sf/os/ossrv +https://developer.symbian.org/oss/MCL/sf/os/persistentdata +https://developer.symbian.org/oss/MCL/sf/os/security +https://developer.symbian.org/oss/MCL/sf/os/textandloc +https://developer.symbian.org/oss/MCL/sf/os/usb +https://developer.symbian.org/oss/MCL/sf/os/wlan +https://developer.symbian.org/oss/MCL/sf/os/xmlsrv +https://developer.symbian.org/oss/MCL/sf/ostools/osrndtools +https://developer.symbian.org/oss/MCL/sf/tools/build_s60 +https://developer.symbian.org/oss/MCL/sf/tools/buildplatforms +https://developer.symbian.org/oss/MCL/sf/tools/makefile_templates +https://developer.symbian.org/oss/MCL/sf/tools/platformtools +https://developer.symbian.org/oss/MCL/sf/tools/rndtools +https://developer.symbian.org/oss/MCL/sf/tools/swconfigtools diff -r 842a773e65f2 -r f6ae410bd493 clone_packages/sf_sfl_fcl_packages.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/clone_packages/sf_sfl_fcl_packages.txt Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,114 @@ +https://developer.symbian.org/sfl/FCL/sf/adaptation/stubs +https://developer.symbian.org/sfl/FCL/sf/app/camera +https://developer.symbian.org/sfl/FCL/sf/app/commonemail +https://developer.symbian.org/sfl/FCL/sf/app/conntools +https://developer.symbian.org/sfl/FCL/sf/app/contacts +https://developer.symbian.org/sfl/FCL/sf/app/contentcontrol +https://developer.symbian.org/sfl/FCL/sf/app/conversations +https://developer.symbian.org/sfl/FCL/sf/app/devicecontrol +https://developer.symbian.org/sfl/FCL/sf/app/dictionary +https://developer.symbian.org/sfl/FCL/sf/app/files +https://developer.symbian.org/sfl/FCL/sf/app/graphicsuis +https://developer.symbian.org/sfl/FCL/sf/app/helps +https://developer.symbian.org/sfl/FCL/sf/app/homescreen 
+https://developer.symbian.org/sfl/FCL/sf/app/homescreentools +https://developer.symbian.org/sfl/FCL/sf/app/im +https://developer.symbian.org/sfl/FCL/sf/app/imgeditor +https://developer.symbian.org/sfl/FCL/sf/app/iptelephony +https://developer.symbian.org/sfl/FCL/sf/app/location +https://developer.symbian.org/sfl/FCL/sf/app/messaging +https://developer.symbian.org/sfl/FCL/sf/app/mmsharinguis +https://developer.symbian.org/sfl/FCL/sf/app/musicplayer +https://developer.symbian.org/sfl/FCL/sf/app/organizer +https://developer.symbian.org/sfl/FCL/sf/app/phone +https://developer.symbian.org/sfl/FCL/sf/app/photos +https://developer.symbian.org/sfl/FCL/sf/app/printing +https://developer.symbian.org/sfl/FCL/sf/app/profile +https://developer.symbian.org/sfl/FCL/sf/app/radio +https://developer.symbian.org/sfl/FCL/sf/app/rndtools +https://developer.symbian.org/sfl/FCL/sf/app/screensaver +https://developer.symbian.org/sfl/FCL/sf/app/settingsuis +https://developer.symbian.org/sfl/FCL/sf/app/speechsrv +https://developer.symbian.org/sfl/FCL/sf/app/techview +https://developer.symbian.org/sfl/FCL/sf/app/utils +https://developer.symbian.org/sfl/FCL/sf/app/videoeditor +https://developer.symbian.org/sfl/FCL/sf/app/videoplayer +https://developer.symbian.org/sfl/FCL/sf/app/videotelephony +https://developer.symbian.org/sfl/FCL/sf/app/voicerec +https://developer.symbian.org/sfl/FCL/sf/mw/accesssec +https://developer.symbian.org/sfl/FCL/sf/mw/appinstall +https://developer.symbian.org/sfl/FCL/sf/mw/appsupport +https://developer.symbian.org/sfl/FCL/sf/mw/btservices +https://developer.symbian.org/sfl/FCL/sf/mw/camerasrv +https://developer.symbian.org/sfl/FCL/sf/mw/classicui +https://developer.symbian.org/sfl/FCL/sf/mw/dlnasrv +https://developer.symbian.org/sfl/FCL/sf/mw/drm +https://developer.symbian.org/sfl/FCL/sf/mw/gsprofilesrv +https://developer.symbian.org/sfl/FCL/sf/mw/hapticsservices +https://developer.symbian.org/sfl/FCL/sf/mw/helix 
+https://developer.symbian.org/sfl/FCL/sf/mw/homescreensrv +https://developer.symbian.org/sfl/FCL/sf/mw/imghandling +https://developer.symbian.org/sfl/FCL/sf/mw/imsrv +https://developer.symbian.org/sfl/FCL/sf/mw/inputmethods +https://developer.symbian.org/sfl/FCL/sf/mw/ipappprotocols +https://developer.symbian.org/sfl/FCL/sf/mw/ipappsrv +https://developer.symbian.org/sfl/FCL/sf/mw/ipconnmgmt +https://developer.symbian.org/sfl/FCL/sf/mw/legacypresence +https://developer.symbian.org/sfl/FCL/sf/mw/locationsrv +https://developer.symbian.org/sfl/FCL/sf/mw/mds +https://developer.symbian.org/sfl/FCL/sf/mw/messagingmw +https://developer.symbian.org/sfl/FCL/sf/mw/metadatasrv +https://developer.symbian.org/sfl/FCL/sf/mw/mmappfw +https://developer.symbian.org/sfl/FCL/sf/mw/mmmw +https://developer.symbian.org/sfl/FCL/sf/mw/mmuifw +https://developer.symbian.org/sfl/FCL/sf/mw/netprotocols +https://developer.symbian.org/sfl/FCL/sf/mw/networkingdm +https://developer.symbian.org/sfl/FCL/sf/mw/opensrv +https://developer.symbian.org/sfl/FCL/sf/mw/phonesrv +https://developer.symbian.org/sfl/FCL/sf/mw/platformtools +https://developer.symbian.org/sfl/FCL/sf/mw/remoteconn +https://developer.symbian.org/sfl/FCL/sf/mw/remotemgmt +https://developer.symbian.org/sfl/FCL/sf/mw/remotestorage +https://developer.symbian.org/sfl/FCL/sf/mw/securitysrv +https://developer.symbian.org/sfl/FCL/sf/mw/shortlinkconn +https://developer.symbian.org/sfl/FCL/sf/mw/srvdiscovery +https://developer.symbian.org/sfl/FCL/sf/mw/svgt +https://developer.symbian.org/sfl/FCL/sf/mw/uiaccelerator +https://developer.symbian.org/sfl/FCL/sf/mw/uiresources +https://developer.symbian.org/sfl/FCL/sf/mw/uitools +https://developer.symbian.org/sfl/FCL/sf/mw/usbservices +https://developer.symbian.org/sfl/FCL/sf/mw/videoutils +https://developer.symbian.org/sfl/FCL/sf/mw/vpnclient +https://developer.symbian.org/sfl/FCL/sf/mw/websrv +https://developer.symbian.org/sfl/FCL/sf/mw/wirelessacc 
+https://developer.symbian.org/sfl/FCL/sf/os/boardsupport +https://developer.symbian.org/sfl/FCL/sf/os/bt +https://developer.symbian.org/sfl/FCL/sf/os/buildtools +https://developer.symbian.org/sfl/FCL/sf/os/cellularsrv +https://developer.symbian.org/sfl/FCL/sf/os/commsfw +https://developer.symbian.org/sfl/FCL/sf/os/deviceplatformrelease +https://developer.symbian.org/sfl/FCL/sf/os/devicesrv +https://developer.symbian.org/sfl/FCL/sf/os/graphics +https://developer.symbian.org/sfl/FCL/sf/os/imagingext +https://developer.symbian.org/sfl/FCL/sf/os/kernelhwsrv +https://developer.symbian.org/sfl/FCL/sf/os/lbs +https://developer.symbian.org/sfl/FCL/sf/os/mm +https://developer.symbian.org/sfl/FCL/sf/os/networkingsrv +https://developer.symbian.org/sfl/FCL/sf/os/osrndtools +https://developer.symbian.org/sfl/FCL/sf/os/ossrv +https://developer.symbian.org/sfl/FCL/sf/os/persistentdata +https://developer.symbian.org/sfl/FCL/sf/os/security +https://developer.symbian.org/sfl/FCL/sf/os/textandloc +https://developer.symbian.org/sfl/FCL/sf/os/usb +https://developer.symbian.org/sfl/FCL/sf/os/wlan +https://developer.symbian.org/sfl/FCL/sf/os/xmlsrv +https://developer.symbian.org/sfl/FCL/sf/ostools/osrndtools +https://developer.symbian.org/sfl/FCL/sf/tools/build_s60 +https://developer.symbian.org/sfl/FCL/sf/tools/buildplatforms +https://developer.symbian.org/sfl/FCL/sf/tools/homescreentools +https://developer.symbian.org/sfl/FCL/sf/tools/makefile_templates +https://developer.symbian.org/sfl/FCL/sf/tools/platformtools +https://developer.symbian.org/sfl/FCL/sf/tools/rndtools +https://developer.symbian.org/sfl/FCL/sf/tools/swconfigtools +https://developer.symbian.org/sfl/FCL/interim/desktopsw + diff -r 842a773e65f2 -r f6ae410bd493 clone_packages/sf_sfl_mcl_packages.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/clone_packages/sf_sfl_mcl_packages.txt Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,112 @@ +https://developer.symbian.org/sfl/MCL/sf/adaptation/stubs 
+https://developer.symbian.org/sfl/MCL/sf/app/camera +https://developer.symbian.org/sfl/MCL/sf/app/commonemail +https://developer.symbian.org/sfl/MCL/sf/app/conntools +https://developer.symbian.org/sfl/MCL/sf/app/contacts +https://developer.symbian.org/sfl/MCL/sf/app/contentcontrol +https://developer.symbian.org/sfl/MCL/sf/app/conversations +https://developer.symbian.org/sfl/MCL/sf/app/devicecontrol +https://developer.symbian.org/sfl/MCL/sf/app/dictionary +https://developer.symbian.org/sfl/MCL/sf/app/files +https://developer.symbian.org/sfl/MCL/sf/app/graphicsuis +https://developer.symbian.org/sfl/MCL/sf/app/helps +https://developer.symbian.org/sfl/MCL/sf/app/homescreen +https://developer.symbian.org/sfl/MCL/sf/app/homescreentools +https://developer.symbian.org/sfl/MCL/sf/app/im +https://developer.symbian.org/sfl/MCL/sf/app/imgeditor +https://developer.symbian.org/sfl/MCL/sf/app/iptelephony +https://developer.symbian.org/sfl/MCL/sf/app/location +https://developer.symbian.org/sfl/MCL/sf/app/messaging +https://developer.symbian.org/sfl/MCL/sf/app/mmsharinguis +https://developer.symbian.org/sfl/MCL/sf/app/musicplayer +https://developer.symbian.org/sfl/MCL/sf/app/organizer +https://developer.symbian.org/sfl/MCL/sf/app/phone +https://developer.symbian.org/sfl/MCL/sf/app/photos +https://developer.symbian.org/sfl/MCL/sf/app/printing +https://developer.symbian.org/sfl/MCL/sf/app/profile +https://developer.symbian.org/sfl/MCL/sf/app/radio +https://developer.symbian.org/sfl/MCL/sf/app/rndtools +https://developer.symbian.org/sfl/MCL/sf/app/screensaver +https://developer.symbian.org/sfl/MCL/sf/app/settingsuis +https://developer.symbian.org/sfl/MCL/sf/app/speechsrv +https://developer.symbian.org/sfl/MCL/sf/app/techview +https://developer.symbian.org/sfl/MCL/sf/app/utils +https://developer.symbian.org/sfl/MCL/sf/app/videoeditor +https://developer.symbian.org/sfl/MCL/sf/app/videoplayer +https://developer.symbian.org/sfl/MCL/sf/app/videotelephony 
+https://developer.symbian.org/sfl/MCL/sf/app/voicerec +https://developer.symbian.org/sfl/MCL/sf/mw/accesssec +https://developer.symbian.org/sfl/MCL/sf/mw/appinstall +https://developer.symbian.org/sfl/MCL/sf/mw/appsupport +https://developer.symbian.org/sfl/MCL/sf/mw/btservices +https://developer.symbian.org/sfl/MCL/sf/mw/camerasrv +https://developer.symbian.org/sfl/MCL/sf/mw/classicui +https://developer.symbian.org/sfl/MCL/sf/mw/dlnasrv +https://developer.symbian.org/sfl/MCL/sf/mw/drm +https://developer.symbian.org/sfl/MCL/sf/mw/gsprofilesrv +https://developer.symbian.org/sfl/MCL/sf/mw/hapticsservices +https://developer.symbian.org/sfl/MCL/sf/mw/helix +https://developer.symbian.org/sfl/MCL/sf/mw/homescreensrv +https://developer.symbian.org/sfl/MCL/sf/mw/imghandling +https://developer.symbian.org/sfl/MCL/sf/mw/imsrv +https://developer.symbian.org/sfl/MCL/sf/mw/inputmethods +https://developer.symbian.org/sfl/MCL/sf/mw/ipappprotocols +https://developer.symbian.org/sfl/MCL/sf/mw/ipappsrv +https://developer.symbian.org/sfl/MCL/sf/mw/ipconnmgmt +https://developer.symbian.org/sfl/MCL/sf/mw/legacypresence +https://developer.symbian.org/sfl/MCL/sf/mw/locationsrv +https://developer.symbian.org/sfl/MCL/sf/mw/mds +https://developer.symbian.org/sfl/MCL/sf/mw/messagingmw +https://developer.symbian.org/sfl/MCL/sf/mw/metadatasrv +https://developer.symbian.org/sfl/MCL/sf/mw/mmappfw +https://developer.symbian.org/sfl/MCL/sf/mw/mmmw +https://developer.symbian.org/sfl/MCL/sf/mw/mmuifw +https://developer.symbian.org/sfl/MCL/sf/mw/netprotocols +https://developer.symbian.org/sfl/MCL/sf/mw/networkingdm +https://developer.symbian.org/sfl/MCL/sf/mw/opensrv +https://developer.symbian.org/sfl/MCL/sf/mw/phonesrv +https://developer.symbian.org/sfl/MCL/sf/mw/platformtools +https://developer.symbian.org/sfl/MCL/sf/mw/remoteconn +https://developer.symbian.org/sfl/MCL/sf/mw/remotemgmt +https://developer.symbian.org/sfl/MCL/sf/mw/remotestorage +https://developer.symbian.org/sfl/MCL/sf/mw/securitysrv 
+https://developer.symbian.org/sfl/MCL/sf/mw/shortlinkconn +https://developer.symbian.org/sfl/MCL/sf/mw/srvdiscovery +https://developer.symbian.org/sfl/MCL/sf/mw/svgt +https://developer.symbian.org/sfl/MCL/sf/mw/uiaccelerator +https://developer.symbian.org/sfl/MCL/sf/mw/uiresources +https://developer.symbian.org/sfl/MCL/sf/mw/uitools +https://developer.symbian.org/sfl/MCL/sf/mw/usbservices +https://developer.symbian.org/sfl/MCL/sf/mw/videoutils +https://developer.symbian.org/sfl/MCL/sf/mw/vpnclient +https://developer.symbian.org/sfl/MCL/sf/mw/websrv +https://developer.symbian.org/sfl/MCL/sf/mw/wirelessacc +https://developer.symbian.org/sfl/MCL/sf/os/boardsupport +https://developer.symbian.org/sfl/MCL/sf/os/bt +https://developer.symbian.org/sfl/MCL/sf/os/buildtools +https://developer.symbian.org/sfl/MCL/sf/os/cellularsrv +https://developer.symbian.org/sfl/MCL/sf/os/commsfw +https://developer.symbian.org/sfl/MCL/sf/os/deviceplatformrelease +https://developer.symbian.org/sfl/MCL/sf/os/devicesrv +https://developer.symbian.org/sfl/MCL/sf/os/graphics +https://developer.symbian.org/sfl/MCL/sf/os/imagingext +https://developer.symbian.org/sfl/MCL/sf/os/kernelhwsrv +https://developer.symbian.org/sfl/MCL/sf/os/lbs +https://developer.symbian.org/sfl/MCL/sf/os/mm +https://developer.symbian.org/sfl/MCL/sf/os/networkingsrv +https://developer.symbian.org/sfl/MCL/sf/os/osrndtools +https://developer.symbian.org/sfl/MCL/sf/os/ossrv +https://developer.symbian.org/sfl/MCL/sf/os/persistentdata +https://developer.symbian.org/sfl/MCL/sf/os/security +https://developer.symbian.org/sfl/MCL/sf/os/textandloc +https://developer.symbian.org/sfl/MCL/sf/os/usb +https://developer.symbian.org/sfl/MCL/sf/os/wlan +https://developer.symbian.org/sfl/MCL/sf/os/xmlsrv +https://developer.symbian.org/sfl/MCL/sf/ostools/osrndtools +https://developer.symbian.org/sfl/MCL/sf/tools/build_s60 +https://developer.symbian.org/sfl/MCL/sf/tools/buildplatforms 
+https://developer.symbian.org/sfl/MCL/sf/tools/homescreentools +https://developer.symbian.org/sfl/MCL/sf/tools/makefile_templates +https://developer.symbian.org/sfl/MCL/sf/tools/platformtools +https://developer.symbian.org/sfl/MCL/sf/tools/rndtools +https://developer.symbian.org/sfl/MCL/sf/tools/swconfigtools diff -r 842a773e65f2 -r f6ae410bd493 clone_packages/sftools_oss_fcl_packages.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/clone_packages/sftools_oss_fcl_packages.txt Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,17 @@ +http://developer.symbian.org/oss/FCL/sftools/ana/compatanaapps +http://developer.symbian.org/oss/FCL/sftools/ana/compatanamdw +http://developer.symbian.org/oss/FCL/sftools/ana/staticanaapps +http://developer.symbian.org/oss/FCL/sftools/ana/staticanamdw +http://developer.symbian.org/oss/FCL/sftools/depl/docscontent +http://developer.symbian.org/oss/FCL/sftools/depl/doctools +http://developer.symbian.org/oss/FCL/sftools/dev/build +http://developer.symbian.org/oss/FCL/sftools/dev/eclipseenv/buildlayout34 +http://developer.symbian.org/oss/FCL/sftools/dev/eclipseenv/eclipse +http://developer.symbian.org/oss/FCL/sftools/dev/eclipseenv/wrttools/ +http://developer.symbian.org/oss/FCL/sftools/dev/hostenv/compilationtoolchains +http://developer.symbian.org/oss/FCL/sftools/dev/hostenv/cpptoolsplat +http://developer.symbian.org/oss/FCL/sftools/dev/hostenv/dist +http://developer.symbian.org/oss/FCL/sftools/dev/hostenv/javatoolsplat +http://developer.symbian.org/oss/FCL/sftools/dev/hostenv/makeng +http://developer.symbian.org/oss/FCL/sftools/dev/hostenv/pythontoolsplat +http://developer.symbian.org/oss/FCL/sftools/dev/ide/carbidecpp diff -r 842a773e65f2 -r f6ae410bd493 clone_packages/sftools_oss_mcl_packages.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/clone_packages/sftools_oss_mcl_packages.txt Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,19 @@ +http://developer.symbian.org/oss/MCL/sftools/ana/compatanaapps 
+http://developer.symbian.org/oss/MCL/sftools/ana/compatanamdw +http://developer.symbian.org/oss/MCL/sftools/ana/staticanaapps +http://developer.symbian.org/oss/MCL/sftools/ana/staticanamdw +http://developer.symbian.org/oss/MCL/sftools/depl/docscontent +http://developer.symbian.org/oss/MCL/sftools/depl/doctools +http://developer.symbian.org/oss/MCL/sftools/dev/build +http://developer.symbian.org/oss/MCL/sftools/dev/eclipseenv/buildlayout34 +http://developer.symbian.org/oss/MCL/sftools/dev/eclipseenv/buildlayout35 +http://developer.symbian.org/oss/MCL/sftools/dev/eclipseenv/eclipse +http://developer.symbian.org/oss/MCL/sftools/dev/eclipseenv/wrttools +http://developer.symbian.org/oss/MCL/sftools/dev/hostenv/compilationtoolchains +http://developer.symbian.org/oss/MCL/sftools/dev/hostenv/cpptoolsplat +http://developer.symbian.org/oss/MCL/sftools/dev/hostenv/dist +http://developer.symbian.org/oss/MCL/sftools/dev/hostenv/javatoolsplat +http://developer.symbian.org/oss/MCL/sftools/dev/hostenv/makeng +http://developer.symbian.org/oss/MCL/sftools/dev/hostenv/pythontoolsplat +http://developer.symbian.org/oss/MCL/sftools/dev/ide/carbidecpp +http://developer.symbian.org/oss/MCL/sftools/dev/ide/carbidecppplugins diff -r 842a773e65f2 -r f6ae410bd493 clone_packages/sftools_sfl_fcl_packages.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/clone_packages/sftools_sfl_fcl_packages.txt Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,22 @@ +https://developer.symbian.org/sfl/FCL/sftools/ana/compatanaapps +https://developer.symbian.org/sfl/FCL/sftools/ana/compatanamdw +https://developer.symbian.org/sfl/FCL/sftools/ana/dynaanaapps +https://developer.symbian.org/sfl/FCL/sftools/ana/dynaanactrlandcptr +https://developer.symbian.org/sfl/FCL/sftools/ana/dynaanamdw/analysistools +https://developer.symbian.org/sfl/FCL/sftools/ana/dynaanamdw/crashmdw +https://developer.symbian.org/sfl/FCL/sftools/ana/staticanaapps +https://developer.symbian.org/sfl/FCL/sftools/ana/staticanamdw 
+https://developer.symbian.org/sfl/FCL/sftools/ana/testcreationandmgmt +https://developer.symbian.org/sfl/FCL/sftools/ana/testexec +https://developer.symbian.org/sfl/FCL/sftools/ana/testfw +https://developer.symbian.org/sfl/FCL/sftools/depl/sdkcreationmdw/packaging +https://developer.symbian.org/sfl/FCL/sftools/depl/swconfigapps/configtools +https://developer.symbian.org/sfl/FCL/sftools/depl/swconfigapps/swmgnttoolsguides +https://developer.symbian.org/sfl/FCL/sftools/depl/swconfigapps/sysmodeltools +https://developer.symbian.org/sfl/FCL/sftools/depl/swconfigmdw +https://developer.symbian.org/sfl/FCL/sftools/dev/build +https://developer.symbian.org/sfl/FCL/sftools/dev/dbgsrvsmdw +https://developer.symbian.org/sfl/FCL/sftools/dev/devicedbgsrvs +https://developer.symbian.org/sfl/FCL/sftools/dev/ide/carbidecppplugins +https://developer.symbian.org/sfl/FCL/sftools/dev/iss +https://developer.symbian.org/sfl/FCL/sftools/dev/ui diff -r 842a773e65f2 -r f6ae410bd493 clone_packages/sftools_sfl_mcl_packages.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/clone_packages/sftools_sfl_mcl_packages.txt Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,23 @@ +https://developer.symbian.org/sfl/MCL/sftools/ana/compatanaapps +https://developer.symbian.org/sfl/MCL/sftools/ana/compatanamdw +https://developer.symbian.org/sfl/MCL/sftools/ana/dynaanaapps +https://developer.symbian.org/sfl/MCL/sftools/ana/dynaanactrlandcptr +https://developer.symbian.org/sfl/MCL/sftools/ana/dynaanamdw/analysistools +https://developer.symbian.org/sfl/MCL/sftools/ana/dynaanamdw/crashmdw +https://developer.symbian.org/sfl/MCL/sftools/ana/staticanaapps +https://developer.symbian.org/sfl/MCL/sftools/ana/staticanamdw +https://developer.symbian.org/sfl/MCL/sftools/ana/testcreationandmgmt +https://developer.symbian.org/sfl/MCL/sftools/ana/testexec +https://developer.symbian.org/sfl/MCL/sftools/ana/testfw +https://developer.symbian.org/sfl/MCL/sftools/depl/sdkcreationmdw/packaging 
+https://developer.symbian.org/sfl/MCL/sftools/depl/swconfigapps/configtools +https://developer.symbian.org/sfl/MCL/sftools/depl/swconfigapps/swmgnttoolsguides +https://developer.symbian.org/sfl/MCL/sftools/depl/swconfigapps/sysmodeltools +https://developer.symbian.org/sfl/MCL/sftools/depl/swconfigmdw +https://developer.symbian.org/sfl/MCL/sftools/dev/build +https://developer.symbian.org/sfl/MCL/sftools/dev/dbgsrvsmdw +https://developer.symbian.org/sfl/MCL/sftools/dev/devicedbgsrvs +https://developer.symbian.org/sfl/MCL/sftools/dev/ide/carbidecppplugins +https://developer.symbian.org/sfl/MCL/sftools/dev/iss +https://developer.symbian.org/sfl/MCL/sftools/dev/ui + diff -r 842a773e65f2 -r f6ae410bd493 code_churn/churn_core.pl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/code_churn/churn_core.pl Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,568 @@ +#!perl -w + +# Copyright (c) 2009 Symbian Foundation Ltd +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Symbian Foundation Ltd - initial contribution. +# +# Contributors: +# +# Description: +# + +use strict; +use File::Find; +use File::Copy; +use Cwd; + +sub diffstat(); + +my $Logs_Dir = $ARGV[0]; +my $dir_left = $ARGV[1]; +my $dir_right = $ARGV[2]; +my $dir_tmp_left = $ARGV[0].'\\'.$ARGV[1]; +my $dir_tmp_right = $ARGV[0].'\\'.$ARGV[2]; + +print "left changeset $dir_left\n"; +print "right chnageset $dir_right\n"; +mkdir $dir_tmp_left; +mkdir $dir_tmp_right; + +# default inclusions from churn.pl are "*.cpp", "*.c", "*.cxx", "*.h", "*.hpp", "*.inl" +my @file_pattern=('\.cpp$','\.c$','\.hpp$','\.h$','\.inl$','\.cxx$','\.hrh$'); +my $totallinecount=0; +my $countcomments=0; + +if (! 
-d $Logs_Dir) +{ + die("$Logs_Dir does not exist \n"); +} + +#$dir_left =~ m/^(\w+)\.[0-9a-fA-F]+/; +$dir_right =~ m/^(\w+)\.[0-9a-fA-F]+/; +my $package_name = $1; + +$dir_left =~ m/^\w+\.([0-9a-fA-F]+)/; +my $changeset_left = $1; + +$dir_right =~ m/^\w+\.([0-9a-fA-F]+)/; +my $changeset_right = $1; + +print "\nWorking on package: $package_name\n"; +print "\nProcessing $dir_left\n"; +find(\&process_files, $dir_left); +#DEBUG INFO: +print "\nTotal linecount for changed files in $dir_left is $totallinecount\n"; +my $code_size_left = $totallinecount; + +$totallinecount=0; +print "\nProcessing $dir_right\n"; +find(\&process_files, $dir_right); +#DEBUG INFO: +print "\nTotal linecount for changed files in $dir_right is $totallinecount\n"; +my $code_size_right = $totallinecount; + +my @diffs; + +if (-d $dir_tmp_left && -d $dir_tmp_left) +{ + @diffs = `diff -r -N $dir_tmp_left $dir_tmp_right`; +} + +my $changed_lines=@diffs; +my $diffsfile = $Logs_Dir.'\\'."dirdiffs.out"; +open (DIFFS, ">$diffsfile"); +print DIFFS @diffs; +close (DIFFS); + +diffstat(); + +$dir_tmp_left =~ s{/}{\\}g; +$dir_tmp_right =~ s{/}{\\}g; + +if (-d $dir_tmp_left) +{ + system("rmdir /S /Q $dir_tmp_left"); +} + +if (-d $dir_tmp_right) +{ +system("rmdir /S /Q $dir_tmp_right"); +} + +unlink $diffsfile; +unlink "$Logs_Dir\\line_count_newdir.txt"; + +print "\n** Finished processing $package_name **\n\n\n\n\n"; + +exit(0); + +sub diffstat() +{ +open (DIFFSFILE,"$diffsfile"); + +my $curfile = ""; +my %changes = (); + +while () +{ + my $line = $_; + # diff -r -N D:/mirror\fbf_churn_output\commsfw.000000000000\serialserver\c32serialserver\Test\te_C32Performance\USB PC Side Code\resource.h + # diff -r + if ($line =~ m/^diff -r.*\.[A-Fa-f0-9]{12}[\/\\](.*)\s*$/) + { + $curfile = $1; + #DEBUG INFO: + #print "\t$curfile\n"; + if (!defined $changes{$curfile}) + { + $changes{$curfile} = {'a'=>0,'c'=>0,'d'=>0,'filetype'=>'unknown'}; + } + + $curfile =~ m/\.(\w+)$/g; + + #if filetype known... 
+ my $filetype = $+; + + $changes{$curfile}->{'filetype'}=uc($filetype); + } + elsif ($line =~ m/^(\d+)(,(\d+))?(d)\d+(,\d+)?/) + { + if (defined $3) + { + $changes{$curfile}->{$4} += ($3-$1)+1; + } + else + { + $changes{$curfile}->{$4}++; + } + } + elsif ($line =~ m/^\d+(,\d+)?([ac])(\d+)(,(\d+))?/) + { + if (defined $5) + { + $changes{$curfile}->{$2} += ($5-$3)+1; + } + else + { + $changes{$curfile}->{$2}++; + } + } +} + +close (DIFFSFILE); + +my %package_changes = ("CPP"=>0, "H"=>0, "HPP"=>0, "INL"=>0, "C"=>0, "CXX"=>0,"HRH"=>0,); +my %package_deletions = ("CPP"=>0, "H"=>0, "HPP"=>0, "INL"=>0, "C"=>0, "CXX"=>0,"HRH"=>0,); +my %package_additions = ("CPP"=>0, "H"=>0, "HPP"=>0, "INL"=>0, "C"=>0, "CXX"=>0,"HRH"=>0,); +my $package_churn = 0; + +for my $file (keys %changes) +{ + $package_changes{$changes{$file}->{'filetype'}} += $changes{$file}->{'c'}; + $package_deletions{$changes{$file}->{'filetype'}} += $changes{$file}->{'d'}; + $package_additions{$changes{$file}->{'filetype'}} += $changes{$file}->{'a'}; +} + + +#DEBUG INFO: For printing contents of hashes containing per filetype summary +#print "\n\n\n\n"; +#print "package_changes:\n"; +#print map { "$_ => $package_changes{$_}\n" } keys %package_changes; +#print "\n\n\n\n"; +#print "package_deletions:\n"; +#print map { "$_ => $package_deletions{$_}\n" } keys %package_deletions; +#print "\n\n\n\n"; +#print "package_additions:\n"; +#print map { "$_ => $package_additions{$_}\n" } keys %package_additions; + + + +my $overall_changes = 0; +for my $filetype (keys %package_changes) +{ + $overall_changes += $package_changes{$filetype}; +} + +my $overall_deletions = 0; +for my $filetype (keys %package_deletions) +{ + $overall_deletions += $package_deletions{$filetype}; +} + +my $overall_additions = 0; +for my $filetype (keys %package_additions) +{ + $overall_additions += $package_additions{$filetype}; +} + + +$package_churn = $overall_changes + $overall_additions; + +print "\n\n\n\nSummary for Package: $package_name\n"; 
+print "-------------------\n"; +print "Changesets Compared: $dir_left and $dir_right\n"; +#print "Code Size for $dir_left = $code_size_left lines\n"; +#print "Code Size for $dir_right = $code_size_right lines\n"; +print "Total Lines Changed = $overall_changes\n"; +print "Total Lines Added = $overall_additions\n"; +print "Total Lines Deleted = $overall_deletions\n"; +print "Package Churn = $package_churn lines\n"; + +my @header = qw(filetype a c d); + +my $outputfile = $Logs_Dir.'\\'."$package_name\_diffstat.csv"; +open(PKGSTATCSV, ">$outputfile") or die "Coudln't open $outputfile"; + + + +print PKGSTATCSV " SF CODE-CHURN SUMMARY\n"; +print PKGSTATCSV "Package: $package_name\n"; +print PKGSTATCSV "Changesets Compared: $dir_left and $dir_right\n"; +#print PKGSTATCSV "Code Size for $dir_left = $code_size_left lines\n"; +#print PKGSTATCSV "Code Size for $dir_right = $code_size_right lines\n"; +print PKGSTATCSV "Total Lines Changed = $overall_changes\n"; +print PKGSTATCSV "Total Lines Added = $overall_additions\n"; +print PKGSTATCSV "Total Lines Deleted = $overall_deletions\n"; +print PKGSTATCSV "Package Churn = $package_churn lines\n\n\n\n\n"; + + + + +# print the header +print PKGSTATCSV "FILENAME,"; + +foreach my $name (@header) +{ + if ($name eq 'filetype') + { + print PKGSTATCSV uc($name).","; + } + elsif ($name eq 'a') + { + print PKGSTATCSV "LINES_ADDED,"; + } + elsif ($name eq 'c') + { + print PKGSTATCSV "LINES_CHANGED,"; + } + elsif ($name eq 'd') + { + print PKGSTATCSV "LINES_DELETED,"; + } + +} + +print PKGSTATCSV "\n"; + +foreach my $file (sort keys %changes) +{ + print PKGSTATCSV $file.","; + foreach my $key (@header) + { + if(defined $changes{$file}->{$key}) + { + print PKGSTATCSV $changes{$file}->{$key}; + } + print PKGSTATCSV ","; + } + print PKGSTATCSV "\n"; +} + +close (PKGSTATCSV); + + + +my $diffstat_summary = $Logs_Dir.'\\'."diffstat_summary.csv"; + +if (-e $diffstat_summary) +{ + open(DIFFSTATCSV, ">>$diffstat_summary") or die "Coudln't open 
$outputfile"; + print DIFFSTATCSV "$package_name,"; + print DIFFSTATCSV "$changeset_left,"; + print DIFFSTATCSV "$changeset_right,"; + + #print DIFFSTATCSV ","; + + foreach my $filetype (sort keys %package_changes) + { + if(defined $package_changes{$filetype}) + { + print DIFFSTATCSV $package_changes{$filetype}.","; + } + } + + #print DIFFSTATCSV ","; + + foreach my $filetype (sort keys %package_additions) + { + if(defined $package_additions{$filetype}) + { + print DIFFSTATCSV $package_additions{$filetype}.","; + + } + } + + #print DIFFSTATCSV ","; + + foreach my $filetype (sort keys %package_deletions) + { + if(defined $package_deletions{$filetype}) + { + print DIFFSTATCSV $package_deletions{$filetype}.","; + #print DIFFSTATCSV ","; + } + } + + #print DIFFSTATCSV ","; + print DIFFSTATCSV "$overall_changes,"; + print DIFFSTATCSV "$overall_additions,"; + print DIFFSTATCSV "$overall_deletions,"; + print DIFFSTATCSV "$package_churn,"; + + print DIFFSTATCSV "\n"; + + close (DIFFSTATCSV); +} +else +{ + open(DIFFSTATCSV, ">$diffstat_summary") or die "Couldn't open $outputfile"; + + # print the header + print DIFFSTATCSV "PACKAGE_NAME,"; + print DIFFSTATCSV "LEFT_CHANGESET,"; + print DIFFSTATCSV "RIGHT_CHANGESET,"; + + #print DIFFSTATCSV ","; + + foreach my $name (sort keys %package_changes) + { + print DIFFSTATCSV $name." CHANGES,"; + } + #print DIFFSTATCSV ","; + + + foreach my $name (sort keys %package_additions) + { + print DIFFSTATCSV $name." ADDITIONS,"; + } + #print DIFFSTATCSV ","; + + + foreach my $name (sort keys %package_deletions) + { + print DIFFSTATCSV $name." 
DELETIONS,"; + } + #print DIFFSTATCSV ","; + + print DIFFSTATCSV "PACKAGE_CHANGES,"; + print DIFFSTATCSV "PACKAGE_ADDITIONS,"; + print DIFFSTATCSV "PACKAGE_DELETIONS,"; + print DIFFSTATCSV "PACKAGE_CHURN,"; + print DIFFSTATCSV "\n"; + + + print DIFFSTATCSV "$package_name,"; + + print DIFFSTATCSV "$changeset_left,"; + print DIFFSTATCSV "$changeset_right,"; + + #print DIFFSTATCSV ","; + + foreach my $filetype (sort keys %package_changes) + { + if(defined $package_changes{$filetype}) + { + print DIFFSTATCSV $package_changes{$filetype}.","; + } + } + + #print DIFFSTATCSV ","; + + foreach my $filetype (sort keys %package_additions) + { + if(defined $package_additions{$filetype}) + { + print DIFFSTATCSV $package_additions{$filetype}.","; + + } + } + + #print DIFFSTATCSV ","; + + foreach my $filetype (sort keys %package_deletions) + { + if(defined $package_deletions{$filetype}) + { + print DIFFSTATCSV $package_deletions{$filetype}.","; + } + } + + #print DIFFSTATCSV ","; + print DIFFSTATCSV "$overall_changes,"; + print DIFFSTATCSV "$overall_additions,"; + print DIFFSTATCSV "$overall_deletions,"; + print DIFFSTATCSV "$package_churn,"; + + print DIFFSTATCSV "\n"; + + close (DIFFSTATCSV); +} + + + +} + +sub process_files() +{ + my $lfile = $_; + my $lfile_fullpath=$File::Find::name; + $lfile_fullpath =~ s#\/#\\#g; + #print "$lfile\t\tFull path $lfile_fullpath\n" ; + if (-f $lfile) + { + foreach my $regpat (@file_pattern) + { + if (lc($lfile) =~ m/$regpat/) + { + $lfile =~ s#\/#\\#g; + #print "Processing file $lfile (Matched $regpat) \n"; #ck + #print `type $lfile`; + # We copy mathching files to a separate temp directory + # so that the final diff can simply diff the full dir + # Note : RemoveNoneLOC routine edits the file in-situ. 
+ my $lfile_abs = cwd().'\\'.$lfile; + my $lfile_local = $Logs_Dir.'\\'.$lfile_fullpath; + makepath($lfile_local); + print "%"; + copy($lfile_abs,$lfile_local); + $totallinecount += RemoveNonLOC( $lfile, $lfile_local, "newdir" ); + } + } + } +} + + +sub makepath() +{ + my $absfile = shift; + $absfile =~ s#\\#\/#g; + my @dirs = split /\//, $absfile; + pop @dirs; # throw away the filename + my $path = ""; + foreach my $dir (@dirs) + { + $path = ($path eq "") ? $dir : "$path/$dir"; + if (!-d $path) + { +# print "making $path \n"; + mkdir $path; + } + } +} + + +sub RemoveNonLOC($$$) { + + # Gather arguments + my $file = shift; + my $original_file = shift; + my $type_of_dir = shift; + +# print("\nDebug: in ProcessFile, file is $file, full file + path is $original_file \n"); + + # Remove comments... + + # Set up the temporary files that will be used to perform the processing steps + my $temp1File = $original_file."temp1"; + my $temp2File = $original_file."temp2"; + + open(TEMP1, "+>$temp1File"); + + if (!($countcomments)) { + + # Remove any comments from the file + my $original_file_string; + open INPUT, "<", $original_file; + { + local $/ = undef; + $original_file_string = <INPUT>; + } + close INPUT; + + my $dbl = qr/"[^"\\]*(?:\\.[^"\\]*)*"/s; + my $sgl = qr/'[^'\\]*(?:\\.[^'\\]*)*'/s; + + my $C = qr{/\*.*?\*/}s; # C style comments /* */ + my $CPP = qr{//.*}; # C+ style comments // + my $com = qr{$C|$CPP}; + my $other = qr{.[^/"'\\]*}s; # all other '" + my $keep = qr{$sgl|$dbl|$other}; + + #Remove the comments (need to turn off warnings on the next regexp for uninitialised variable) +no warnings 'uninitialized'; + + $original_file_string=~ s/$com|($keep)/$1/gom; + print TEMP1 "$original_file_string"; + +use warnings 'uninitialized'; + } + else { + + print("\n option --CountComments specified so comments will be included in the count\n"); + #Just copy over original with comments still in it + copy($original_file,$temp1File); + } + + close(TEMP1); + + + # Remove blank lines... 
+# print("\nDebug: Getting rid of blank lines in \n$temp1File to produce \n$temp2File \n"); + open (TEMP1, "+<$temp1File"); # include lines + pre-processed code + open (TEMP2, "+>$temp2File"); + + while (<TEMP1>) { + + if (!(/^\s*\n$/)) { # if line isn't blank write it to the new file + print TEMP2 $_; + } + } + close(TEMP1); + close(TEMP2); + + #Copy the final file to the original file. This updated file will form the input to diff later. + #todo dont need chmod now? + chmod(oct("0777"), $original_file) or warn "\nCannot chmod $original_file : $!\n"; +# print("\nCopying $temp2File\n to \n$original_file\n"); + + #system("copy /Y \"$temp2File\" \"$original_file\"") == 0 + #or print "\nERROR: Copy of $temp2File to $original_file failed\n"; + copy($temp2File,$original_file); + + # Store original file size + + open(LINECOUNT, ">>$Logs_Dir\\line_count_$type_of_dir.txt"); + open(SOURCEFILE, "<$original_file"); + + my @source_code = <SOURCEFILE>; + print LINECOUNT "\n$original_file "; + my $linecount = scalar(@source_code); +# print LINECOUNT scalar(@source_code); + print LINECOUNT $linecount; + + close(LINECOUNT); + close(SOURCEFILE); + + #system("del /F /Q $Logs_Dir\\line_count_$type_of_dir.txt"); + + #Delete the temporary files + unlink($temp1File); + unlink($temp2File); + + return $linecount; +} \ No newline at end of file diff -r 842a773e65f2 -r f6ae410bd493 code_churn/fbf_churn.pl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/code_churn/fbf_churn.pl Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,158 @@ +#! perl -w + +# Copyright (c) 2009 Symbian Foundation Ltd +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Symbian Foundation Ltd - initial contribution. 
+# +# Contributors: +# +# Description: +# + +use strict; +use Getopt::Long; + +use FindBin; +#my $churn_core = "D:\\mirror\\churn_core.pl"; +my $churn_core = "$FindBin::Bin\\churn_core.pl"; +my $churn_output_temp = "$FindBin::Bin\\fbf_churn_output"; +mkdir $churn_output_temp; + +my $path = $FindBin::Bin; +$path =~ s/\//\\/g; +my $clone_packages = "$path\\..\\clone_packages\\clone_all_packages.pl"; + + +sub Usage($) + { + my ($msg) = @_; + + print "$msg\n\n" if ($msg ne ""); + + print <<'EOF'; + + +fbf_churn.pl - simple script for calculating code churn in between two revisions +or labels for a package. This script can also be used to calculate code size for +a package. + +When used without a package name or filter, this script runs for all the packages +in the BOM (build-info.xml) file supplied to it. + +Important: + This script uses clone_all_packages.pl which clones all repositories listed in + the BOM or pulls changes into a previously cloned repository. + + This script uses its accompanying script churn_core.pl - which should be + present in the same directory as this script. + +Limitations: + If a BOM is not supplied to the script using the -bom option, then the script + runs on the package locations inside both MCL and FCL producing two results + for a single package. For running the script for calculating code churn between + two release builds (using labels) or for calculating code size for a release build, + it is essential that a BOM (preferably for the newer build) is passed as an + argument using the -bom option. 
+ + +Options: + +-o --old old revision or label for a package/repository + +-n --new new revision or label for a package/repository + +--rev revision for package/repository - Use this while calculating code size for a single package + +--label revision tag for package or release build - Use this while calculating code size + +-bom --bom build-info.xml files supplied with Symbian PDKs + +-verbose print the underlying "clone_all_packages" & "hg" commands before executing them + +-help print this help information + +-package only process repositories matching regular expression + +-filter only process repositories matching regular expression + +EOF + exit (1); + } + +print "\n\n==Symbian Foundation Code Churn Tool v1.0==\n\n"; + + + +my $old = "null"; +my $new = ""; +my $filter = ""; +my $codeline = ""; +my $package = ""; +my $licence = ""; +my $packagelist = ""; +my $verbose = 0; +my $mirror = 0; +my $help = 0; + +sub do_system + { + my (@args) = @_; + print "* ", join(" ", @args), "\n" if ($verbose); + return system(@args); + } + +# Analyse the command-line parameters +if (!GetOptions( + "n|new-rev|new-label|label|rev=s" => \$new, + "o|old-rev|old-label=s" => \$old, + "f|filter=s" => \$filter, + "p|package=s" => \$filter, + "cl|codeline=s" => \$codeline, + "li|licence=s" => \$licence, + "bom|bom=s" => \$packagelist, + "v|verbose" => \$verbose, + "h|help" => \$help, + )) + { + Usage("Invalid argument"); + } + +Usage("") if ($help); +Usage("Too few arguments....use at least one from -n|new-rev|new-label|label|rev or -bom") if ($new eq "" && $packagelist eq ""); +#Usage("Too many arguments") if ($new ne "" && $packagelist ne ""); + + +if ($old eq 'null') + { + print "\nCode size calculation....\n"; + } +else + { + print "\nCode churn calculation....\n"; + } + + +my @packagelistopts = (); +@packagelistopts = ("-packagelist", $packagelist) if ($packagelist ne ""); + +my @verboseopt = (); +@verboseopt = "-v" if ($verbose); + +my @mirroropt = (); +@mirroropt = "-mirror" if 
($mirror); + +my $new_rev = $new; +$new_rev = "%REV%" if ($new_rev eq ""); + +#TO_DO: Locate clone_all_packages relative to the location of this script. +#TO_DO: Remove references to absolute paths, change to relative paths. +do_system($clone_packages,@verboseopt,@mirroropt,"-filter","$licence.*$codeline.*$filter",@packagelistopts,"-exec","--", + "hg","--config","\"extensions.hgext.extdiff=\"","extdiff","-p",$churn_core,"-o",$churn_output_temp, + "-r","$old","-r","$new_rev"); + +exit(0); diff -r 842a773e65f2 -r f6ae410bd493 dbrtools/dbr.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dbrtools/dbr.py Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,46 @@ +# Copyright (c) 2009 Symbian Foundation Ltd +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Symbian Foundation Ltd - initial contribution. +# +# Contributors: +# mattd +# +# Description: +# DBR - the root DBR script that farms out the jobs to the other scripts + +import sys +import os.path + +def main(): + print 'MattD: Need to fix the import path properly!' 
+ dbrpath = os.path.join(os.path.dirname(sys.argv[0]),'dbr') + sys.path.append(dbrpath) + args = sys.argv + if(len(sys.argv)>1): + cmd = sys.argv[1] + args.pop(0) + args.pop(0) + + if(cmd): + try: + command = __import__ (cmd) + command.run(args) + except ImportError: + help(args) + else: + help(args) + +def help(args): + try: + command = __import__ ('help') + command.run(args) + except ImportError: + print "error: Cannot find DBR tools help in %s" % os.path.join(os.path.dirname(sys.argv[0]),'dbr') + +main() + diff -r 842a773e65f2 -r f6ae410bd493 dbrtools/dbr/checkenv.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dbrtools/dbr/checkenv.py Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,45 @@ +# Copyright (c) 2009 Symbian Foundation Ltd +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Symbian Foundation Ltd - initial contribution. 
+# +# Contributors: +# mattd +# +# Description: +# DBR checkenv - Checks your environment against what was installed + +import dbrbaseline +import dbrpatch +import dbrutils + +import os.path + +def main(): + dbfilename = dbrutils.defaultdb() + + baseline = dbrbaseline.readdb(dbfilename) + if(len(baseline ) > 0): + patches = dbrpatch.loadpatches(dbrpatch.dbrutils.patchpath()) + db = dbrpatch.createpatchedbaseline(baseline,patches) + env = dbrutils.scanenv() + dbrpatch.newupdatedb(db,env) + baseline = dbrpatch.updatebaseline(baseline, db) + patches = dbrpatch.updatepatches(patches, db) + + dbrpatch.savepatches(patches) + else: + baseline = dbrbaseline.createdb() + dbrbaseline.writedb(baseline,dbfilename) + + +def run(args): + main() + +def help(): + print "Shows the current state of the environment" + print "Usage\n\tdbr checkenv" \ No newline at end of file diff -r 842a773e65f2 -r f6ae410bd493 dbrtools/dbr/cleanenv.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dbrtools/dbr/cleanenv.py Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,59 @@ +# Copyright (c) 2009 Symbian Foundation Ltd +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Symbian Foundation Ltd - initial contribution. 
+# +# Contributors: +# mattd +# +# Description: +# DBR cleanenv - cleans your environment + +import dbrbaseline +import dbrpatch +import dbrutils + +import re #temporary for dealing with patches + +def main(args): + zippath = '/' + if(len(args)): + zippath = args[0] + + dbfilename = dbrutils.defaultdb() + baseline = dbrbaseline.readdb(dbfilename) + if(len(baseline ) > 0): + env = dbrutils.scanenv() + patches = dbrpatch.loadpatches(dbrpatch.dbrutils.patchpath()) + db = dbrpatch.createpatchedbaseline(baseline,patches) + results = dbrpatch.newupdatedb(db,env) + dbrutils.deletefiles(sorted(results['added'])) + required = set() + required.update(results['removed']) + required.update(results['changed']) + required.update(results['untestable']) #untestable is going to be a problem... + dbrutils.extractfiles(required, zippath) + for name in sorted(patches): + dbrutils.extractfromzip(required, re.sub('.txt','.zip',name),'') + + env = dbrutils.scanenv() + results2 = dbrpatch.newupdatedb(db,env) + + baseline = dbrpatch.updatebaseline(baseline, db) + patches = dbrpatch.updatepatches(patches, db) + + dbrpatch.savepatches(patches) + + +def run(args): + main(args) + +def help(): + print "Cleans the current environment" + print "Usage\n\tdbr cleanenv ()" + print "\nDefault behaviour presumes baselie zips exist at the root" + \ No newline at end of file diff -r 842a773e65f2 -r f6ae410bd493 dbrtools/dbr/createpatch.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dbrtools/dbr/createpatch.py Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,45 @@ +# Copyright (c) 2009 Symbian Foundation Ltd +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Symbian Foundation Ltd - initial contribution. 
+# +# Contributors: +# mattd +# +# Description: +# DBR createpatch - Creates a patch of the changes made to a patched baseline + +import sys +import dbrbaseline +import dbrpatch +import dbrutils + +def run(args): + if(len(args)): + dbfilename = dbrutils.defaultdb() + patchname = args[0] + if(patchname): + print 'Creating Patch:%s\n' % patchname + baseline = dbrbaseline.readdb(dbfilename) + if(len(baseline) > 0): + patches = dbrpatch.loadpatches(dbrpatch.dbrutils.patchpath()) + db = dbrpatch.createpatchedbaseline(baseline,patches) + env = dbrutils.scanenv() + db = dbrpatch.newcreatepatch(patchname,db,env) + baseline = dbrpatch.updatebaseline(baseline, db) + patches = dbrpatch.updatepatches(patches, db) + dbrpatch.savepatches(patches) + dbrbaseline.writedb(baseline,dbfilename) + else: + help() + else: + help() + +def help(): + print 'usage: Createpatch ' + + diff -r 842a773e65f2 -r f6ae410bd493 dbrtools/dbr/dbrarchive.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dbrtools/dbr/dbrarchive.py Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,55 @@ +# Copyright (c) 2009 Symbian Foundation Ltd +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Symbian Foundation Ltd - initial contribution. 
+# +# Contributors: +# mattd +# +# Description: +# DBR archive - handles archives - not used at present + +import dbrutils +import re + +def readarchives(dbfile): + db = dict() + if(isfile(dbfile)): + file = open(dbfile,'r') + for line in file: + #file structure 'name:zip + results = re.split(',|\n',line) + db[results[0]] = results[1] + file.close() + return db + +def writearchives(db, dbfile): + file = open(dbfile,'w') + for archive in sorted(db): + str = "%s,%s\n" % (archive, db[archive]) + file.write(str) + file.close() + +def archivefile(): + return '/epoc32/relinfo/archive.txt' + +def extract(archive,files): + + db = readarchives(archivefile()) + if(archive in db): + dbrutils.unzipfiles(db[archive],files) + elif(re.search('baseline', archive)): #Nasty + for zip in sorted(db): + if(re.search('baseline', zip)): + dbrutils.unzipfiles(db[zip],files) + +def install(zip): #nasty at the moment... +# archives = readarchives(archivefile()) + unzip(zip) + + + diff -r 842a773e65f2 -r f6ae410bd493 dbrtools/dbr/dbrbaseline.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dbrtools/dbr/dbrbaseline.py Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,156 @@ +# Copyright (c) 2009 Symbian Foundation Ltd +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Symbian Foundation Ltd - initial contribution. 
+# +# Contributors: +# mattd +# +# Description: +# DBRbaseline - module for handling vanilla baselines +# + + +import re +import os +import string +import glob +import tempfile +import shutil +from os.path import join, isfile, stat +from stat import * +import dbrutils + + + +def readdb(dbfile): + db = dict() + if(isfile(dbfile)): + file = open(dbfile,'r') +# regex = re.compile('(\S+)\s+(\S+)\s+(\S+)\s+(.+)\n') + for line in file: + #file structure 'timestamp size hash filename' avoids the problems of spaces in names, etc... + results = re.split(':|\n',line) + if(len(results) > 3): + entry = dict() + entry['time'] = results[0] + entry['size'] = results[1] + entry['md5'] = results[2] + if(results[4]): + entry['archive'] = results[4] + print entry['archive'] + db[results[3]] = entry +# db[results[3]] = [results[0],results[1],results[2]] +# bits = regex.match(line) +# if(bits): +# db[bits.group(3)] = [bits.group(0), bits.group(1), bits.group(2)] + file.close() + return db + +def writedb(db, dbfile): +# print 'Writing db to', dbfile + file = open(dbfile,'w') + for filename in sorted(db): + if (len(db[filename]) < 3): + db[filename].append('') + str = "%s:%s:%s:%s" %( db[filename]['time'],db[filename]['size'],db[filename]['md5'], filename) + if('archive' in db[filename]): + str = "%s:%s" %(str,db[filename]['archive']) +# if(db[filename]['md5'] == 'xxx'): +# print 'Warning: no MD5 for %s' % filename +# str = "%s:%s:%s:%s\n" %( db[filename][0],db[filename][1],db[filename][2], filename) + file.write('%s\n' % str) + file.close() + +def md5test(db, md5testset): + changed = set() + md5s = dbrutils.generateMD5s(md5testset) + for file in md5testset: + if(db[file]['md5'] != md5s[file]['md5']): + changed.add(file) + return changed + + +def updatedb(db1, db2): + compareupdatedb(db1, db2, 1) + +def comparedb(db1, db2): + compareupdatedb(db1, db2, 0) + +def compareupdatedb(db1, db2, update): + print "compareupdatedb() is deprecated" + db1files = set(db1.keys()) + db2files = 
set(db2.keys()) + removed = db1files - db2files + added = db2files - db1files + common = db1files & db2files + + touched = set() + for file in common: + if(db1[file]['time'] != db2[file]['time']): + touched.add(file) + + sizechanged = set() + for file in common: + if(db1[file]['size'] != db2[file]['size']): + sizechanged.add(file) + + #pobably won't bother with size changed... we know they're different... +# md5testset = touched - sizechanged + md5testset = touched + + changed = md5test(db1,md5testset) + + #remove the ones we know are changed + touched = touched - changed + + print 'Comparing dbs/n' + for file in sorted(added): + print 'added:', file + for file in sorted(removed): + print 'removed:', file + for file in sorted(touched): + print 'touched:', file + for file in sorted(changed): + print 'changed:', file + + #update the touched... + if(update): + for file in sorted(touched): + print 'Updating timestamp for: ',file + db1[file]['time'] = db2[file]['time'] + +def createdb(): + print 'creating db...Move CreateDB into dbrutils!!!' + env = dbrutils.scanenv() + hashes = glob.glob(os.path.join(dbrutils.patchpath(),'*.md5')) + for file in hashes: + print 'Reading: %s\n' % file + dbrutils.gethashes(env, file, False) + return env + + +def readzippeddb(drive): + env = dict() + #Note that this is really crude. I'm seeing if it'll work before cleaning things up... 
+ #see if we have a build_md5.zip file + md5zip = os.path.join(drive,'build_md5.zip') + temp_dir = tempfile.mkdtemp() + print temp_dir + if(os.path.exists(md5zip)): + files = set(); + files.add('*') + dbrutils.extractfromzip(files,md5zip,temp_dir) + globsearch = os.path.join(temp_dir, os.path.join(dbrutils.patch_path_internal(),'*.md5')) + print globsearch + hashes = glob.glob(globsearch) + for file in hashes: + print 'Reading: %s\n' % file + dbrutils.gethashes(env, file, True) + shutil.rmtree(temp_dir) + return env + diff -r 842a773e65f2 -r f6ae410bd493 dbrtools/dbr/dbrpatch.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dbrtools/dbr/dbrpatch.py Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,333 @@ +# Copyright (c) 2009 Symbian Foundation Ltd +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Symbian Foundation Ltd - initial contribution. +# +# Contributors: +# mattd +# +# Description: +# DBRpatch - module for handling patched baselines + +import re +import os.path #used for 'listpatches' +import string +import glob +import dbrutils +import dbrbaseline + +def newcompare(db1, db2): + db1files = set(db1.keys()) + db2files = set(db2.keys()) + + removed = db1files - db2files + added = db2files - db1files + common = db1files & db2files + + touched = set() + for file in common: + if(db1[file]['time'] != db2[file]['time']): + touched.add(file) + + sizechanged = set() + for file in common: + if(db1[file]['size'] != db2[file]['size']): + sizechanged.add(file) + + changed = set() + + genmd5 = 1 #I probably want to try to generate... add this as a third arg??? 
+ + if(len(touched)): + if(genmd5): + md5testset = set() + for file in touched: + if((db1[file]['md5'] != 'xxx' ) and (db2[file]['md5'] == 'xxx')): #no point geenrating an MD5 if we've nothing to compare it to... +# print 'testing %s' % file + md5testset.add(file) + md5s = dbrutils.generateMD5s(md5testset) + for file in md5testset: + db2[file]['md5'] = md5s[file]['md5'] + for file in touched: + if(db1[file]['md5'] != db2[file]['md5']): + changed.add(file) + touched = touched - changed + + untestable1 = set() + untestable2 = set() + for file in common: + if(db1[file]['md5'] == "xxx"): + untestable1.add(file) + if(db2[file]['md5'] == 'xxx'): + untestable2.add(file) + + untestable = untestable1 & untestable2 + changed = changed - untestable + + #remove the ones we know are changed + touched = touched - changed + touched = touched - untestable + + results = dict() + results['added'] = dict() + results['removed'] = dict() + results['touched'] = dict() + results['changed'] = dict() + results['untestable'] = dict() + + for file in added: + results['added'][file] = db2[file] + for file in removed: + results['removed'][file] = 0 + for file in touched: + results['touched'][file] = db2[file] + for file in changed: + results['changed'][file] = db2[file] + for file in untestable: + results['untestable'][file] = 0 + return results + +def printresults(results): + for file in sorted (results['added']): + print 'added:', file + for file in sorted (results['removed']): + print 'removed:', file + for file in sorted (results['touched']): + print 'touched:', file + for file in sorted (results['changed']): + print 'changed:', file + for file in sorted (results['untestable']): + print 'untestable:', file + if(len(results['added']) + len(results['removed']) + len(results['changed']) + len(results['untestable']) == 0): + print '\nStatus:\tclean' + else: + print '\nStatus:\tdirty' + +def newupdatedb(baseline,env): + results = newcompare(baseline, env) + printresults(results) + for file in 
results['touched']: + baseline[file]['time'] = env[file]['time'] + return results + +def newcreatepatch(name, db1, db2): + results = newcompare(db1, db2) + printresults(results) + for file in results['touched']: + db1[file]['time'] = db2[file]['time'] + + patch = dict(); + patch['name'] = name + patch['time'] = 'now!!!' + patch['removed'] = results['removed'] + added = results['added'].keys() + md5sAdded = dbrutils.generateMD5s(added) + for file in added: + results['added'][file]['md5'] = md5sAdded[file]['md5'] + patch['added'] = results['added'] + print "Need to add in the untestable stuff here also!!!" + patch['changed'] = results['changed'] + patchname = "%spatch_%s" %(dbrutils.patchpath(), name) + + createpatchzip(patch, patchname) + + #update the ownership + for file in patch['changed']: + db1[file]['name'] = name + + return db1 + +def newcomparepatcheddbs(drive1, drive2): + envdbroot = dbrutils.defaultdb() + print "MattD: should move this function to a better location..." + print 'Comparing %s with %s' % (drive2,drive1) + + db1 = loadpatcheddb(drive1) + db2 = loadpatcheddb(drive2) + + results = newcompare(db1, db2) + printresults(results) + +def loadpatcheddb(drive): + envdbroot = dbrutils.defaultdb() + print 'Loading %s' % drive + baseline = dbrbaseline.readdb('%s%s' %(drive,envdbroot)) + if(len(baseline) > 0): + patches = loadpatches('%s/%s' %(drive,dbrutils.patchpath())) + return createpatchedbaseline(baseline,patches) + else: + return dbrbaseline.readzippeddb(drive) + +def createpatchzip(patch, patchname): + patchtext = '%s.txt' % patchname + patchtext = os.path.join(dbrutils.patchpath(),patchtext) + + writepatch(patch, patchtext) + files = set() + files.update(patch['added']) + files.update(patch['changed']) + files.add(re.sub('\\\\','',patchtext)) #remove leading slash - Nasty - need to fix the whole EPOCROOT thing. 
+ + zipname = '%s.zip' % patchname + dbrutils.createzip(files, zipname) + + +def updatebaseline(baseline, db): + for file in (db.keys()): + origin = db[file]['name'] + if(origin == 'baseline'): + if(baseline[file]['time'] != db[file]['time']): + baseline[file]['time'] = db[file]['time'] + print 'Updating timestamp for %s in baseline' % file + return baseline + +def updatepatches(patches, db): + for file in (db.keys()): + origin = db[file]['name'] + for patch in patches.keys(): + if(patches[patch]['name'] == origin): + mod=0 + if(file in patches[patch]['added']): + mod = 'added' + if(file in patches[patch]['changed']): + mod = 'changed' + if(mod): + if (patches[patch][mod][file]['time'] != db[file]['time']): + patches[patch][mod][file]['time'] = db[file]['time'] + print 'Updating timestamp in %s for %s' %(patches[patch]['name'],file) + return patches + + +def createpatchedbaseline(baseline,patches): + files = dict() + files = addtodb(files,baseline,'baseline') + for patch in sorted(patches.keys()): +# print 'adding patch: %s' % patch + files = addtodb(files,patches[patch]['added'],patches[patch]['name']) + files = addtodb(files,patches[patch]['changed'],patches[patch]['name']) + files = removefromdb(files,patches[patch]['removed'],patches[patch]['name']) + return files + +def removefromdb(db,removed,name): + for file in removed: + if(file in db): +# print '%s removing %s' %(name,file) + del db[file] + return db + +def addtodb(db,new,name): + for file in new: + if(file not in db): + db[file] = dict() +# else: +# print '%s overriding %s' % (name,file) + db[file]['time'] = new[file]['time'] + db[file]['md5'] = new[file]['md5'] + db[file]['size'] = new[file]['size'] + db[file]['name'] = name + return db + +def listpatches(): + path = dbrutils.patchpath() + patchfiles = glob.glob('%spatch*.txt' % path) + print 'Installed patches' + for file in patchfiles: + print '\t%s' % re.sub('.txt','',os.path.basename(file)) + +def removepatch(patch): + path = dbrutils.patchpath() + 
file = '%s%s%s' %(path,patch,'.txt') + files = set() + files.add(file) + dbrutils.deletefiles(files) + + +def loadpatches(path): + patches = dict() + patchfiles = glob.glob('%spatch*.txt' % path) + + for file in patchfiles: + print 'Loading patch: %s' % re.sub('.txt','',os.path.basename(file)) +# print 'Reading: %s\n' % file +# patchname = re.match('\S+patch(\S+)\.txt',file) +# print 'patchname %s' % patchname.group(1); + patch = readpatch(file) +# patches[patchname.group(1)] = patch +# print 'Read %s from %s' % (patch['name'],file) + patches[file] = patch + return patches + + +def savepatches(patches): + for patch in sorted(patches.keys()): + # print 'writing %s to %s' % (patches[patch]['name'],patch) + writepatch(patches[patch], patch) + + +def writepatch(patch, filename): + file = open(filename,'w') +# print 'saving patch to %s' %filename + file.write("name=%s\n" % patch['name']); + file.write("time=%s\n" % patch['time']); + + removed = patch['removed'] + for filename in sorted(removed): + str = "removed=%s\n" % filename + file.write(str) + + added = patch['added'] + for filename in sorted(added): + if (len(added[filename]) < 3): + added[filename].append('') + str = "added=%s:%s:%s:%s\n" %( added[filename]['time'],added[filename]['size'],added[filename]['md5'], filename) + file.write(str) + + changed = patch['changed'] + for filename in sorted(changed): + if (len(changed[filename]) < 3): + changed[filename].append('') + str = "changed=%s:%s:%s:%s\n" %( changed[filename]['time'],changed[filename]['size'],changed[filename]['md5'], filename) + file.write(str) + file.close() + + +def readpatch(filename): + file = open(filename,'r') + #name=blah + #time=blah + #removed=file + #added=time:size:md5:file + #changed=time:size:md5:file + patch = dict() + removed = set() + added = dict() + changed = dict() + for line in file: + results = re.split('=|\n',line) + type = results[0] + if( type == 'name'): + patch['name'] = results[1] + elif( type == 'time'): + patch['time'] = 
results[1] + elif( type == 'removed'): + removed.add(results[1]) + elif(( type == 'added') or (type == 'changed')): + results2 = re.split(':|\n',results[1]) + entry = dict() + entry['time'] = results2[0] + entry['size'] = results2[1] + entry['md5'] = results2[2] + if(type == 'added'): + added[results2[3]] = entry + else: + changed[results2[3]] = entry + file.close() + patch['removed'] = removed + patch['added'] = added + patch['changed'] = changed + return patch + diff -r 842a773e65f2 -r f6ae410bd493 dbrtools/dbr/dbrutils.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dbrtools/dbr/dbrutils.py Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,204 @@ +# Copyright (c) 2009 Symbian Foundation Ltd +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Symbian Foundation Ltd - initial contribution. +# +# Contributors: +# mattd +# +# Description: +# DBRutils - Module for handling little bits of stuff to do with generating hashes and scaning directories + +import re +import os +import sys +import string +from os.path import join, isfile, stat +from stat import * + +import glob # temporary (I hope) used for grabbing stuf from zip files... + + + +def defaultdb(): + return os.path.join(patchpath(),'baseline.db') + +def patchpath(): + return os.path.join(epocroot(),'%s/' % patch_path_internal()) + +def patch_path_internal(): + return 'epoc32/relinfo' + +def exclude_dirs(): + fixpath = re.compile('\\\\') + leadingslash = re.compile('^%s' % fixpath.sub('/',epocroot())) + return [string.lower(leadingslash.sub('',fixpath.sub('/',os.path.join(epocroot(),'epoc32/build')))),string.lower(leadingslash.sub('',fixpath.sub('/',patch_path_internal())))] + +def exclude_files(): +# return ['\.sym$','\.dll$'] # just testing... 
+ return ['\.sym$'] + +def epocroot(): + return os.environ.get('EPOCROOT') + +def scanenv(): + print 'Scanning local environment' + directory = os.path.join(epocroot(),'epoc32') + env = scandir(directory, exclude_dirs(), exclude_files()) + return env + +def createzip(files, name): + tmpfilename = os.tmpnam( ) + print tmpfilename + f = open(tmpfilename,'w') + for file in sorted(files): + str = '%s%s' % (file,'\n') + f.write(str) + f.close() + os.chdir(epocroot()) + exestr = '7z a -Tzip -i@%s %s' %(tmpfilename,name) + print 'executing: >%s<\n' %exestr + os.system(exestr) + os.unlink(tmpfilename) + +def extractfiles(files, path): + zips = glob.glob(os.path.join(path, '*.zip')) + for name in zips: + extractfromzip(files, name,'') + + +def extractfromzip(files, name, location): + tmpfilename = os.tmpnam( ) + print tmpfilename + cwd = os.getcwd(); + os.chdir(os.path.join(epocroot(),location)) + f = open(tmpfilename,'w') + for file in sorted(files): + str = '%s%s' % (file,'\n') + f.write(str) + f.close() + exestr = '7z x -y -i@%s %s >nul' %(tmpfilename,name) +# exestr = '7z x -y -i@%s %s' %(tmpfilename,name) + print 'executing: >%s<\n' %exestr + os.system(exestr) + os.unlink(tmpfilename) + os.chdir(cwd) + +def deletefiles(files): + os.chdir(epocroot()) + for file in files: + print 'deleting %s' %file + os.unlink(file) + + +def generateMD5s(testset): + db = dict() + if(len(testset)): +# print testset + os.chdir(epocroot()) + tmpfilename = os.tmpnam( ) + print tmpfilename, '\n' + f = open(tmpfilename,'w') + for file in testset: + entry = dict() + entry['md5'] = 'xxx' + db[file] = entry + str = '%s%s' % (file,'\n') + f.write(str) + f.close() + outputfile = os.tmpnam() + exestr = 'evalid -f %s %s %s' % (tmpfilename, epocroot(), outputfile) +# print exestr + exeresult = os.system(exestr) + if(exeresult): + sys.exit('Fatal error executing: %s\nReported error: %s' % (exestr,os.strerror(exeresult))) + else: + db = gethashes(db,outputfile, False) + os.unlink(outputfile) + 
os.unlink(tmpfilename) + return db + +# Brittle and nasty!!! +def gethashes(db, md5filename, create): + os.chdir(epocroot()) +# print 'trying to open %s' % md5filename + file = open(md5filename,'r') + root = '' + fixpath = re.compile('\\\\') + leadingslash = re.compile('^%s' % fixpath.sub('/',epocroot())) + + evalidparse = re.compile('(.+)\sTYPE=(.+)\sMD5=(.+)') + dirparse = re.compile('Directory:(\S+)') + for line in file: + res = evalidparse.match(line) + if(res): + filename = "%s%s" % (root,res.group(1)) + filename = string.lower(fixpath.sub('/',leadingslash.sub('',filename))) +# print "found %s" % filename + if(create): + entry = dict() + entry['time'] = 'xxx' + entry['size'] = 'xxx' + entry['md5'] = res.group(3) + db[filename] = entry + else: + if(filename in db): + db[filename]['md5'] = res.group(3) + + else: + res = dirparse.match(line) + if(res): + if(res.group(1) == '.'): + root = '' + else: + root = '%s/' % res.group(1) + + file.close() + return db + + +def scandir(top, exclude_dirs, exclude_files): +# exclude_dirs must be in lower case... +# print "Remember to expand the logged dir from", top, "!!!" 
+ countdown = 0 + env = dict() + fixpath = re.compile('\\\\') + leadingslash = re.compile('^%s' % fixpath.sub('/',epocroot())) + + ignorestr='' + for exclude in exclude_files: + if(len(ignorestr)): + ignorestr = '%s|%s' % (ignorestr, exclude) + else: + ignorestr = exclude + ignore = re.compile(ignorestr) + + for root, dirs, files in os.walk(top, topdown=True): + for dirname in dirs: +# print string.lower(leadingslash.sub('',fixpath.sub('/',os.path.join(root,dirname)))) + if(string.lower(leadingslash.sub('',fixpath.sub('/',os.path.join(root,dirname)))) in exclude_dirs): +# print 'removing: %s' % os.path.join(root,dirname) + dirs.remove(dirname) + for name in files: + filename = os.path.join(root, name) + statinfo = os.stat(filename) + fn = string.lower(leadingslash.sub('',fixpath.sub('/',filename))) +# print '%s\t%s' % (filename, fn); + if(countdown == 0): + print '.', + countdown = 1000 + countdown = countdown-1 + if not ignore.search(fn,1): + entry = dict() + entry['time'] = '%d' % statinfo[ST_MTIME] + entry['size'] = '%d' % statinfo[ST_SIZE] + entry['md5'] = 'xxx' + env[fn] = entry + # data = [statinfo[ST_MTIME],statinfo[ST_SIZE],'xxx'] + # env[fn] = data + print '\n' + return env diff -r 842a773e65f2 -r f6ae410bd493 dbrtools/dbr/diffenv.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dbrtools/dbr/diffenv.py Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,33 @@ +# Copyright (c) 2009 Symbian Foundation Ltd +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Symbian Foundation Ltd - initial contribution. 
+# +# Contributors: +# mattd +# +# Description: +# DBR diffenv - compares two environments + +import sys +import dbrpatch + +def run(args): + if(len(args) == 2): + first = args[0] + second = args[1] + dbrpatch.newcomparepatcheddbs(first, second) + else: + help() + +def help(): + print "Compares two environments" + print "Usage:" + print "\tdbr diffenv " + + + diff -r 842a773e65f2 -r f6ae410bd493 dbrtools/dbr/help.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dbrtools/dbr/help.py Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,51 @@ +# Copyright (c) 2009 Symbian Foundation Ltd +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Symbian Foundation Ltd - initial contribution. +# +# Contributors: +# mattd +# +# Description: +# DBR help - displays the DBR help + +import sys + +def main(): + args = sys.argv + run(args) + +def run(args): + if(len(args)): + try: + tool = __import__(args[0]) + tool.help() + except ImportError: + print "No help on %s\n" % args[0] + usage() + else: + usage() + +def usage(): + print "Usage:" + print "\tdbr intro\t- basic introduction\n" + + print "\tdbr getenv\t- installs a baseline NOT IMPLEMENTED" + print "\tdbr checkenv\t- Checks current environment" +# print "\tdbr diffbaseline\t- Compares baselines" + print "\tdbr diffenv\t- Compares environments" + print "\tdbr cleanenv\t- cleans the environment" + print "" + print "\tdbr installpatch\t- installs a patch" + print "\tdbr createpatch\t- creates a patch" + print "\tdbr removepatch\t- removes a patch" + print "\tdbr listpatches\t- lists patches" + print "" + print "\tdbr help - help" + +def help(): + print "No help available!" 
\ No newline at end of file diff -r 842a773e65f2 -r f6ae410bd493 dbrtools/dbr/installpatch.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dbrtools/dbr/installpatch.py Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,44 @@ +# Copyright (c) 2009 Symbian Foundation Ltd +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Symbian Foundation Ltd - initial contribution. +# +# Contributors: +# mattd +# +# Description: +# DBR installpatch - installs a patch in the current environment + +import sys +import os.path +import shutil +import dbrutils + + + +def run(args): + if(len(args)): + patch = args[0] + if(patch): + if(os.path.exists(patch)): + patchname = os.path.basename(patch) + if(not os.path.exists(os.path.join(dbrutils.patchpath(),patchname))): + shutil.copyfile(patch, os.path.join(dbrutils.patchpath(),patchname)) + files = set(); + files.add('*') + dbrutils.extractfromzip(files,os.path.join(dbrutils.patchpath(),patchname),'') + print 'Should probably run checkenv now...' + else: + print 'Cannot find patch zip: %s\n' %patch + help() + else: + help() + else: + help() + +def help(): + print 'usage: Createpatch ' diff -r 842a773e65f2 -r f6ae410bd493 dbrtools/dbr/intro.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dbrtools/dbr/intro.py Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,46 @@ +# Copyright (c) 2009 Symbian Foundation Ltd +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Symbian Foundation Ltd - initial contribution. 
+# +# Contributors: +# mattd +# +# Description: +# DBR intro - displays some introductory information + +def run(args): + help() + +def help(): + l1 ='\nDBR tools are simply a way of checking what has been changed in the build you are using.' + l2 ='\n\nUnlike CBRs, they intentionally make no attempt at understanding components,' + l3 ='and subsequently they do not have the restrictions that CBRs require.' + l4 ='\n\nGenerally speaking all developers work from builds of the whole platform,' + l5 ='and developers want to change the build, and know what they have changed,' + l6 ='what has changed between builds, or what they have different to other developers' + l7 ='with as little hastle as possible.' + + l8 ='\nThere is a patching mechanism for developer providing patches to eachother for the short-term,' + l9 ='but the idea is that patches are short-lived, unlike CBRs where they can live forever.' + l10 ='\n\nIn short, you get most of the benefits of CBRs without the hastle.' + print l1,l2,l3,l4,l5,l6,l7,l8,l9,l10 + + s1='\nHow To use\n\n' + s2='Starting Method 1:\n' + s3='\t1. Unpack all your zips on to a clean drive\n' + s4='\t2. Ensure you\'ve extracted the MD5s into epoc32/relinfo\n' + s5='\t3. Run \'dbr checkenv\' to generate a database\n\n' + s6='Starting Method 2:\n' + s7='\t1. 
Run \'dbr getenv \' to install a full build and configure the database\n\n' + s8='If you want to know what you\'ve changed, run \'dbr checkenv\'\n' + s9='If you want to clean the environment run \'dbr cleanenv\'\n' + s10='If you want to compare two baselines run \'dbr diffenv \'\n' + + + print s1,s2,s3,s4,s5,s6,s7,s8,s9, s10 + diff -r 842a773e65f2 -r f6ae410bd493 dbrtools/dbr/listpatches.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dbrtools/dbr/listpatches.py Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,24 @@ +# Copyright (c) 2009 Symbian Foundation Ltd +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Symbian Foundation Ltd - initial contribution. +# +# Contributors: +# mattd +# +# Description: +# DBR help - displays the DBR help + +import dbrpatch + + +def run(args): + dbrpatch.listpatches() + + +def help(): + print "lists the patches" \ No newline at end of file diff -r 842a773e65f2 -r f6ae410bd493 dbrtools/dbr/removepatch.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dbrtools/dbr/removepatch.py Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,25 @@ +# Copyright (c) 2009 Symbian Foundation Ltd +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Symbian Foundation Ltd - initial contribution. +# +# Contributors: +# mattd +# +# Description: +# DBR help - displays the DBR help + +import dbrpatch + + +def run(args): + if(len(args) == 1): + dbrpatch.removepatch(args[0]); + print 'do cleanenv!!!' 
+ +def help(): + print "removes a patch" \ No newline at end of file diff -r 842a773e65f2 -r f6ae410bd493 downloadkit/BeautifulSoup.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/downloadkit/BeautifulSoup.py Tue Mar 16 12:28:04 2010 +0000 @@ -0,0 +1,2000 @@ +"""Beautiful Soup +Elixir and Tonic +"The Screen-Scraper's Friend" +http://www.crummy.com/software/BeautifulSoup/ + +Beautiful Soup parses a (possibly invalid) XML or HTML document into a +tree representation. It provides methods and Pythonic idioms that make +it easy to navigate, search, and modify the tree. + +A well-formed XML/HTML document yields a well-formed data +structure. An ill-formed XML/HTML document yields a correspondingly +ill-formed data structure. If your document is only locally +well-formed, you can use this library to find and process the +well-formed part of it. + +Beautiful Soup works with Python 2.2 and up. It has no external +dependencies, but you'll have more success at converting data to UTF-8 +if you also install these three packages: + +* chardet, for auto-detecting character encodings + http://chardet.feedparser.org/ +* cjkcodecs and iconv_codec, which add more encodings to the ones supported + by stock Python. + http://cjkpython.i18n.org/ + +Beautiful Soup defines classes for two main parsing strategies: + + * BeautifulStoneSoup, for parsing XML, SGML, or your domain-specific + language that kind of looks like XML. + + * BeautifulSoup, for parsing run-of-the-mill HTML code, be it valid + or invalid. This class has web browser-like heuristics for + obtaining a sensible parse tree in the face of common HTML errors. + +Beautiful Soup also defines a class (UnicodeDammit) for autodetecting +the encoding of an HTML or XML document, and converting it to +Unicode. Much of this code is taken from Mark Pilgrim's Universal Feed Parser. 
+ +For more than you ever wanted to know about Beautiful Soup, see the +documentation: +http://www.crummy.com/software/BeautifulSoup/documentation.html + +Here, have some legalese: + +Copyright (c) 2004-2009, Leonard Richardson + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of the the Beautiful Soup Consortium and All + Night Kosher Bakery nor the names of its contributors may be + used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE, DAMMIT. 
+ +""" +from __future__ import generators + +__author__ = "Leonard Richardson (leonardr@segfault.org)" +__version__ = "3.1.0.1" +__copyright__ = "Copyright (c) 2004-2009 Leonard Richardson" +__license__ = "New-style BSD" + +import codecs +import markupbase +import types +import re +from HTMLParser import HTMLParser, HTMLParseError +try: + from htmlentitydefs import name2codepoint +except ImportError: + name2codepoint = {} +try: + set +except NameError: + from sets import Set as set + +#These hacks make Beautiful Soup able to parse XML with namespaces +markupbase._declname_match = re.compile(r'[a-zA-Z][-_.:a-zA-Z0-9]*\s*').match + +DEFAULT_OUTPUT_ENCODING = "utf-8" + +# First, the classes that represent markup elements. + +def sob(unicode, encoding): + """Returns either the given Unicode string or its encoding.""" + if encoding is None: + return unicode + else: + return unicode.encode(encoding) + +class PageElement: + """Contains the navigational information for some part of the page + (either a tag or a piece of text)""" + + def setup(self, parent=None, previous=None): + """Sets up the initial relations between this element and + other elements.""" + self.parent = parent + self.previous = previous + self.next = None + self.previousSibling = None + self.nextSibling = None + if self.parent and self.parent.contents: + self.previousSibling = self.parent.contents[-1] + self.previousSibling.nextSibling = self + + def replaceWith(self, replaceWith): + oldParent = self.parent + myIndex = self.parent.contents.index(self) + if hasattr(replaceWith, 'parent') and replaceWith.parent == self.parent: + # We're replacing this element with one of its siblings. + index = self.parent.contents.index(replaceWith) + if index and index < myIndex: + # Furthermore, it comes before this element. That + # means that when we extract it, the index of this + # element will change. 
+ myIndex = myIndex - 1 + self.extract() + oldParent.insert(myIndex, replaceWith) + + def extract(self): + """Destructively rips this element out of the tree.""" + if self.parent: + try: + self.parent.contents.remove(self) + except ValueError: + pass + + #Find the two elements that would be next to each other if + #this element (and any children) hadn't been parsed. Connect + #the two. + lastChild = self._lastRecursiveChild() + nextElement = lastChild.next + + if self.previous: + self.previous.next = nextElement + if nextElement: + nextElement.previous = self.previous + self.previous = None + lastChild.next = None + + self.parent = None + if self.previousSibling: + self.previousSibling.nextSibling = self.nextSibling + if self.nextSibling: + self.nextSibling.previousSibling = self.previousSibling + self.previousSibling = self.nextSibling = None + return self + + def _lastRecursiveChild(self): + "Finds the last element beneath this object to be parsed." + lastChild = self + while hasattr(lastChild, 'contents') and lastChild.contents: + lastChild = lastChild.contents[-1] + return lastChild + + def insert(self, position, newChild): + if (isinstance(newChild, basestring) + or isinstance(newChild, unicode)) \ + and not isinstance(newChild, NavigableString): + newChild = NavigableString(newChild) + + position = min(position, len(self.contents)) + if hasattr(newChild, 'parent') and newChild.parent != None: + # We're 'inserting' an element that's already one + # of this object's children. + if newChild.parent == self: + index = self.find(newChild) + if index and index < position: + # Furthermore we're moving it further down the + # list of this object's children. That means that + # when we extract this element, our target index + # will jump down one. 
+ position = position - 1 + newChild.extract() + + newChild.parent = self + previousChild = None + if position == 0: + newChild.previousSibling = None + newChild.previous = self + else: + previousChild = self.contents[position-1] + newChild.previousSibling = previousChild + newChild.previousSibling.nextSibling = newChild + newChild.previous = previousChild._lastRecursiveChild() + if newChild.previous: + newChild.previous.next = newChild + + newChildsLastElement = newChild._lastRecursiveChild() + + if position >= len(self.contents): + newChild.nextSibling = None + + parent = self + parentsNextSibling = None + while not parentsNextSibling: + parentsNextSibling = parent.nextSibling + parent = parent.parent + if not parent: # This is the last element in the document. + break + if parentsNextSibling: + newChildsLastElement.next = parentsNextSibling + else: + newChildsLastElement.next = None + else: + nextChild = self.contents[position] + newChild.nextSibling = nextChild + if newChild.nextSibling: + newChild.nextSibling.previousSibling = newChild + newChildsLastElement.next = nextChild + + if newChildsLastElement.next: + newChildsLastElement.next.previous = newChildsLastElement + self.contents.insert(position, newChild) + + def append(self, tag): + """Appends the given tag to the contents of this tag.""" + self.insert(len(self.contents), tag) + + def findNext(self, name=None, attrs={}, text=None, **kwargs): + """Returns the first item that matches the given criteria and + appears after this Tag in the document.""" + return self._findOne(self.findAllNext, name, attrs, text, **kwargs) + + def findAllNext(self, name=None, attrs={}, text=None, limit=None, + **kwargs): + """Returns all items that match the given criteria and appear + after this Tag in the document.""" + return self._findAll(name, attrs, text, limit, self.nextGenerator, + **kwargs) + + def findNextSibling(self, name=None, attrs={}, text=None, **kwargs): + """Returns the closest sibling to this Tag that matches 
the + given criteria and appears after this Tag in the document.""" + return self._findOne(self.findNextSiblings, name, attrs, text, + **kwargs) + + def findNextSiblings(self, name=None, attrs={}, text=None, limit=None, + **kwargs): + """Returns the siblings of this Tag that match the given + criteria and appear after this Tag in the document.""" + return self._findAll(name, attrs, text, limit, + self.nextSiblingGenerator, **kwargs) + fetchNextSiblings = findNextSiblings # Compatibility with pre-3.x + + def findPrevious(self, name=None, attrs={}, text=None, **kwargs): + """Returns the first item that matches the given criteria and + appears before this Tag in the document.""" + return self._findOne(self.findAllPrevious, name, attrs, text, **kwargs) + + def findAllPrevious(self, name=None, attrs={}, text=None, limit=None, + **kwargs): + """Returns all items that match the given criteria and appear + before this Tag in the document.""" + return self._findAll(name, attrs, text, limit, self.previousGenerator, + **kwargs) + fetchPrevious = findAllPrevious # Compatibility with pre-3.x + + def findPreviousSibling(self, name=None, attrs={}, text=None, **kwargs): + """Returns the closest sibling to this Tag that matches the + given criteria and appears before this Tag in the document.""" + return self._findOne(self.findPreviousSiblings, name, attrs, text, + **kwargs) + + def findPreviousSiblings(self, name=None, attrs={}, text=None, + limit=None, **kwargs): + """Returns the siblings of this Tag that match the given + criteria and appear before this Tag in the document.""" + return self._findAll(name, attrs, text, limit, + self.previousSiblingGenerator, **kwargs) + fetchPreviousSiblings = findPreviousSiblings # Compatibility with pre-3.x + + def findParent(self, name=None, attrs={}, **kwargs): + """Returns the closest parent of this Tag that matches the given + criteria.""" + # NOTE: We can't use _findOne because findParents takes a different + # set of arguments. 
+ r = None + l = self.findParents(name, attrs, 1) + if l: + r = l[0] + return r + + def findParents(self, name=None, attrs={}, limit=None, **kwargs): + """Returns the parents of this Tag that match the given + criteria.""" + + return self._findAll(name, attrs, None, limit, self.parentGenerator, + **kwargs) + fetchParents = findParents # Compatibility with pre-3.x + + #These methods do the real heavy lifting. + + def _findOne(self, method, name, attrs, text, **kwargs): + r = None + l = method(name, attrs, text, 1, **kwargs) + if l: + r = l[0] + return r + + def _findAll(self, name, attrs, text, limit, generator, **kwargs): + "Iterates over a generator looking for things that match." + + if isinstance(name, SoupStrainer): + strainer = name + else: + # Build a SoupStrainer + strainer = SoupStrainer(name, attrs, text, **kwargs) + results = ResultSet(strainer) + g = generator() + while True: + try: + i = g.next() + except StopIteration: + break + if i: + found = strainer.search(i) + if found: + results.append(found) + if limit and len(results) >= limit: + break + return results + + #These Generators can be used to navigate starting from both + #NavigableStrings and Tags. + def nextGenerator(self): + i = self + while i: + i = i.next + yield i + + def nextSiblingGenerator(self): + i = self + while i: + i = i.nextSibling + yield i + + def previousGenerator(self): + i = self + while i: + i = i.previous + yield i + + def previousSiblingGenerator(self): + i = self + while i: + i = i.previousSibling + yield i + + def parentGenerator(self): + i = self + while i: + i = i.parent + yield i + + # Utility methods + def substituteEncoding(self, str, encoding=None): + encoding = encoding or "utf-8" + return str.replace("%SOUP-ENCODING%", encoding) + + def toEncoding(self, s, encoding=None): + """Encodes an object to a string in some encoding, or to Unicode. 
+ .""" + if isinstance(s, unicode): + if encoding: + s = s.encode(encoding) + elif isinstance(s, str): + if encoding: + s = s.encode(encoding) + else: + s = unicode(s) + else: + if encoding: + s = self.toEncoding(str(s), encoding) + else: + s = unicode(s) + return s + +class NavigableString(unicode, PageElement): + + def __new__(cls, value): + """Create a new NavigableString. + + When unpickling a NavigableString, this method is called with + the string in DEFAULT_OUTPUT_ENCODING. That encoding needs to be + passed in to the superclass's __new__ or the superclass won't know + how to handle non-ASCII characters. + """ + if isinstance(value, unicode): + return unicode.__new__(cls, value) + return unicode.__new__(cls, value, DEFAULT_OUTPUT_ENCODING) + + def __getnewargs__(self): + return (unicode(self),) + + def __getattr__(self, attr): + """text.string gives you text. This is for backwards + compatibility for Navigable*String, but for CData* it lets you + get the string without the CData wrapper.""" + if attr == 'string': + return self + else: + raise AttributeError, "'%s' object has no attribute '%s'" % (self.__class__.__name__, attr) + + def encode(self, encoding=DEFAULT_OUTPUT_ENCODING): + return self.decode().encode(encoding) + + def decodeGivenEventualEncoding(self, eventualEncoding): + return self + +class CData(NavigableString): + + def decodeGivenEventualEncoding(self, eventualEncoding): + return u'' + +class ProcessingInstruction(NavigableString): + + def decodeGivenEventualEncoding(self, eventualEncoding): + output = self + if u'%SOUP-ENCODING%' in output: + output = self.substituteEncoding(output, eventualEncoding) + return u'' + +class Comment(NavigableString): + def decodeGivenEventualEncoding(self, eventualEncoding): + return u'' + +class Declaration(NavigableString): + def decodeGivenEventualEncoding(self, eventualEncoding): + return u'' + +class Tag(PageElement): + + """Represents a found HTML tag with its attributes and contents.""" + + def 
_invert(h): + "Cheap function to invert a hash." + i = {} + for k,v in h.items(): + i[v] = k + return i + + XML_ENTITIES_TO_SPECIAL_CHARS = { "apos" : "'", + "quot" : '"', + "amp" : "&", + "lt" : "<", + "gt" : ">" } + + XML_SPECIAL_CHARS_TO_ENTITIES = _invert(XML_ENTITIES_TO_SPECIAL_CHARS) + + def _convertEntities(self, match): + """Used in a call to re.sub to replace HTML, XML, and numeric + entities with the appropriate Unicode characters. If HTML + entities are being converted, any unrecognized entities are + escaped.""" + x = match.group(1) + if self.convertHTMLEntities and x in name2codepoint: + return unichr(name2codepoint[x]) + elif x in self.XML_ENTITIES_TO_SPECIAL_CHARS: + if self.convertXMLEntities: + return self.XML_ENTITIES_TO_SPECIAL_CHARS[x] + else: + return u'&%s;' % x + elif len(x) > 0 and x[0] == '#': + # Handle numeric entities + if len(x) > 1 and x[1] == 'x': + return unichr(int(x[2:], 16)) + else: + return unichr(int(x[1:])) + + elif self.escapeUnrecognizedEntities: + return u'&%s;' % x + else: + return u'&%s;' % x + + def __init__(self, parser, name, attrs=None, parent=None, + previous=None): + "Basic constructor." + + # We don't actually store the parser object: that lets extracted + # chunks be garbage-collected + self.parserClass = parser.__class__ + self.isSelfClosing = parser.isSelfClosingTag(name) + self.name = name + if attrs == None: + attrs = [] + self.attrs = attrs + self.contents = [] + self.setup(parent, previous) + self.hidden = False + self.containsSubstitutions = False + self.convertHTMLEntities = parser.convertHTMLEntities + self.convertXMLEntities = parser.convertXMLEntities + self.escapeUnrecognizedEntities = parser.escapeUnrecognizedEntities + + def convert(kval): + "Converts HTML, XML and numeric entities in the attribute value." 
+ k, val = kval + if val is None: + return kval + return (k, re.sub("&(#\d+|#x[0-9a-fA-F]+|\w+);", + self._convertEntities, val)) + self.attrs = map(convert, self.attrs) + + def get(self, key, default=None): + """Returns the value of the 'key' attribute for the tag, or + the value given for 'default' if it doesn't have that + attribute.""" + return self._getAttrMap().get(key, default) + + def has_key(self, key): + return self._getAttrMap().has_key(key) + + def __getitem__(self, key): + """tag[key] returns the value of the 'key' attribute for the tag, + and throws an exception if it's not there.""" + return self._getAttrMap()[key] + + def __iter__(self): + "Iterating over a tag iterates over its contents." + return iter(self.contents) + + def __len__(self): + "The length of a tag is the length of its list of contents." + return len(self.contents) + + def __contains__(self, x): + return x in self.contents + + def __nonzero__(self): + "A tag is non-None even if it has no contents." + return True + + def __setitem__(self, key, value): + """Setting tag[key] sets the value of the 'key' attribute for the + tag.""" + self._getAttrMap() + self.attrMap[key] = value + found = False + for i in range(0, len(self.attrs)): + if self.attrs[i][0] == key: + self.attrs[i] = (key, value) + found = True + if not found: + self.attrs.append((key, value)) + self._getAttrMap()[key] = value + + def __delitem__(self, key): + "Deleting tag[key] deletes all 'key' attributes for the tag." + for item in self.attrs: + if item[0] == key: + self.attrs.remove(item) + #We don't break because bad HTML can define the same + #attribute multiple times. + self._getAttrMap() + if self.attrMap.has_key(key): + del self.attrMap[key] + + def __call__(self, *args, **kwargs): + """Calling a tag like a function is the same as calling its + findAll() method. Eg. 
tag('a') returns a list of all the A tags + found within this tag.""" + return apply(self.findAll, args, kwargs) + + def __getattr__(self, tag): + #print "Getattr %s.%s" % (self.__class__, tag) + if len(tag) > 3 and tag.rfind('Tag') == len(tag)-3: + return self.find(tag[:-3]) + elif tag.find('__') != 0: + return self.find(tag) + raise AttributeError, "'%s' object has no attribute '%s'" % (self.__class__, tag) + + def __eq__(self, other): + """Returns true iff this tag has the same name, the same attributes, + and the same contents (recursively) as the given tag. + + NOTE: right now this will return false if two tags have the + same attributes in a different order. Should this be fixed?""" + if not hasattr(other, 'name') or not hasattr(other, 'attrs') or not hasattr(other, 'contents') or self.name != other.name or self.attrs != other.attrs or len(self) != len(other): + return False + for i in range(0, len(self.contents)): + if self.contents[i] != other.contents[i]: + return False + return True + + def __ne__(self, other): + """Returns true iff this tag is not identical to the other tag, + as defined in __eq__.""" + return not self == other + + def __repr__(self, encoding=DEFAULT_OUTPUT_ENCODING): + """Renders this tag as a string.""" + return self.decode(eventualEncoding=encoding) + + BARE_AMPERSAND_OR_BRACKET = re.compile("([<>]|" + + "&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)" + + ")") + + def _sub_entity(self, x): + """Used with a regular expression to substitute the + appropriate XML entity for an XML special character.""" + return "&" + self.XML_SPECIAL_CHARS_TO_ENTITIES[x.group(0)[0]] + ";" + + def __unicode__(self): + return self.decode() + + def __str__(self): + return self.encode() + + def encode(self, encoding=DEFAULT_OUTPUT_ENCODING, + prettyPrint=False, indentLevel=0): + return self.decode(prettyPrint, indentLevel, encoding).encode(encoding) + + def decode(self, prettyPrint=False, indentLevel=0, + eventualEncoding=DEFAULT_OUTPUT_ENCODING): + """Returns a string or 
Unicode representation of this tag and + its contents. To get Unicode, pass None for encoding.""" + + attrs = [] + if self.attrs: + for key, val in self.attrs: + fmt = '%s="%s"' + if isString(val): + if (self.containsSubstitutions + and eventualEncoding is not None + and '%SOUP-ENCODING%' in val): + val = self.substituteEncoding(val, eventualEncoding) + + # The attribute value either: + # + # * Contains no embedded double quotes or single quotes. + # No problem: we enclose it in double quotes. + # * Contains embedded single quotes. No problem: + # double quotes work here too. + # * Contains embedded double quotes. No problem: + # we enclose it in single quotes. + # * Embeds both single _and_ double quotes. This + # can't happen naturally, but it can happen if + # you modify an attribute value after parsing + # the document. Now we have a bit of a + # problem. We solve it by enclosing the + # attribute in single quotes, and escaping any + # embedded single quotes to XML entities. + if '"' in val: + fmt = "%s='%s'" + if "'" in val: + # TODO: replace with apos when + # appropriate. + val = val.replace("'", "&squot;") + + # Now we're okay w/r/t quotes. But the attribute + # value might also contain angle brackets, or + # ampersands that aren't part of entities. We need + # to escape those to XML entities too. + val = self.BARE_AMPERSAND_OR_BRACKET.sub(self._sub_entity, val) + if val is None: + # Handle boolean attributes. 
+ decoded = key + else: + decoded = fmt % (key, val) + attrs.append(decoded) + close = '' + closeTag = '' + if self.isSelfClosing: + close = ' /' + else: + closeTag = '' % self.name + + indentTag, indentContents = 0, 0 + if prettyPrint: + indentTag = indentLevel + space = (' ' * (indentTag-1)) + indentContents = indentTag + 1 + contents = self.decodeContents(prettyPrint, indentContents, + eventualEncoding) + if self.hidden: + s = contents + else: + s = [] + attributeString = '' + if attrs: + attributeString = ' ' + ' '.join(attrs) + if prettyPrint: + s.append(space) + s.append('<%s%s%s>' % (self.name, attributeString, close)) + if prettyPrint: + s.append("\n") + s.append(contents) + if prettyPrint and contents and contents[-1] != "\n": + s.append("\n") + if prettyPrint and closeTag: + s.append(space) + s.append(closeTag) + if prettyPrint and closeTag and self.nextSibling: + s.append("\n") + s = ''.join(s) + return s + + def decompose(self): + """Recursively destroys the contents of this tree.""" + contents = [i for i in self.contents] + for i in contents: + if isinstance(i, Tag): + i.decompose() + else: + i.extract() + self.extract() + + def prettify(self, encoding=DEFAULT_OUTPUT_ENCODING): + return self.encode(encoding, True) + + def encodeContents(self, encoding=DEFAULT_OUTPUT_ENCODING, + prettyPrint=False, indentLevel=0): + return self.decodeContents(prettyPrint, indentLevel).encode(encoding) + + def decodeContents(self, prettyPrint=False, indentLevel=0, + eventualEncoding=DEFAULT_OUTPUT_ENCODING): + """Renders the contents of this tag as a string in the given + encoding. 
If encoding is None, returns a Unicode string..""" + s=[] + for c in self: + text = None + if isinstance(c, NavigableString): + text = c.decodeGivenEventualEncoding(eventualEncoding) + elif isinstance(c, Tag): + s.append(c.decode(prettyPrint, indentLevel, eventualEncoding)) + if text and prettyPrint: + text = text.strip() + if text: + if prettyPrint: + s.append(" " * (indentLevel-1)) + s.append(text) + if prettyPrint: + s.append("\n") + return ''.join(s) + + #Soup methods + + def find(self, name=None, attrs={}, recursive=True, text=None, + **kwargs): + """Return only the first child of this Tag matching the given + criteria.""" + r = None + l = self.findAll(name, attrs, recursive, text, 1, **kwargs) + if l: + r = l[0] + return r + findChild = find + + def findAll(self, name=None, attrs={}, recursive=True, text=None, + limit=None, **kwargs): + """Extracts a list of Tag objects that match the given + criteria. You can specify the name of the Tag and any + attributes you want the Tag to have. + + The value of a key-value pair in the 'attrs' map can be a + string, a list of strings, a regular expression object, or a + callable that takes a string and returns whether or not the + string matches for some custom definition of 'matches'. The + same is true of the tag name.""" + generator = self.recursiveChildGenerator + if not recursive: + generator = self.childGenerator + return self._findAll(name, attrs, text, limit, generator, **kwargs) + findChildren = findAll + + # Pre-3.x compatibility methods. Will go away in 4.0. + first = find + fetch = findAll + + def fetchText(self, text=None, recursive=True, limit=None): + return self.findAll(text=text, recursive=recursive, limit=limit) + + def firstText(self, text=None, recursive=True): + return self.find(text=text, recursive=recursive) + + # 3.x compatibility methods. Will go away in 4.0. 
+ def renderContents(self, encoding=DEFAULT_OUTPUT_ENCODING, + prettyPrint=False, indentLevel=0): + if encoding is None: + return self.decodeContents(prettyPrint, indentLevel, encoding) + else: + return self.encodeContents(encoding, prettyPrint, indentLevel) + + + #Private methods + + def _getAttrMap(self): + """Initializes a map representation of this tag's attributes, + if not already initialized.""" + if not getattr(self, 'attrMap'): + self.attrMap = {} + for (key, value) in self.attrs: + self.attrMap[key] = value + return self.attrMap + + #Generator methods + def recursiveChildGenerator(self): + if not len(self.contents): + raise StopIteration + stopNode = self._lastRecursiveChild().next + current = self.contents[0] + while current is not stopNode: + yield current + current = current.next + + def childGenerator(self): + if not len(self.contents): + raise StopIteration + current = self.contents[0] + while current: + yield current + current = current.nextSibling + raise StopIteration + +# Next, a couple classes to represent queries and their results. 
+class SoupStrainer: + """Encapsulates a number of ways of matching a markup element (tag or + text).""" + + def __init__(self, name=None, attrs={}, text=None, **kwargs): + self.name = name + if isString(attrs): + kwargs['class'] = attrs + attrs = None + if kwargs: + if attrs: + attrs = attrs.copy() + attrs.update(kwargs) + else: + attrs = kwargs + self.attrs = attrs + self.text = text + + def __str__(self): + if self.text: + return self.text + else: + return "%s|%s" % (self.name, self.attrs) + + def searchTag(self, markupName=None, markupAttrs={}): + found = None + markup = None + if isinstance(markupName, Tag): + markup = markupName + markupAttrs = markup + callFunctionWithTagData = callable(self.name) \ + and not isinstance(markupName, Tag) + + if (not self.name) \ + or callFunctionWithTagData \ + or (markup and self._matches(markup, self.name)) \ + or (not markup and self._matches(markupName, self.name)): + if callFunctionWithTagData: + match = self.name(markupName, markupAttrs) + else: + match = True + markupAttrMap = None + for attr, matchAgainst in self.attrs.items(): + if not markupAttrMap: + if hasattr(markupAttrs, 'get'): + markupAttrMap = markupAttrs + else: + markupAttrMap = {} + for k,v in markupAttrs: + markupAttrMap[k] = v + attrValue = markupAttrMap.get(attr) + if not self._matches(attrValue, matchAgainst): + match = False + break + if match: + if markup: + found = markup + else: + found = markupName + return found + + def search(self, markup): + #print 'looking for %s in %s' % (self, markup) + found = None + # If given a list of items, scan it for a text element that + # matches. + if isList(markup) and not isinstance(markup, Tag): + for element in markup: + if isinstance(element, NavigableString) \ + and self.search(element): + found = element + break + # If it's a Tag, make sure its name or attributes match. + # Don't bother with Tags if we're searching for text. 
+ elif isinstance(markup, Tag): + if not self.text: + found = self.searchTag(markup) + # If it's text, make sure the text matches. + elif isinstance(markup, NavigableString) or \ + isString(markup): + if self._matches(markup, self.text): + found = markup + else: + raise Exception, "I don't know how to match against a %s" \ + % markup.__class__ + return found + + def _matches(self, markup, matchAgainst): + #print "Matching %s against %s" % (markup, matchAgainst) + result = False + if matchAgainst == True and type(matchAgainst) == types.BooleanType: + result = markup != None + elif callable(matchAgainst): + result = matchAgainst(markup) + else: + #Custom match methods take the tag as an argument, but all + #other ways of matching match the tag name as a string. + if isinstance(markup, Tag): + markup = markup.name + if markup is not None and not isString(markup): + markup = unicode(markup) + #Now we know that chunk is either a string, or None. + if hasattr(matchAgainst, 'match'): + # It's a regexp object. + result = markup and matchAgainst.search(markup) + elif (isList(matchAgainst) + and (markup is not None or not isString(matchAgainst))): + result = markup in matchAgainst + elif hasattr(matchAgainst, 'items'): + result = markup.has_key(matchAgainst) + elif matchAgainst and isString(markup): + if isinstance(markup, unicode): + matchAgainst = unicode(matchAgainst) + else: + matchAgainst = str(matchAgainst) + + if not result: + result = matchAgainst == markup + return result + +class ResultSet(list): + """A ResultSet is just a list that keeps track of the SoupStrainer + that created it.""" + def __init__(self, source): + list.__init__([]) + self.source = source + +# Now, some helper functions. 
+ +def isList(l): + """Convenience method that works with all 2.x versions of Python + to determine whether or not something is listlike.""" + return ((hasattr(l, '__iter__') and not isString(l)) + or (type(l) in (types.ListType, types.TupleType))) + +def isString(s): + """Convenience method that works with all 2.x versions of Python + to determine whether or not something is stringlike.""" + try: + return isinstance(s, unicode) or isinstance(s, basestring) + except NameError: + return isinstance(s, str) + +def buildTagMap(default, *args): + """Turns a list of maps, lists, or scalars into a single map. + Used to build the SELF_CLOSING_TAGS, NESTABLE_TAGS, and + NESTING_RESET_TAGS maps out of lists and partial maps.""" + built = {} + for portion in args: + if hasattr(portion, 'items'): + #It's a map. Merge it. + for k,v in portion.items(): + built[k] = v + elif isList(portion) and not isString(portion): + #It's a list. Map each item to the default. + for k in portion: + built[k] = default + else: + #It's a scalar. Map it to the default. + built[portion] = default + return built + +# Now, the parser classes. + +class HTMLParserBuilder(HTMLParser): + + def __init__(self, soup): + HTMLParser.__init__(self) + self.soup = soup + + # We inherit feed() and reset(). 
+ + def handle_starttag(self, name, attrs): + if name == 'meta': + self.soup.extractCharsetFromMeta(attrs) + else: + self.soup.unknown_starttag(name, attrs) + + def handle_endtag(self, name): + self.soup.unknown_endtag(name) + + def handle_data(self, content): + self.soup.handle_data(content) + + def _toStringSubclass(self, text, subclass): + """Adds a certain piece of text to the tree as a NavigableString + subclass.""" + self.soup.endData() + self.handle_data(text) + self.soup.endData(subclass) + + def handle_pi(self, text): + """Handle a processing instruction as a ProcessingInstruction + object, possibly one with a %SOUP-ENCODING% slot into which an + encoding will be plugged later.""" + if text[:3] == "xml": + text = u"xml version='1.0' encoding='%SOUP-ENCODING%'" + self._toStringSubclass(text, ProcessingInstruction) + + def handle_comment(self, text): + "Handle comments as Comment objects." + self._toStringSubclass(text, Comment) + + def handle_charref(self, ref): + "Handle character references as data." + if self.soup.convertEntities: + data = unichr(int(ref)) + else: + data = '&#%s;' % ref + self.handle_data(data) + + def handle_entityref(self, ref): + """Handle entity references as data, possibly converting known + HTML and/or XML entity references to the corresponding Unicode + characters.""" + data = None + if self.soup.convertHTMLEntities: + try: + data = unichr(name2codepoint[ref]) + except KeyError: + pass + + if not data and self.soup.convertXMLEntities: + data = self.soup.XML_ENTITIES_TO_SPECIAL_CHARS.get(ref) + + if not data and self.soup.convertHTMLEntities and \ + not self.soup.XML_ENTITIES_TO_SPECIAL_CHARS.get(ref): + # TODO: We've got a problem here. We're told this is + # an entity reference, but it's not an XML entity + # reference or an HTML entity reference. Nonetheless, + # the logical thing to do is to pass it through as an + # unrecognized entity reference. 
+ # + # Except: when the input is "&carol;" this function + # will be called with input "carol". When the input is + # "AT&T", this function will be called with input + # "T". We have no way of knowing whether a semicolon + # was present originally, so we don't know whether + # this is an unknown entity or just a misplaced + # ampersand. + # + # The more common case is a misplaced ampersand, so I + # escape the ampersand and omit the trailing semicolon. + data = "&%s" % ref + if not data: + # This case is different from the one above, because we + # haven't already gone through a supposedly comprehensive + # mapping of entities to Unicode characters. We might not + # have gone through any mapping at all. So the chances are + # very high that this is a real entity, and not a + # misplaced ampersand. + data = "&%s;" % ref + self.handle_data(data) + + def handle_decl(self, data): + "Handle DOCTYPEs and the like as Declaration objects." + self._toStringSubclass(data, Declaration) + + def parse_declaration(self, i): + """Treat a bogus SGML declaration as raw data. Treat a CDATA + declaration as a CData object.""" + j = None + if self.rawdata[i:i+9] == '', i) + if k == -1: + k = len(self.rawdata) + data = self.rawdata[i+9:k] + j = k+3 + self._toStringSubclass(data, CData) + else: + try: + j = HTMLParser.parse_declaration(self, i) + except HTMLParseError: + toHandle = self.rawdata[i:] + self.handle_data(toHandle) + j = i + len(toHandle) + return j + + +class BeautifulStoneSoup(Tag): + + """This class contains the basic parser and search code. It defines + a parser that knows nothing about tag behavior except for the + following: + + You can't close a tag without closing all the tags it encloses. + That is, "" actually means + "". + + [Another possible explanation is "", but since + this class defines no SELF_CLOSING_TAGS, it will never use that + explanation.] 
+ + This class is useful for parsing XML or made-up markup languages, + or when BeautifulSoup makes an assumption counter to what you were + expecting.""" + + SELF_CLOSING_TAGS = {} + NESTABLE_TAGS = {} + RESET_NESTING_TAGS = {} + QUOTE_TAGS = {} + PRESERVE_WHITESPACE_TAGS = [] + + MARKUP_MASSAGE = [(re.compile('(<[^<>]*)/>'), + lambda x: x.group(1) + ' />'), + (re.compile(']*)>'), + lambda x: '') + ] + + ROOT_TAG_NAME = u'[document]' + + HTML_ENTITIES = "html" + XML_ENTITIES = "xml" + XHTML_ENTITIES = "xhtml" + # TODO: This only exists for backwards-compatibility + ALL_ENTITIES = XHTML_ENTITIES + + # Used when determining whether a text node is all whitespace and + # can be replaced with a single space. A text node that contains + # fancy Unicode spaces (usually non-breaking) should be left + # alone. + STRIP_ASCII_SPACES = { 9: None, 10: None, 12: None, 13: None, 32: None, } + + def __init__(self, markup="", parseOnlyThese=None, fromEncoding=None, + markupMassage=True, smartQuotesTo=XML_ENTITIES, + convertEntities=None, selfClosingTags=None, isHTML=False, + builder=HTMLParserBuilder): + """The Soup object is initialized as the 'root tag', and the + provided markup (which can be a string or a file-like object) + is fed into the underlying parser. + + HTMLParser will process most bad HTML, and the BeautifulSoup + class has some tricks for dealing with some HTML that kills + HTMLParser, but Beautiful Soup can nonetheless choke or lose data + if your data uses self-closing tags or declarations + incorrectly. + + By default, Beautiful Soup uses regexes to sanitize input, + avoiding the vast majority of these problems. If the problems + don't apply to you, pass in False for markupMassage, and + you'll get better performance. + + The default parser massage techniques fix the two most common + instances of invalid HTML that choke HTMLParser: + +
(No space between name of closing tag and tag close) + (Extraneous whitespace in declaration) + + You can pass in a custom list of (RE object, replace method) + tuples to get Beautiful Soup to scrub your input the way you + want.""" + + self.parseOnlyThese = parseOnlyThese + self.fromEncoding = fromEncoding + self.smartQuotesTo = smartQuotesTo + self.convertEntities = convertEntities + # Set the rules for how we'll deal with the entities we + # encounter + if self.convertEntities: + # It doesn't make sense to convert encoded characters to + # entities even while you're converting entities to Unicode. + # Just convert it all to Unicode. + self.smartQuotesTo = None + if convertEntities == self.HTML_ENTITIES: + self.convertXMLEntities = False + self.convertHTMLEntities = True + self.escapeUnrecognizedEntities = True + elif convertEntities == self.XHTML_ENTITIES: + self.convertXMLEntities = True + self.convertHTMLEntities = True + self.escapeUnrecognizedEntities = False + elif convertEntities == self.XML_ENTITIES: + self.convertXMLEntities = True + self.convertHTMLEntities = False + self.escapeUnrecognizedEntities = False + else: + self.convertXMLEntities = False + self.convertHTMLEntities = False + self.escapeUnrecognizedEntities = False + + self.instanceSelfClosingTags = buildTagMap(None, selfClosingTags) + self.builder = builder(self) + self.reset() + + if hasattr(markup, 'read'): # It's a file-type object. + markup = markup.read() + self.markup = markup + self.markupMassage = markupMassage + try: + self._feed(isHTML=isHTML) + except StopParsing: + pass + self.markup = None # The markup can now be GCed. + self.builder = None # So can the builder. + + def _feed(self, inDocumentEncoding=None, isHTML=False): + # Convert the document to Unicode. 
+ markup = self.markup + if isinstance(markup, unicode): + if not hasattr(self, 'originalEncoding'): + self.originalEncoding = None + else: + dammit = UnicodeDammit\ + (markup, [self.fromEncoding, inDocumentEncoding], + smartQuotesTo=self.smartQuotesTo, isHTML=isHTML) + markup = dammit.unicode + self.originalEncoding = dammit.originalEncoding + self.declaredHTMLEncoding = dammit.declaredHTMLEncoding + if markup: + if self.markupMassage: + if not isList(self.markupMassage): + self.markupMassage = self.MARKUP_MASSAGE + for fix, m in self.markupMassage: + markup = fix.sub(m, markup) + # TODO: We get rid of markupMassage so that the + # soup object can be deepcopied later on. Some + # Python installations can't copy regexes. If anyone + # was relying on the existence of markupMassage, this + # might cause problems. + del(self.markupMassage) + self.builder.reset() + + self.builder.feed(markup) + # Close out any unfinished strings and close all the open tags. + self.endData() + while self.currentTag.name != self.ROOT_TAG_NAME: + self.popTag() + + def isSelfClosingTag(self, name): + """Returns true iff the given string is the name of a + self-closing tag according to this parser.""" + return self.SELF_CLOSING_TAGS.has_key(name) \ + or self.instanceSelfClosingTags.has_key(name) + + def reset(self): + Tag.__init__(self, self, self.ROOT_TAG_NAME) + self.hidden = 1 + self.builder.reset() + self.currentData = [] + self.currentTag = None + self.tagStack = [] + self.quoteStack = [] + self.pushTag(self) + + def popTag(self): + tag = self.tagStack.pop() + # Tags with just one string-owning child get the child as a + # 'string' property, so that soup.tag.string is shorthand for + # soup.tag.contents[0] + if len(self.currentTag.contents) == 1 and \ + isinstance(self.currentTag.contents[0], NavigableString): + self.currentTag.string = self.currentTag.contents[0] + + #print "Pop", tag.name + if self.tagStack: + self.currentTag = self.tagStack[-1] + return self.currentTag + + def 
pushTag(self, tag): + #print "Push", tag.name + if self.currentTag: + self.currentTag.contents.append(tag) + self.tagStack.append(tag) + self.currentTag = self.tagStack[-1] + + def endData(self, containerClass=NavigableString): + if self.currentData: + currentData = u''.join(self.currentData) + if (currentData.translate(self.STRIP_ASCII_SPACES) == '' and + not set([tag.name for tag in self.tagStack]).intersection( + self.PRESERVE_WHITESPACE_TAGS)): + if '\n' in currentData: + currentData = '\n' + else: + currentData = ' ' + self.currentData = [] + if self.parseOnlyThese and len(self.tagStack) <= 1 and \ + (not self.parseOnlyThese.text or \ + not self.parseOnlyThese.search(currentData)): + return + o = containerClass(currentData) + o.setup(self.currentTag, self.previous) + if self.previous: + self.previous.next = o + self.previous = o + self.currentTag.contents.append(o) + + + def _popToTag(self, name, inclusivePop=True): + """Pops the tag stack up to and including the most recent + instance of the given tag. If inclusivePop is false, pops the tag + stack up to but *not* including the most recent instqance of + the given tag.""" + #print "Popping to %s" % name + if name == self.ROOT_TAG_NAME: + return + + numPops = 0 + mostRecentTag = None + for i in range(len(self.tagStack)-1, 0, -1): + if name == self.tagStack[i].name: + numPops = len(self.tagStack)-i + break + if not inclusivePop: + numPops = numPops - 1 + + for i in range(0, numPops): + mostRecentTag = self.popTag() + return mostRecentTag + + def _smartPop(self, name): + + """We need to pop up to the previous tag of this type, unless + one of this tag's nesting reset triggers comes between this + tag and the previous tag of this type, OR unless this tag is a + generic nesting trigger and another generic nesting trigger + comes between this tag and the previous tag of this type. + + Examples: +

FooBar *

* should pop to 'p', not 'b'. +

FooBar *

* should pop to 'table', not 'p'. +

Foo

Bar *

* should pop to 'tr', not 'p'. + +

    • *
    • * should pop to 'ul', not the first 'li'. +
  • ** should pop to 'table', not the first 'tr' + tag should + implicitly close the previous tag within the same
    ** should pop to 'tr', not the first 'td' + """ + + nestingResetTriggers = self.NESTABLE_TAGS.get(name) + isNestable = nestingResetTriggers != None + isResetNesting = self.RESET_NESTING_TAGS.has_key(name) + popTo = None + inclusive = True + for i in range(len(self.tagStack)-1, 0, -1): + p = self.tagStack[i] + if (not p or p.name == name) and not isNestable: + #Non-nestable tags get popped to the top or to their + #last occurance. + popTo = name + break + if (nestingResetTriggers != None + and p.name in nestingResetTriggers) \ + or (nestingResetTriggers == None and isResetNesting + and self.RESET_NESTING_TAGS.has_key(p.name)): + + #If we encounter one of the nesting reset triggers + #peculiar to this tag, or we encounter another tag + #that causes nesting to reset, pop up to but not + #including that tag. + popTo = p.name + inclusive = False + break + p = p.parent + if popTo: + self._popToTag(popTo, inclusive) + + def unknown_starttag(self, name, attrs, selfClosing=0): + #print "Start tag %s: %s" % (name, attrs) + if self.quoteStack: + #This is not a real tag. + #print "<%s> is not real!" % name + attrs = ''.join(map(lambda(x, y): ' %s="%s"' % (x, y), attrs)) + self.handle_data('<%s%s>' % (name, attrs)) + return + self.endData() + + if not self.isSelfClosingTag(name) and not selfClosing: + self._smartPop(name) + + if self.parseOnlyThese and len(self.tagStack) <= 1 \ + and (self.parseOnlyThese.text or not self.parseOnlyThese.searchTag(name, attrs)): + return + + tag = Tag(self, name, attrs, self.currentTag, self.previous) + if self.previous: + self.previous.next = tag + self.previous = tag + self.pushTag(tag) + if selfClosing or self.isSelfClosingTag(name): + self.popTag() + if name in self.QUOTE_TAGS: + #print "Beginning quote (%s)" % name + self.quoteStack.append(name) + self.literal = 1 + return tag + + def unknown_endtag(self, name): + #print "End tag %s" % name + if self.quoteStack and self.quoteStack[-1] != name: + #This is not a real end tag. 
+ #print " is not real!" % name + self.handle_data('' % name) + return + self.endData() + self._popToTag(name) + if self.quoteStack and self.quoteStack[-1] == name: + self.quoteStack.pop() + self.literal = (len(self.quoteStack) > 0) + + def handle_data(self, data): + self.currentData.append(data) + + def extractCharsetFromMeta(self, attrs): + self.unknown_starttag('meta', attrs) + + +class BeautifulSoup(BeautifulStoneSoup): + + """This parser knows the following facts about HTML: + + * Some tags have no closing tag and should be interpreted as being + closed as soon as they are encountered. + + * The text inside some tags (ie. 'script') may contain tags which + are not really part of the document and which should be parsed + as text, not tags. If you want to parse the text as tags, you can + always fetch it and parse it explicitly. + + * Tag nesting rules: + + Most tags can't be nested at all. For instance, the occurance of + a

    tag should implicitly close the previous

    tag. + +

    Para1

    Para2 + should be transformed into: +

    Para1

    Para2 + + Some tags can be nested arbitrarily. For instance, the occurance + of a

    tag should _not_ implicitly close the previous +
    tag. + + Alice said:
    Bob said:
    Blah + should NOT be transformed into: + Alice said:
    Bob said:
    Blah + + Some tags can be nested, but the nesting is reset by the + interposition of other tags. For instance, a
    , + but not close a tag in another table. + +
    BlahBlah + should be transformed into: +
    BlahBlah + but, + Blah
    Blah + should NOT be transformed into + Blah
    Blah + + Differing assumptions about tag nesting rules are a major source + of problems with the BeautifulSoup class. If BeautifulSoup is not + treating as nestable a tag your page author treats as nestable, + try ICantBelieveItsBeautifulSoup, MinimalSoup, or + BeautifulStoneSoup before writing your own subclass.""" + + def __init__(self, *args, **kwargs): + if not kwargs.has_key('smartQuotesTo'): + kwargs['smartQuotesTo'] = self.HTML_ENTITIES + kwargs['isHTML'] = True + BeautifulStoneSoup.__init__(self, *args, **kwargs) + + SELF_CLOSING_TAGS = buildTagMap(None, + ['br' , 'hr', 'input', 'img', 'meta', + 'spacer', 'link', 'frame', 'base']) + + PRESERVE_WHITESPACE_TAGS = set(['pre', 'textarea']) + + QUOTE_TAGS = {'script' : None, 'textarea' : None} + + #According to the HTML standard, each of these inline tags can + #contain another tag of the same type. Furthermore, it's common + #to actually use these tags this way. + NESTABLE_INLINE_TAGS = ['span', 'font', 'q', 'object', 'bdo', 'sub', 'sup', + 'center'] + + #According to the HTML standard, these block tags can contain + #another tag of the same type. Furthermore, it's common + #to actually use these tags this way. + NESTABLE_BLOCK_TAGS = ['blockquote', 'div', 'fieldset', 'ins', 'del'] + + #Lists can contain other lists, but there are restrictions. + NESTABLE_LIST_TAGS = { 'ol' : [], + 'ul' : [], + 'li' : ['ul', 'ol'], + 'dl' : [], + 'dd' : ['dl'], + 'dt' : ['dl'] } + + #Tables can contain other tables, but there are restrictions. + NESTABLE_TABLE_TAGS = {'table' : [], + 'tr' : ['table', 'tbody', 'tfoot', 'thead'], + 'td' : ['tr'], + 'th' : ['tr'], + 'thead' : ['table'], + 'tbody' : ['table'], + 'tfoot' : ['table'], + } + + NON_NESTABLE_BLOCK_TAGS = ['address', 'form', 'p', 'pre'] + + #If one of these tags is encountered, all tags up to the next tag of + #this type are popped. 
+ RESET_NESTING_TAGS = buildTagMap(None, NESTABLE_BLOCK_TAGS, 'noscript', + NON_NESTABLE_BLOCK_TAGS, + NESTABLE_LIST_TAGS, + NESTABLE_TABLE_TAGS) + + NESTABLE_TAGS = buildTagMap([], NESTABLE_INLINE_TAGS, NESTABLE_BLOCK_TAGS, + NESTABLE_LIST_TAGS, NESTABLE_TABLE_TAGS) + + # Used to detect the charset in a META tag; see start_meta + CHARSET_RE = re.compile("((^|;)\s*charset=)([^;]*)", re.M) + + def extractCharsetFromMeta(self, attrs): + """Beautiful Soup can detect a charset included in a META tag, + try to convert the document to that charset, and re-parse the + document from the beginning.""" + httpEquiv = None + contentType = None + contentTypeIndex = None + tagNeedsEncodingSubstitution = False + + for i in range(0, len(attrs)): + key, value = attrs[i] + key = key.lower() + if key == 'http-equiv': + httpEquiv = value + elif key == 'content': + contentType = value + contentTypeIndex = i + + if httpEquiv and contentType: # It's an interesting meta tag. + match = self.CHARSET_RE.search(contentType) + if match: + if (self.declaredHTMLEncoding is not None or + self.originalEncoding == self.fromEncoding): + # An HTML encoding was sniffed while converting + # the document to Unicode, or an HTML encoding was + # sniffed during a previous pass through the + # document, or an encoding was specified + # explicitly and it worked. Rewrite the meta tag. + def rewrite(match): + return match.group(1) + "%SOUP-ENCODING%" + newAttr = self.CHARSET_RE.sub(rewrite, contentType) + attrs[contentTypeIndex] = (attrs[contentTypeIndex][0], + newAttr) + tagNeedsEncodingSubstitution = True + else: + # This is our first pass through the document. + # Go through it again with the encoding information. 
+ newCharset = match.group(3) + if newCharset and newCharset != self.originalEncoding: + self.declaredHTMLEncoding = newCharset + self._feed(self.declaredHTMLEncoding) + raise StopParsing + pass + tag = self.unknown_starttag("meta", attrs) + if tag and tagNeedsEncodingSubstitution: + tag.containsSubstitutions = True + + +class StopParsing(Exception): + pass + +class ICantBelieveItsBeautifulSoup(BeautifulSoup): + + """The BeautifulSoup class is oriented towards skipping over + common HTML errors like unclosed tags. However, sometimes it makes + errors of its own. For instance, consider this fragment: + + FooBar + + This is perfectly valid (if bizarre) HTML. However, the + BeautifulSoup class will implicitly close the first b tag when it + encounters the second 'b'. It will think the author wrote + "FooBar", and didn't close the first 'b' tag, because + there's no real-world reason to bold something that's already + bold. When it encounters '' it will close two more 'b' + tags, for a grand total of three tags closed instead of two. This + can throw off the rest of your document structure. The same is + true of a number of other tags, listed below. + + It's much more common for someone to forget to close a 'b' tag + than to actually use nested 'b' tags, and the BeautifulSoup class + handles the common case. 
This class handles the not-co-common + case: where you can't believe someone wrote what they did, but + it's valid HTML and BeautifulSoup screwed up by assuming it + wouldn't be.""" + + I_CANT_BELIEVE_THEYRE_NESTABLE_INLINE_TAGS = \ + ['em', 'big', 'i', 'small', 'tt', 'abbr', 'acronym', 'strong', + 'cite', 'code', 'dfn', 'kbd', 'samp', 'strong', 'var', 'b', + 'big'] + + I_CANT_BELIEVE_THEYRE_NESTABLE_BLOCK_TAGS = ['noscript'] + + NESTABLE_TAGS = buildTagMap([], BeautifulSoup.NESTABLE_TAGS, + I_CANT_BELIEVE_THEYRE_NESTABLE_BLOCK_TAGS, + I_CANT_BELIEVE_THEYRE_NESTABLE_INLINE_TAGS) + +class MinimalSoup(BeautifulSoup): + """The MinimalSoup class is for parsing HTML that contains + pathologically bad markup. It makes no assumptions about tag + nesting, but it does know which tags are self-closing, that +