--- a/clone_packages/sf_fcl_packages.txt Thu Jan 21 16:55:34 2010 +0000
+++ b/clone_packages/sf_fcl_packages.txt Wed Feb 03 16:24:25 2010 +0000
@@ -1,4 +1,4 @@
-https://developer.symbian.org/sfl/FCL/sf/adaptation/stubs
+https://developer.symbian.org/oss/FCL/sf/adaptation/stubs
https://developer.symbian.org/sfl/FCL/sf/app/camera
https://developer.symbian.org/sfl/FCL/sf/app/commonemail
https://developer.symbian.org/sfl/FCL/sf/app/conntools
--- a/clone_packages/sf_mcl_packages.txt Thu Jan 21 16:55:34 2010 +0000
+++ b/clone_packages/sf_mcl_packages.txt Wed Feb 03 16:24:25 2010 +0000
@@ -1,4 +1,4 @@
-https://developer.symbian.org/sfl/MCL/sf/adaptation/stubs
+https://developer.symbian.org/oss/MCL/sf/adaptation/stubs
https://developer.symbian.org/sfl/MCL/sf/app/camera
https://developer.symbian.org/sfl/MCL/sf/app/commonemail
https://developer.symbian.org/sfl/MCL/sf/app/conntools
--- a/clone_packages/sftools_mcl_packages.txt Thu Jan 21 16:55:34 2010 +0000
+++ b/clone_packages/sftools_mcl_packages.txt Wed Feb 03 16:24:25 2010 +0000
@@ -18,8 +18,8 @@
https://developer.symbian.org/sfl/MCL/sftools/dev/devicedbgsrvs
https://developer.symbian.org/sfl/MCL/sftools/dev/iss
https://developer.symbian.org/sfl/MCL/sftools/dev/ui
-https://developer.symbian.org/oss/MCL/sftools/depl/docscontent/
-https://developer.symbian.org/oss/MCL/sftools/depl/docstools
+https://developer.symbian.org/oss/MCL/sftools/depl/docscontent
+https://developer.symbian.org/oss/MCL/sftools/depl/doctools
https://developer.symbian.org/oss/MCL/sftools/dev/build
https://developer.symbian.org/oss/MCL/sftools/dev/eclipseenv/buildlayout34
https://developer.symbian.org/oss/MCL/sftools/dev/eclipseenv/buildlayout35
--- a/downloadkit/downloadkit.py Thu Jan 21 16:55:34 2010 +0000
+++ b/downloadkit/downloadkit.py Wed Feb 03 16:24:25 2010 +0000
@@ -19,11 +19,15 @@
import sys
import getpass
import re
+import time
from BeautifulSoup import BeautifulSoup
+from optparse import OptionParser
user_agent = 'downloadkit.py script'
headers = { 'User-Agent' : user_agent }
top_level_url = "http://developer.symbian.org"
+download_list = []
+unzip_list = []
username = ''
password = ''
@@ -112,6 +116,36 @@
def run(self):
self.status = self.unzip(self.filename, self.levels, self.deletelevels)
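+# unzip worker threads started by schedule_unzip; complete_outstanding_unzips joins them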
+threadlist = []
+def schedule_unzip(filename, unziplevel, deletelevel):
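+ # in a dry run, only record the 7z command that would have been run;
+ # otherwise start an unzipfile thread so unpacking overlaps the downloads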
+ global options
+ if options.nounzip :
+ return
+ if options.dryrun :
+ global unzip_list
+ if unziplevel > 0:
+ unzip_list.append("7z x -y %s" % filename)
+ if unziplevel > 1:
+ unzip_list.append("# unzip recursively %d more times" % unziplevel-1)
+ if deletelevel > 0:
+ unzip_list.append("# delete %s" % filename)
+ if deletelevel > 1:
+ unzip_list.append("# delete zip files recursively %d more times" % deletelevel-1)
+ return
+
+ unzipthread = unzipfile(filename, unziplevel, deletelevel)
+ global threadlist
+ threadlist.append(unzipthread)
+ unzipthread.start()
+
+def complete_outstanding_unzips():
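+ # block until every unzip thread scheduled so far has finished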
+ global options
+ if options.dryrun or options.nounzip:
+ return
+ print "Waiting for outstanding commands to finish..."
+ for thread in threadlist:
+ thread.join()
+
def orderResults(x,y) :
def ranking(name) :
# 1st = release_metadata, build_BOM.zip (both small things!)
@@ -135,8 +169,70 @@
ytitle = y['title']
return cmp(ranking(xtitle)+cmp(xtitle,ytitle), ranking(ytitle))
-def downloadkit(version):
- headers = { 'User-Agent' : user_agent }
+def download_file(filename,url):
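+ # stream url into filename in 128 Kb chunks; if the first chunk is the
+ # sign-in page our session cookies have expired, so log in and retry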
+ global options
+ if options.dryrun :
+ global download_list
+ download_info = "download %s %s" % (filename, url)
+ download_list.append(download_info)
+ return True
+
+ print 'Downloading ' + filename
+ global headers
+ req = urllib2.Request(url, None, headers)
+
+ try:
+ response = urllib2.urlopen(req)
+ CHUNK = 128 * 1024
+ size = 0
+ filesize = -1
+ start_time = time.time()
+ last_time = start_time
+ last_size = size
+ fp = open(filename, 'wb')
+ while True:
+ chunk = response.read(CHUNK)
+ if not chunk: break
+ if size == 0 and chunk.find('<div id="sign_in_box">') != -1:
+ # our urllib2 cookies have gone awol - login again
+ login(False)
+ req = urllib2.Request(url, None, headers)
+ response = urllib2.urlopen(req)
+ chunk = response.read(CHUNK)
+ if size == 0:
+ filesize = int(response.info()['Content-Length'])
+ fp.write(chunk)
+ size += len(chunk)
+ now = time.time()
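+ # print a progress line at most once every 20 seconds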
+ if options.progress and now-last_time > 20:
+ rate = (size-last_size)/(now-last_time)
+ estimate = ""
+ if filesize > 0 and rate > 0:
+ remaining_seconds = (filesize-size)/rate
+ if remaining_seconds > 110:
+ remaining = "%d minutes" % (remaining_seconds/60)
+ else:
+ remaining = "%d seconds" % remaining_seconds
+ estimate = "- %d%% est. %s" % ((100*size/filesize), remaining)
+ print "- %d Kb (%d Kb/s) %s" % (size/1024, (rate/1024)+0.5, estimate)
+ last_time = now
+ last_size = size
+ fp.close()
+ if options.progress:
+ now = time.time()
+ print "- Completed %s - %d Kb in %d seconds" % (filename, (filesize/1024)+0.5, now-last_time)
+
+ #handle errors
+ except urllib2.HTTPError, e:
+ print "HTTP Error:",e.code , downloadurl
+ return False
+ except urllib2.URLError, e:
+ print "URL Error:",e.reason , downloadurl
+ return False
+ return True
+
+def downloadkit(version):
+ global headers
+ global options
urlbase = 'http://developer.symbian.org/main/tools_and_kits/downloads/'
viewid = 5 # default to Symbian^3
@@ -167,54 +263,51 @@
for result in results:
downloadurl = urlbase + result['href']
filename = result['title']
- print 'Downloading ' + filename
- req = urllib2.Request(downloadurl, None, headers)
-
- try:
- response = urllib2.urlopen(req)
- CHUNK = 128 * 1024
- first_chunk = True
- fp = open(filename, 'wb')
- while True:
- chunk = response.read(CHUNK)
- if not chunk: break
- if first_chunk and chunk.find('<div id="sign_in_box">') != -1:
- # our urllib2 cookies have gone awol - login again
- login(False)
- req = urllib2.Request(downloadurl, None, headers)
- response = urllib2.urlopen(req)
- chunk = response.read(CHUNK)
- fp.write(chunk)
- first_chunk = False
- fp.close()
- #handle errors
- except urllib2.HTTPError, e:
- print "HTTP Error:",e.code , downloadurl
- except urllib2.URLError, e:
- print "URL Error:",e.reason , downloadurl
+ if options.nosrc and re.match(r"(src_sfl|src_oss)", filename) :
+ continue # no snapshots of Mercurial source thanks...
+
+ if not download_file(filename, downloadurl) :
+ continue # download failed
# unzip the file (if desired)
+ if re.match(r"patch", filename):
+ complete_outstanding_unzips() # ensure that the thing we are patching is completed first
+
if re.match(r"(bin|tools).*\.zip", filename):
- unzipthread = unzipfile(filename, 1, 0) # unzip once, don't delete
- threadlist.append(unzipthread)
- unzipthread.start()
+ schedule_unzip(filename, 1, 0) # unzip once, don't delete
elif re.match(r"src_.*\.zip", filename):
- unzipthread = unzipfile(filename, 1, 1) # zip of zips, delete top level
- threadlist.append(unzipthread)
- unzipthread.start()
+ schedule_unzip(filename, 1, 1) # zip of zips, delete top level
elif re.match(r"build_BOM.zip", filename):
- unzipthread = unzipfile(filename, 1, 1) # unpack then delete zip as it's not needed again
- threadlist.append(unzipthread)
- unzipthread.start()
+ schedule_unzip(filename, 1, 1) # unpack then delete zip as it's not needed again
# wait for the unzipping threads to complete
- print "Waiting for unzipping to finish..."
- for thread in threadlist:
- thread.join()
+ complete_outstanding_unzips()
return 1
+parser = OptionParser(version="%prog 0.5.1", usage="Usage: %prog [options] version")
+parser.add_option("-n", "--dryrun", action="store_true", dest="dryrun",
+ help="print the files to be downloaded, the 7z commands, and the recommended deletions")
+parser.add_option("--nosrc", action="store_true", dest="nosrc",
+ help="Don't download any of the source code available directly from Mercurial")
+parser.add_option("--nounzip", action="store_true", dest="nounzip",
+ help="Just download, don't unzip or delete any files")
+parser.add_option("--progress", action="store_true", dest="progress",
+ help="Report download progress")
+parser.set_defaults(dryrun=False, nosrc=False, nounzip=False, progress=False)
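+# example: "downloadkit.py --progress 3.0.e" fetches the 3.0.e PDK with
+# progress reports, unzipping each package as it arrives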
+
+(options, args) = parser.parse_args()
+if len(args) != 1:
+ parser.error("Must supply a PDK version, e.g. 3.0.e")
login(True)
-downloadkit(sys.argv[1])
+downloadkit(args[0])
+
+if options.dryrun:
+ print "# instructions for downloading kit " + args[0]
+ for download in download_list:
+ print download
+ for command in unzip_list:
+ print command
+
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/williamr/convert_to_eula.py Wed Feb 03 16:24:25 2010 +0000
@@ -0,0 +1,142 @@
+#!/usr/bin/python
+# Copyright (c) 2009 Symbian Foundation.
+# All rights reserved.
+# This component and the accompanying materials are made available
+# under the terms of the License "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Symbian Foundation - Initial contribution
+#
+# Description:
+# Map the SFL license to the EULA license, keeping a copy of the original file
+# in a parallel tree for creation of a "repair" kit to reinstate the SFL
+
+import os
+import os.path
+import re
+import codecs
+
+oldtext0 = re.compile('terms of the License "Symbian Foundation License v1.0"( to Symbian Foundation)?')
+oldtext1 = re.compile('the URL "http:..www.symbianfoundation.org/legal/sfl-v10.html"')
+
+newtext = [
+ 'terms of the License "Symbian Foundation License v1.0" to Symbian Foundation members and "Symbian Foundation End User License Agreement v1.0" to non-members',
+ 'the URL "http://www.symbianfoundation.org/legal/licencesv10.html"'
+]
+
+errorfiles = []
+multinoticefiles = []
+shadowroot = 'shadow_epoc32'
+
+def file_type(file) :
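+ # sniff the first 256 bytes: a 0xFF 0xFE BOM means UTF-16 LE, any other
+ # NUL byte means binary (skip it), otherwise treat as plain text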
+ f = open(file, 'r')
+ data = f.read(256)
+ f.close()
+ if len(data) < 2:
+ return None # too short to be worth bothering about anyway
+ if data[0] == chr(255) and data[1] == chr(254) :
+ return 'utf_16_le'
+ if data.find(chr(0)) >= 0 :
+ return None # zero byte implies binary file
+ return 'text'
+
+def map_eula(dir, name, encoded) :
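+ # rewrite the SFL notice in one file to the combined SFL/EULA wording,
+ # preserving the untouched original under shadow_epoc32 for the repair kit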
+ global oldtext0
+ global oldtext1
+ global newtext
+ file = os.path.join(dir, name)
+ if encoded == 'text':
+ f = open(file, 'r')
+ else:
+ f = codecs.open(file, 'r', encoding=encoded)
+ lines = f.readlines()
+ # print ">> %s encoded as %s" % (file, f.encoding)
+ f.close()
+
+ updated = 0
+ newlines = []
+ while len(lines) > 0:
+ line = lines.pop(0)
+ pos1 = oldtext0.search(line)
+ if pos1 != None:
+ # be careful - oldtext is a prefix of newtext
+ if pos1.group(1) != None:
+ # line already converted - nothing to do
+ newlines.append(line)
+ continue
+ midlines = []
+ midlinecount = 1
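+ # the URL line normally follows within a couple of lines; tolerate one
+ # intervening non-blank line plus any blank ones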
+ while len(lines) > 0:
+ nextline = lines.pop(0)
+ if not re.match(r'^\s*$', nextline):
+ # non-blank line
+ if midlinecount == 0:
+ break
+ midlinecount -= 1
+ midlines.append(nextline)
+ urlline = nextline
+ pos2 = oldtext1.search(urlline)
+ if pos2 != None:
+ # found it - assume that there's only one instance
+ newline = oldtext0.sub(newtext[0], line)
+ newurl = oldtext1.sub(newtext[1], urlline)
+ newlines.append(newline)
+ newlines.extend(midlines)
+ newlines.append(newurl)
+ updated += 1
+ continue
+ else:
+ if updated != 0:
+ lineno = 1 + len(newlines)
+ print "Problem in " + file + " at " + lineno + ": incorrectly formatted >"
+ print line
+ print midlines
+ print urlline
+ global errorfiles
+ errorfiles.append(file)
+ break
+ newlines.append(line)
+
+ if updated == 0:
+ print " = no change to " + file
+ return
+
+ if updated > 1:
+ global multinoticefiles
+ multinoticefiles.append(file)
+ print '! found %d SFL notices in %s' % (updated, file)
+
+ global shadowroot
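+ # move the original into the shadow tree, then write the converted text
+ # back to the original path in the same encoding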
+ shadowdir = os.path.join(shadowroot, dir)
+ if not os.path.exists(shadowdir) :
+ os.makedirs(shadowdir)
+ newfile = os.path.join(shadowroot,file)
+ os.rename(file, newfile)
+ if encoded == 'text':
+ f = open(file, 'w')
+ else:
+ f = codecs.open(file, 'w', encoding=encoded)
+ f.writelines(newlines)
+ f.close()
+ print "* updated %s (encoding %s)" % (file, f.encoding)
+
+# process tree
+
+for root, dirs, files in os.walk('epoc32', topdown=True):
+ if re.match('epoc32$', root):
+ if 'build' in dirs:
+ dirs.remove('build') # don't recurse into the epoc32/build subtree
+ for name in files:
+ encoding = file_type(os.path.join(root, name))
+ if encoding:
+ map_eula(root, name, encoding)
+
+print '%d problem files' % len(errorfiles)
+print errorfiles
+
+print '%d files with multiple notices' % len(multinoticefiles)
+print multinoticefiles
+
+