downloadkit/downloadkit.py
changeset 139 7b2d146ef884
parent 129 249ca6c587b6
child 140 9baccbcc5509
@@ -18,14 +18,17 @@
 import cookielib
 import sys
 import getpass
 import re
 from BeautifulSoup import BeautifulSoup
+from optparse import OptionParser
 
 user_agent = 'downloadkit.py script'
 headers = { 'User-Agent' : user_agent }
 top_level_url = "http://developer.symbian.org"
+download_list = []
+unzip_list = []
 
 username = ''
 password = ''
 
 COOKIEFILE = 'cookies.lwp'
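Note: the two new module-level lists back the --dryrun mode introduced at the end of this changeset. In a dry run, download_file() and schedule_unzip() append the commands they would have executed to download_list and unzip_list, and the new top-level code prints both lists after the run. A minimal, self-contained sketch of the optparse round-trip the changeset adopts (the argv values here are illustrative, not part of the change):

	from optparse import OptionParser

	parser = OptionParser(usage="Usage: %prog [options] version")
	parser.add_option("-n", "--dryrun", action="store_true", dest="dryrun")
	parser.set_defaults(dryrun=False)

	# parse an explicit argv to show the shape of the result
	(options, args) = parser.parse_args(["-n", "3.0.e"])
	assert options.dryrun == True and args == ["3.0.e"]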
@@ -110,10 +113,38 @@
 				self.unzip(subzip, unziplevel-1, deletelevel-1)
 		return topstatus
 	def run(self):
 		self.status = self.unzip(self.filename, self.levels, self.deletelevels)
 
+threadlist = []
+def schedule_unzip(filename, unziplevel, deletelevel):
+	global options
+	if options.dryrun :
+		global unzip_list
+		if unziplevel > 0:
+			unzip_list.append("7z x -y %s" % filename)
+			if unziplevel > 1:
+				unzip_list.append("# unzip recursively %d more times" % (unziplevel-1))
+		if deletelevel > 0:
+			unzip_list.append("# delete %s" % filename)
+			if deletelevel > 1:
+				unzip_list.append("# delete zip files recursively %d more times" % (deletelevel-1))
+		return
+
+	unzipthread = unzipfile(filename, unziplevel, deletelevel)
+	global threadlist
+	threadlist.append(unzipthread)
+	unzipthread.start()
+
+def complete_outstanding_unzips():
+	global options
+	if options.dryrun:
+		return
+	print "Waiting for outstanding commands to finish..."
+	for thread in threadlist:
+		thread.join()
+
 def orderResults(x,y) :
 	def ranking(name) :
 		# 1st = release_metadata, build_BOM.zip (both small things!)
 		if re.match(r"(build_BOM|release_metadata)", name):
 			return 1000;
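Note: schedule_unzip() (added above) wraps the file's existing unzipfile thread class; only its run() method is visible in the context lines, the class itself sits outside this hunk. Given the start()/join() calls and the attributes run() uses, it is presumably shaped roughly like the sketch below (an assumption for orientation, not the file's actual code):

	import threading

	class unzipfile(threading.Thread):
		def __init__(self, filename, levels, deletelevels):
			threading.Thread.__init__(self)
			self.filename = filename            # zip to unpack
			self.levels = levels                # nested unzip depth
			self.deletelevels = deletelevels    # nested delete depth
		def unzip(self, filename, unziplevel, deletelevel):
			return 0    # body elided: runs 7z, recursing into nested zips
		def run(self):
			self.status = self.unzip(self.filename, self.levels, self.deletelevels)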
@@ -133,12 +164,52 @@
 		return 10000;
 	xtitle = x['title']
 	ytitle = y['title']
 	return cmp(ranking(xtitle)+cmp(xtitle,ytitle), ranking(ytitle))
 
-def downloadkit(version):
-	headers = { 'User-Agent' : user_agent }
+def download_file(filename,url):
+	global options
+	if options.dryrun :
+		global download_list
+		download_info = "download %s %s" % (filename, url)
+		download_list.append(download_info)
+		return True
+
+	print 'Downloading ' + filename
+	global headers
+	req = urllib2.Request(url, None, headers)
+
+	try:
+		response = urllib2.urlopen(req)
+		CHUNK = 128 * 1024
+		first_chunk = True
+		fp = open(filename, 'wb')
+		while True:
+			chunk = response.read(CHUNK)
+			if not chunk: break
+			if first_chunk and chunk.find('<div id="sign_in_box">') != -1:
+				# our urllib2 cookies have gone awol - login again
+				login(False)
+				req = urllib2.Request(url, None, headers)
+				response = urllib2.urlopen(req)
+				chunk = response.read(CHUNK)
+			fp.write(chunk)
+			first_chunk = False
+		fp.close()
+
+	#handle errors
+	except urllib2.HTTPError, e:
+		print "HTTP Error:", e.code, url
+		return False
+	except urllib2.URLError, e:
+		print "URL Error:", e.reason, url
+		return False
+	return True
+
+def downloadkit(version):
+	global headers
+	global options
 	urlbase = 'http://developer.symbian.org/main/tools_and_kits/downloads/'
 
 	viewid = 5   # default to Symbian^3
 	if version[0] == 2:
 		viewid= 1  # Symbian^2
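Note: the sign_in_box check in download_file() guards against the portal serving its login page instead of the file once the session expires: the first 128 KB chunk is sniffed, and on a match the script calls login(False) and re-issues the request. login() itself is unchanged and outside this changeset; judging from `import cookielib` and COOKIEFILE at the top of the file, it presumably follows the standard urllib2 cookie-jar pattern, sketched here as an assumption:

	import cookielib, urllib2

	COOKIEFILE = 'cookies.lwp'
	cj = cookielib.LWPCookieJar()
	opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
	urllib2.install_opener(opener)            # urlopen() now carries the session cookies
	cj.save(COOKIEFILE, ignore_discard=True)  # persist the session between runs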
@@ -165,56 +236,48 @@
 	results=soup.findAll('a', href=re.compile("^download"), title=re.compile("\.(zip|xml)$"))
 	results.sort(orderResults)
 	for result in results:
 		downloadurl = urlbase + result['href']
 		filename = result['title']
-		print 'Downloading ' + filename
-		req = urllib2.Request(downloadurl, None, headers)
-
-		try:
-			response = urllib2.urlopen(req)
-			CHUNK = 128 * 1024
-			first_chunk = True
-			fp = open(filename, 'wb')
-			while True:
-				chunk = response.read(CHUNK)
-				if not chunk: break
-				if first_chunk and chunk.find('<div id="sign_in_box">') != -1:
-					# our urllib2 cookies have gone awol - login again
-					login(False)
-					req = urllib2.Request(downloadurl, None, headers)
-					response = urllib2.urlopen(req)
-					chunk = response.read(CHUNK)
-				fp.write(chunk)
-				first_chunk = False
-			fp.close()
-
-		#handle errors
-		except urllib2.HTTPError, e:
-			print "HTTP Error:",e.code , downloadurl
-		except urllib2.URLError, e:
-			print "URL Error:",e.reason , downloadurl
+
+		if options.nosrc and re.match(r"(src_sfl|src_oss)", filename) :
+			continue 	# no snapshots of Mercurial source thanks...
+
+		if download_file(filename, downloadurl) != True :
+			continue # download failed
 
 		# unzip the file (if desired)
+		if re.match(r"patch", filename):
+			complete_outstanding_unzips()	# ensure that the thing we are patching is completed first
+
 		if re.match(r"(bin|tools).*\.zip", filename):
-			unzipthread = unzipfile(filename, 1, 0)   # unzip once, don't delete
-			threadlist.append(unzipthread)
-			unzipthread.start()
+			schedule_unzip(filename, 1, 0)   # unzip once, don't delete
 		elif re.match(r"src_.*\.zip", filename):
-			unzipthread = unzipfile(filename, 1, 1)   # zip of zips, delete top level
-			threadlist.append(unzipthread)
-			unzipthread.start()
+			schedule_unzip(filename, 1, 1)   # zip of zips, delete top level
 		elif re.match(r"build_BOM.zip", filename):
-			unzipthread = unzipfile(filename, 1, 1)   # unpack then delete zip as it's not needed again
-			threadlist.append(unzipthread)
-			unzipthread.start()
+			schedule_unzip(filename, 1, 1)   # unpack then delete zip as it's not needed again
 
 	# wait for the unzipping threads to complete
-	print "Waiting for unzipping to finish..."
-	for thread in threadlist:
-		thread.join()
+	complete_outstanding_unzips()
 
 	return 1
 
+parser = OptionParser(usage="Usage: %prog [options] version", version="%prog 0.3")
+parser.add_option("-n", "--dryrun", action="store_true", dest="dryrun",
+	help="print the files to be downloaded, the 7z commands, and the recommended deletions")
+parser.add_option("--nosrc", action="store_true", dest="nosrc",
+	help="Don't download any of the source code available directly from Mercurial")
+parser.set_defaults(dryrun=False, nosrc=False)
+
+(options, args) = parser.parse_args()
+if len(args) != 1:
+	parser.error("Must supply a PDK version, e.g. 3.0.e")
 
 login(True)
-downloadkit(sys.argv[1])
+downloadkit(args[0])
+
+if options.dryrun:
+	print "# instructions for downloading kit " + args[0]
+	for download in download_list:
+		print download
+	for command in unzip_list:
+		print command
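Note: with this changeset applied, a dry run prints the plan instead of downloading anything: the queued download lines first, then the 7z and deletion commands. The output has roughly the shape below (file names and URL ids are illustrative; real entries come from the downloads page):

	$ python downloadkit.py --dryrun 3.0.e
	# instructions for downloading kit 3.0.e
	download build_BOM.zip http://developer.symbian.org/main/tools_and_kits/downloads/download?id=...
	download tools_example.zip http://developer.symbian.org/main/tools_and_kits/downloads/download?id=...
	7z x -y build_BOM.zip
	# delete build_BOM.zip
	7z x -y tools_example.zip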