downloadkit/downloadkit.py
changeset 193 f74ba2451a44
parent 174 5ebd70511e4c
child 196 32417d4f42da

--- downloadkit/downloadkit.py	192:b20cc3d58393
+++ downloadkit/downloadkit.py	193:f74ba2451a44
@@ -24,73 +24,89 @@
 from BeautifulSoup import BeautifulSoup
 from optparse import OptionParser
 import hashlib
 import xml.etree.ElementTree as ET
 
-version = '0.11'
+version = '0.12'
 user_agent = 'downloadkit.py script v' + version
 headers = { 'User-Agent' : user_agent }
-top_level_url = "http://developer.symbian.org"
+top_level_url = "https://developer.symbian.org"
+passman = urllib2.HTTPPasswordMgrWithDefaultRealm()	# not relevant for live Symbian website
 download_list = []
 unzip_list = []
 
 def build_opener(debug=False):
-    # Create a HTTP and HTTPS handler with the appropriate debug
-    # level.  We intentionally create a new one because the
-    # OpenerDirector class in urllib2 is smart enough to replace
-    # its internal versions with ours if we pass them into the
-    # urllib2.build_opener method.  This is much easier than trying
-    # to introspect into the OpenerDirector to find the existing
-    # handlers.
-    http_handler = urllib2.HTTPHandler(debuglevel=debug)
-    https_handler = urllib2.HTTPSHandler(debuglevel=debug)
-
-    # We want to process cookies, but only in memory so just use
-    # a basic memory-only cookie jar instance
-    cookie_jar = cookielib.LWPCookieJar()
-    cookie_handler = urllib2.HTTPCookieProcessor(cookie_jar)
-
-    handlers = [http_handler, https_handler, cookie_handler]
-    opener = urllib2.build_opener(*handlers)
-
-    # Save the cookie jar with the opener just in case it's needed
-    # later on
-    opener.cookie_jar = cookie_jar
-
-    return opener
+	# Create a HTTP and HTTPS handler with the appropriate debug
+	# level.  We intentionally create a new one because the
+	# OpenerDirector class in urllib2 is smart enough to replace
+	# its internal versions with ours if we pass them into the
+	# urllib2.build_opener method.  This is much easier than trying
+	# to introspect into the OpenerDirector to find the existing
+	# handlers.
+	http_handler = urllib2.HTTPHandler(debuglevel=debug)
+	https_handler = urllib2.HTTPSHandler(debuglevel=debug)
+	
+	# We want to process cookies, but only in memory so just use
+	# a basic memory-only cookie jar instance
+	cookie_jar = cookielib.LWPCookieJar()
+	cookie_handler = urllib2.HTTPCookieProcessor(cookie_jar)
+	
+	# add HTTP authentication password handler (only relevant for Symbian staging server)
+	authhandler = urllib2.HTTPBasicAuthHandler(passman)
+	
+	handlers = [authhandler, http_handler, https_handler, cookie_handler]
+	opener = urllib2.build_opener(*handlers)
+	
+	# Save the cookie jar with the opener just in case it's needed
+	# later on
+	opener.cookie_jar = cookie_jar
+
+	return opener
 
 urlopen = urllib2.urlopen
 Request = urllib2.Request
 
 def quick_networking_check():
 	global options
 	defaulttimeout = socket.getdefaulttimeout()
 	socket.setdefaulttimeout(15)
-	probesite = 'https://developer.symbian.org'
+	probesite = top_level_url
 	probeurl = probesite + '/main/user_profile/login.php'
 	headers = { 'User-Agent' : user_agent }
 
 	req = urllib2.Request(probeurl, None, headers)
+
+	try:
+		response = urllib2.urlopen(req)
+		doc=response.read()
+	except urllib2.URLError, e:
+		if hasattr(e, 'code') and e.code == 401:
+			# Needs HTTP basic authentication
+			print >> sys.stderr, 'HTTP username: ',
+			http_username=sys.stdin.readline().strip()
+			http_password=getpass.getpass('HTTP password: ')
+			passman.add_password(None, top_level_url, http_username, http_password)
+			# now try again...
 
 	try:
 		response = urllib2.urlopen(req)
 		doc=response.read()
 	except urllib2.URLError, e:
 		print '*** Problem accessing ' + probesite
 		if hasattr(e, 'reason'):
 			print '*** Reason: ', e.reason
 		elif hasattr(e, 'code'):
 			print '*** Error code: ', e.code
-		print "Do you need to use a proxy server to access the developer.symbian.org website?"
+		print "Do you need to use a proxy server to access the %s website?" % probesite
 		sys.exit(1)
 	socket.setdefaulttimeout(defaulttimeout)	# restore the default timeout
 	if options.progress:
 		print "Confirmed that we can access " + probesite
 
 def login(prompt):
 	global options
-	loginurl = 'https://developer.symbian.org/main/user_profile/login.php'
+	loginurl =  top_level_url + '/main/user_profile/login.php'
 	
 	if prompt:
 		if options.username == '':
 			print >> sys.stderr, 'username: ',
 			options.username=sys.stdin.readline().strip()
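
Note on the change above: the passman / authhandler pair is the stock urllib2 Basic-auth recipe. The password manager starts empty, the HTTPBasicAuthHandler wrapping it is installed in the opener, and credentials are only added later (quick_networking_check does this when the probe request comes back 401). A minimal standalone sketch of the same pattern, against a hypothetical staging host with placeholder credentials (Python 2, like the script):

    import urllib2, getpass

    staging = 'https://staging.example.org'              # hypothetical host, not from the changeset
    passman = urllib2.HTTPPasswordMgrWithDefaultRealm()  # realm None => credentials match any realm
    opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(passman))
    urllib2.install_opener(opener)

    # in downloadkit.py add_password is only called after the server answers 401;
    # here we add placeholder credentials up front for brevity
    passman.add_password(None, staging, 'someuser', getpass.getpass('HTTP password: '))
    print urllib2.urlopen(staging + '/main/user_profile/login.php').read()[:200]
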
@@ -290,12 +306,19 @@
 				return False
 		info = response.info()
 		if 'Content-Length' in info:
 			filesize = int(info['Content-Length'])
 		else:
+			match = re.search('>([^>]+Licen[^<]+)<', chunk, re.IGNORECASE)
+			if match:
+				license = match.group(1).replace('&amp;','&')
+				print "*** %s is subject to the %s which you have not yet accepted\n" % (filename,license)
+				return False
 			print "*** HTTP response did not contain 'Content-Length' when expected"
-			print info
+			if options.debug:
+				print info
+				print chunk
 			return False
 
 	except urllib2.URLError, e:
 		print '- ERROR: Failed to start downloading ' + filename
 		if hasattr(e, 'reason'):
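
The licence check added above relies on the server returning an HTML page (hence no Content-Length for the zip) that names the licence whenever a kit file still needs a click-through acceptance. Roughly what the regex extracts, shown here against an invented page fragment:

    import re

    # first chunk read from the response body; this fragment is invented for illustration
    chunk = '<td><a href="agreement.php?id=27">Symbian Foundation End User Licence Agreement</a></td>'
    match = re.search('>([^>]+Licen[^<]+)<', chunk, re.IGNORECASE)
    if match:
        license = match.group(1).replace('&amp;','&')
        print license   # Symbian Foundation End User Licence Agreement
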
@@ -352,11 +375,11 @@
 	return True
 
 def downloadkit(version):	
 	global headers
 	global options
-	urlbase = 'http://developer.symbian.org/main/tools_and_kits/downloads/'
+	urlbase = top_level_url + '/main/tools_and_kits/downloads/'
 
 	viewid = 5   # default to Symbian^3
 	if version[0] == 2:
 		viewid= 1  # Symbian^2
 	if version[0] == 3:
@@ -372,15 +395,27 @@
 		bodystart=doc.find('<body>')
 		doc = doc[bodystart:]
 	except:
 		pass
 
-	threadlist = []
+	if options.debug:
+		f = open("downloadpage.html","w")
+		print >>f, doc 
+		f.close()
+
+	soup=BeautifulSoup(doc)
+
+	# check that this is the right version
+	match = re.search('Platform Release v(\d\.\d\.[0-9a-z]+)', doc, re.IGNORECASE)
+	if match.group(1) != version:
+		print "*** ERROR: version %s is not available" % version
+		print "*** the website is offering version %s instead" % match.group(1)
+		return 0
+		
 	# let's hope the HTML format never changes...
 	# <a href='download.php?id=27&cid=60&iid=270' title='src_oss_mw.zip'> ...</a> 
-
-	soup=BeautifulSoup(doc)
+	threadlist = []
 	results=soup.findAll('a', href=re.compile("^download"), title=re.compile("\.(zip|xml)$"))
 	results.sort(orderResults)
 	for result in results:
 		downloadurl = urlbase + result['href']
 		filename = result['title']
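
The version check added above scrapes the "Platform Release v..." string out of the download page and stops if it does not match the version requested on the command line. A quick illustration against an invented page fragment:

    import re

    version = '3.0.f'                                          # version requested on the command line
    doc = '<h2>PDK downloads - Platform Release v3.0.e</h2>'   # invented page fragment
    match = re.search('Platform Release v(\d\.\d\.[0-9a-z]+)', doc, re.IGNORECASE)
    if match and match.group(1) != version:
        print "*** ERROR: version %s is not available" % version
        print "*** the website is offering version %s instead" % match.group(1)
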
@@ -428,28 +463,32 @@
 	help="login to website as USER")
 parser.add_option("-p", "--password", dest="password", metavar="PWD",
 	help="specify the account password")
 parser.add_option("--debug", action="store_true", dest="debug", 
 	help="debug HTML traffic (not recommended!)")
+parser.add_option("--webhost", dest="webhost", metavar="SITE",
+	help="use alternative website (for testing!)")
 parser.set_defaults(
 	dryrun=False, 
 	nosrc=False, 
 	nowinscw=False, 
 	nounzip=False, 
 	nodelete=False, 
 	progress=False,
 	username='',
 	password='',
+	webhost = 'developer.symbian.org',
 	debug=False
 	)
 
(options, args) = parser.parse_args()
 if len(args) != 1:
 	parser.error("Must supply a PDK version, e.g. 3.0.f")
 if not check_unzip_environment() :
 	parser.error("Unable to execute 7z command")
 
+top_level_url = "https://" + options.webhost
 opener = build_opener(options.debug)
 urllib2.install_opener(opener)
 
 quick_networking_check()
 login(True)
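
Because top_level_url is now rebuilt from --webhost before the opener is created, every URL in the script (probe, login, download index) follows the chosen host, while the default of developer.symbian.org keeps the existing behaviour. A possible invocation against a staging mirror (host name invented for illustration):

    python downloadkit.py --webhost staging.developer.example.org 3.0.f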