downloadkit/downloadkit.py
changeset 280 150026b6d3e6
parent 279 73890f073898
child 299 c0fb460d1a21
diff -r 73890f073898 -r 150026b6d3e6 downloadkit/downloadkit.py
--- a/downloadkit/downloadkit.py	(279:73890f073898)
+++ b/downloadkit/downloadkit.py	(280:150026b6d3e6)
@@ -24,14 +24,14 @@
 from BeautifulSoup import BeautifulSoup
 from optparse import OptionParser
 import hashlib
 import xml.etree.ElementTree as ET
 
-version = '0.19'
+version = '0.20'
 user_agent = 'downloadkit.py script v' + version
 headers = { 'User-Agent' : user_agent }
-top_level_url = "https://developer.symbian.org"
+top_level_url = "https://developer-secure.symbian.org"
 passman = urllib2.HTTPPasswordMgrWithDefaultRealm()	# not relevant for live Symbian website
 download_list = []
 failure_list = []
 unzip_list = []
 
@@ -432,11 +432,11 @@
 
 def report_to_symbian(version, what):
 	global options
 	if not options.publicity:
 		return
-	reporting_url = "http://developer.symbian.org/downloadkit_report/%s/%s/args=" % (version, what)
+	reporting_url = "http://developer-secure.symbian.org/downloadkit_report/%s/%s/args=" % (version, what)
 	if options.dryrun:
 		reporting_url += "+dryrun"
 	if options.nosrc:
 		reporting_url += "+nosrc"
 	if options.nowinscw:
@@ -480,21 +480,21 @@
 
 	req = urllib2.Request(url, None, headers)
 	response = urllib2.urlopen(req)
 	doc=response.read()
 	
+	if options.debug:
+		f = open("downloadpage.html","w")
+		print >>f, doc
+		f.close()
+
 	# BeatifulSoup chokes on some javascript, so we cut away everything before the <body>
 	try:
 		bodystart=doc.find('<body>')
 		doc = doc[bodystart:]
 	except:
 		pass
-
-	if options.debug:
-		f = open("downloadpage.html","w")
-		print >>f, doc
-		f.close()
 
 	soup=BeautifulSoup(doc)
 
 	# check that this is the right version
 	match = re.search(' v(\S+)</h2>', doc, re.IGNORECASE)
@@ -593,11 +593,11 @@
 	nounzip=False,
 	nodelete=False,
 	progress=False,
 	username='',
 	password='',
-	webhost = 'developer.symbian.org',
+	webhost = 'developer-secure.symbian.org',
 	resume=True,
 	publicity=True,
 	debug=False
 	)
 
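
For context on the middle hunk: the debug dump now runs before the document is trimmed to the <body> tag, so downloadpage.html records the page exactly as the server returned it rather than the cut-down text that is handed to BeautifulSoup. A rough standalone sketch of that fetch, dump, trim and parse sequence, assuming Python 2 with urllib2 and BeautifulSoup 3 as the script uses; the URL and the local debug flag below are placeholders, not values taken from the script:

import urllib2
from BeautifulSoup import BeautifulSoup

debug = True                                    # stand-in for options.debug
url = "https://developer-secure.symbian.org/"   # placeholder page, not the real kit download URL
headers = {'User-Agent': 'downloadkit.py sketch'}

req = urllib2.Request(url, None, headers)
doc = urllib2.urlopen(req).read()

# Dump the page before any trimming, so the debug file matches the raw server response.
if debug:
    f = open("downloadpage.html", "w")
    print >> f, doc
    f.close()

# BeautifulSoup 3 can choke on script blocks, so parse only from <body> onwards.
bodystart = doc.find('<body>')
if bodystart != -1:
    doc = doc[bodystart:]

soup = BeautifulSoup(doc)
print len(soup.findAll('a'))                    # e.g. count the links found on the page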