24 from BeautifulSoup import BeautifulSoup |
24 from BeautifulSoup import BeautifulSoup |
25 from optparse import OptionParser |
25 from optparse import OptionParser |
26 import hashlib |
26 import hashlib |
27 import xml.etree.ElementTree as ET |
27 import xml.etree.ElementTree as ET |
28 |
28 |
29 version = '0.18' |
29 version = '0.19' |
30 user_agent = 'downloadkit.py script v' + version |
30 user_agent = 'downloadkit.py script v' + version |
31 headers = { 'User-Agent' : user_agent } |
31 headers = { 'User-Agent' : user_agent } |
32 top_level_url = "https://developer.symbian.org" |
32 top_level_url = "https://developer.symbian.org" |
33 passman = urllib2.HTTPPasswordMgrWithDefaultRealm() # not relevant for live Symbian website |
33 passman = urllib2.HTTPPasswordMgrWithDefaultRealm() # not relevant for live Symbian website |
34 download_list = [] |
34 download_list = [] |
428 print '- ERROR: %s checksum does not match' % filename |
428 print '- ERROR: %s checksum does not match' % filename |
429 return False |
429 return False |
430 |
430 |
431 return True |
431 return True |
432 |
432 |
|
def report_to_symbian(version, what):
	"""Send an anonymous usage report to the Symbian website.

	Builds a reporting URL containing the kit version, the event name
	(`what`) and a "+flag" token for every boolean command-line option
	that is enabled, then fires a single GET request at it.  The server
	response is ignored (it is always a 404) and any network failure is
	swallowed, so reporting can never interfere with the download itself.
	Does nothing when the user opted out via --nopublicity.
	"""
	global options
	if not options.publicity:
		return
	reporting_url = "http://developer.symbian.org/downloadkit_report/%s/%s/args=" % (version, what)
	# Each reported flag token is spelled exactly like the corresponding
	# options attribute, so a simple getattr() loop reproduces the
	# original one-if-per-option chain (order preserved).
	flags = ('dryrun', 'nosrc', 'nowinscw', 'noarmv5', 'nounzip',
		'nodelete', 'progress', 'resume', 'debug')
	for flag in flags:
		if getattr(options, flag):
			reporting_url += "+" + flag
	request = urllib2.Request(reporting_url, None, headers)
	try:
		urllib2.urlopen(request)	# ignore the response, which will always be 404
	except urllib2.URLError:
		# Best-effort only: a failed report is silently discarded.
		return
|
461 |
433 def downloadkit(version): |
462 def downloadkit(version): |
434 global headers |
463 global headers |
435 global options |
464 global options |
436 global failure_list |
465 global failure_list |
437 urlbase = top_level_url + '/main/tools_and_kits/downloads/' |
466 urlbase = top_level_url + '/main/tools_and_kits/downloads/' |
438 |
467 |
439 viewid = 5 # default to Symbian^3 |
468 viewid = 5 # default to Symbian^3 |
440 if version[0] == '2': |
469 if version[0] == '2': |
441 viewid= 1 # Symbian^2 |
470 viewid = 1 # Symbian^2 |
442 if version[0] == '3': |
471 if version[0] == '3': |
443 viewid= 5 # Symbian^3 |
472 viewid = 5 # Symbian^3 |
|
473 if version.startswith('lucky'): |
|
474 viewid = 12 # Do you feel lucky? |
|
475 version = version[5:] |
444 url = urlbase + ('view.php?id=%d'% viewid) |
476 url = urlbase + ('view.php?id=%d'% viewid) |
445 if len(version) > 1: |
477 if len(version) > 1: |
446 # single character version means "give me the latest" |
478 # single character version means "give me the latest" |
447 url = url + '&vId=' + version |
479 url = url + '&vId=' + version |
448 |
480 |
463 f.close() |
495 f.close() |
464 |
496 |
465 soup=BeautifulSoup(doc) |
497 soup=BeautifulSoup(doc) |
466 |
498 |
467 # check that this is the right version |
499 # check that this is the right version |
468 match = re.search('Platform Release (\(Public\) )?v(\d\.\d\.[0-9a-z]+)', doc, re.IGNORECASE) |
500 match = re.search(' v(\S+)</h2>', doc, re.IGNORECASE) |
469 if not match: |
501 if not match: |
470 print "*** ERROR: no version information in the download page" |
502 print "*** ERROR: no version information in the download page" |
471 return 0 |
503 return 0 |
472 |
504 |
473 if len(version) > 1: |
505 if len(version) > 1: |
474 if match.group(2) != version: |
506 if match.group(1) != version: |
475 print "*** ERROR: version %s is not available" % version |
507 print "*** ERROR: version %s is not available" % version |
476 print "*** the website is offering version %s instead" % match.group(2) |
508 print "*** the website is offering version %s instead" % match.group(1) |
477 return 0 |
509 return 0 |
478 else: |
510 else: |
479 print "The latest version of Symbian^%s is PDK %s" % (version, match.group(2)) |
511 print "The latest version of Symbian^%s is PDK %s" % (version, match.group(1)) |
480 |
512 |
481 # let's hope the HTML format never changes... |
513 # let's hope the HTML format never changes... |
482 # <a href='download.php?id=27&cid=60&iid=270' title='src_oss_mw.zip'> ...</a> |
514 # <a href='download.php?id=27&cid=60&iid=270' title='src_oss_mw.zip'> ...</a> |
483 threadlist = [] |
515 threadlist = [] |
484 results=soup.findAll('a', href=re.compile("^download"), title=re.compile("\.(zip|xml)$")) |
516 results=soup.findAll('a', href=re.compile("^download"), title=re.compile("\.(zip|xml)$")) |
509 schedule_unzip(filename, 1, 0) # unzip once, don't delete |
541 schedule_unzip(filename, 1, 0) # unzip once, don't delete |
510 elif re.match(r"src_.*\.zip", filename): |
542 elif re.match(r"src_.*\.zip", filename): |
511 schedule_unzip(filename, 1, 1) # zip of zips, delete top level |
543 schedule_unzip(filename, 1, 1) # zip of zips, delete top level |
512 elif re.match(r"build_BOM.zip", filename): |
544 elif re.match(r"build_BOM.zip", filename): |
513 schedule_unzip(filename, 1, 1) # unpack then delete zip as it's not needed again |
545 schedule_unzip(filename, 1, 1) # unpack then delete zip as it's not needed again |
|
546 |
|
547 report_to_symbian(version, "downfailures_%d" % len(failure_list)) |
514 |
548 |
515 # wait for the unzipping threads to complete |
549 # wait for the unzipping threads to complete |
516 complete_outstanding_unzips() |
550 complete_outstanding_unzips() |
517 |
551 |
518 if len(failure_list) > 0: |
552 if len(failure_list) > 0: |
539 help="Do not delete files after unzipping") |
573 help="Do not delete files after unzipping") |
540 parser.add_option("--progress", action="store_true", dest="progress", |
574 parser.add_option("--progress", action="store_true", dest="progress", |
541 help="Report download progress") |
575 help="Report download progress") |
542 parser.add_option("-u", "--username", dest="username", metavar="USER", |
576 parser.add_option("-u", "--username", dest="username", metavar="USER", |
543 help="login to website as USER") |
577 help="login to website as USER") |
544 parser.add_option("-p", "--password", dest="password", metavar="PWD", |
578 parser.add_option("--password", dest="password", metavar="PWD", |
545 help="specify the account password") |
579 help="specify the account password") |
546 parser.add_option("--debug", action="store_true", dest="debug", |
580 parser.add_option("--debug", action="store_true", dest="debug", |
547 help="debug HTML traffic (not recommended!)") |
581 help="debug HTML traffic (not recommended!)") |
548 parser.add_option("--webhost", dest="webhost", metavar="SITE", |
582 parser.add_option("--webhost", dest="webhost", metavar="SITE", |
549 help="use alternative website (for testing!)") |
583 help="use alternative website (for testing!)") |
550 parser.add_option("--noresume", action="store_false", dest="resume", |
584 parser.add_option("--noresume", action="store_false", dest="resume", |
551 help="Do not attempt to continue a previous failed transfer") |
585 help="Do not attempt to continue a previous failed transfer") |
|
586 parser.add_option("--nopublicity", action="store_false", dest="publicity", |
|
587 help="Do not tell Symbian how I am using downloadkit") |
552 parser.set_defaults( |
588 parser.set_defaults( |
553 dryrun=False, |
589 dryrun=False, |
554 nosrc=False, |
590 nosrc=False, |
555 nowinscw=False, |
591 nowinscw=False, |
556 noarmv5=False, |
592 noarmv5=False, |
572 |
609 |
573 top_level_url = "https://" + options.webhost |
610 top_level_url = "https://" + options.webhost |
574 opener = build_opener(options.debug) |
611 opener = build_opener(options.debug) |
575 urllib2.install_opener(opener) |
612 urllib2.install_opener(opener) |
576 |
613 |
|
614 report_to_symbian(args[0], "what") |
577 quick_networking_check() |
615 quick_networking_check() |
578 login(True) |
616 login(True) |
579 downloadkit(args[0]) |
617 success = downloadkit(args[0]) |
580 |
618 if success: |
|
619 report_to_symbian(args[0], "success") |
|
620 else: |
|
621 report_to_symbian(args[0], "failed") |
|
622 |
581 if options.dryrun: |
623 if options.dryrun: |
582 print "# instructions for downloading kit " + args[0] |
624 print "# instructions for downloading kit " + args[0] |
583 for download in download_list: |
625 for download in download_list: |
584 print download |
626 print download |
585 for command in unzip_list: |
627 for command in unzip_list: |