# HG changeset patch # User wbernard # Date 1261589347 -7200 # Node ID d8ac696cc51f338efd96d0e61810cee490334bc0 # Parent be27ed110b50fb12348ad055d037632be8dcdf2d helium_7.0-r14027 diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/build.xml --- a/buildframework/helium/build.xml Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/build.xml Wed Dec 23 19:29:07 2009 +0200 @@ -24,15 +24,14 @@ Helium targets to build helium itself. - + - diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/config/diamonds_config.xml.ftl --- a/buildframework/helium/config/diamonds_config.xml.ftl Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/config/diamonds_config.xml.ftl Wed Dec 23 19:29:07 2009 +0200 @@ -23,7 +23,7 @@ - + @@ -47,12 +47,8 @@ - <#if (ant?keys?seq_contains('metadata.enable'))> - - <#else> - - + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/config/helium_data_model.xml --- a/buildframework/helium/config/helium_data_model.xml Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/config/helium_data_model.xml Wed Dec 23 19:29:07 2009 +0200 @@ -46,25 +46,25 @@ ats.testrun.name recommended string - Modify the test-run name if you have understanding of test.xml file or leave it as it is. Deafault value is a string consist of build id, product name, major and minor versions + Modify the test-run name if you have understanding of test.xml file or leave it as it is. Default value is a string consist of build id, product name, major and minor versions ats.plan.name recommended string - Modify the plan name if you have understanding of test.xml file or leave it as it is. Deafault value is "plan" + Modify the plan name if you have understanding of test.xml file or leave it as it is. Default value is "plan" ats.trace.enabled recommended string - Should be "True" if tracing is needed during the tests running on ATS3. Deafault value is "False", the values are case-sensitive. + Should be "True" if tracing is needed during the tests running on ATS3. Default value is "False", the values are case-sensitive. ats.ctc.enabled recommended string - Should be "True" if coverage measurement and dynamic analysis (CTC) tool support is to be used by ATS. Deafault value is "False", the values are case-sensitive. + Should be "True" if coverage measurement and dynamic analysis (CTC) tool support is to be used by ATS. Default value is "False", the values are case-sensitive. ats.email.list @@ -152,6 +152,12 @@ Path for creation of the drop file during the build. Default is ATS3AsteDrop.zip + ats.drops.count + discouraged + string + ATSDrop files counter. Value is set to have total number of ATSDrops. + + ats.product.name must string @@ -176,14 +182,26 @@ ats.aste.testrun.name recommended string - Modify the test-run name if you have understanding of test.xml file or leave it as it is. Deafault value is a string consist of build id, product name, major and minor versions + Modify the test-run name if you have understanding of test.xml file or leave it as it is. Default value is a string consist of build id, product name, major and minor versions ats.aste.plan.name recommended string - Modify the plan name if you have understanding of test.xml file or leave it as it is. Deafault value is "plan" - + Modify the plan name if you have understanding of test.xml file or leave it as it is. 
Default value is "plan" + + + ats.evalid.pre + never + string + Temp dir + + + ats.evalid.post + never + string + Temp dir + disable.analysis.tool allowed @@ -311,6 +329,54 @@ This is where the build logs are stored. + archive.log.dir + discouraged + string + This is where the archive logs are stored. + + + compile.log.dir + discouraged + string + This is where the compile logs are stored. + + + post.log.dir + discouraged + string + This is where the post build logs are stored. + + + prep.log.dir + discouraged + string + This is where the preparation build logs are stored. + + + test.log.dir + discouraged + string + This is where the ats test logs are stored. + + + localisation.log.dir + discouraged + string + This is where the localisation logs are stored. + + + roms.log.dir + discouraged + string + This is where the roms logs are stored. + + + diamonds.build.output.dir + discouraged + string + This is where the diamonds output XML files are stored. + + build.signal.status.dir discouraged string @@ -377,6 +443,12 @@ Defined if build.system is set to "ebs", which is the default. + schema.new + allowed + boolean + enabling / disabling the new schema (schema 3.0). + + build.system.sbs never string @@ -686,6 +758,12 @@ string List of targets that should be logged in to Diamonds. + + exceptions.target + never + string + target to be executed in case of exceptions (mainly to generate the summary and raise signal). + documentation.url.root @@ -714,6 +792,12 @@ Ensures the cleanup-all target is called when the build finishes. + skip.sbs.parser.exception + allowed + string + to skip the exception and continue the build during log parsing for raptor + + sbs.config allowed string @@ -799,6 +883,12 @@ Location of sysdef2make.pl file, which is used to translate system definition files to make files. Internal property that normally does not require changes. + ec.sysdef2make.cmdline + allowed + string + Extra command line parameters for the sysdef2make call. + + ec.tools.dir discouraged string @@ -1020,6 +1110,13 @@ string Contains Helium version information + + helium.revision + never + string + Contains Helium version information + + @@ -1198,6 +1295,12 @@ Is a previous builds md5 being passed manually + old.bom.log.publish.dir + allowed + string + Publish dir location of the old bom log. + + metadata.db.file never string @@ -1252,12 +1355,6 @@ If defined the qmake stage will be enabled during compilation. - qt.dir - recommended - string - The location of the Qt framework to build. - - qt.qmake.ant.template allowed string @@ -1283,12 +1380,6 @@ then those arguments are discarded. - qt.configure.log.name - never - string - The name of the Qt configure output log. - - absolute.prep.root.dir never string @@ -1769,6 +1860,12 @@ Configuration file for SIS-file generation. + sis.config.name + allowed + string + Common Configuration config name. Override to select a specific config group. + + sms.number never string @@ -2216,6 +2313,193 @@ string Location of build metadata database. + + ant.loglevel + allowed + string + Level of ant logging. 
+ + + last.major.release.database.url + never + string + URL for the database.xml of last major release + + + ivy.xml.file + never + string + Ivy configuration file + + + dependency.grph + never + string + Location of dependency.grph + + + python.dir + allowed + string + Location of python library + + + python.internal.dir + allowed + string + Location of nokia internal python library + + + subcon + allowed + string + subcon + + + dependency.external.grph + never + string + Location of dependencies_external.grph + + + build.logs.list + never + string + Contains comma separted list of build logs + + + binary.sizes.rom.logs.list + never + string + List of binary sizes rom logs + + + binary.sizes.output.file + never + string + File containing data related to flash image size + + + run.ccm + never + boolean + Enable CCM operations + + + old.bom.log + discouraged + string + Location of last usable XML BOM log file for creating a BOM delta. + + + symbian.version.week + allowed + string + Symbian release week + + + symbian.version + allowed + string + Symbian release version + + + s60.release + allowed + string + s60.release + + + symbian.version.year + allowed + string + Symbian release year + + + s60.version + allowed + string + s60.version + + + bom.log + never + string + Location of XML BOM log + + + build.number + allowed + integer + Build number + + + ado.quality.dirs.path + never + string + List of ado quality dirs + + + s60.getenv.path + never + string + Path referring to latest s60 release + + + ido.variant + allowed + string + Location of ido variant + + + overlay.files.path + never + string + List of overlay files to scan overlay structure and create a report + + + project.spec.name + allowed + string + project.spec.name + + + email.from + allowed + string + From email adress + + + diamonds.log.file + allowed + string + Location of diamonds log + + + md5.current.build + never + string + Location of current build md5 file + + + ccm.toplevel.project + never + string + Toplevel CCM project + + + project.startswith + allowed + string + Starting letters of project used to find complete CM name + + + release.notes.temp + never + string + Temp release notes + + Environment @@ -2234,6 +2518,11 @@ Preparing the build area. prep.config.file prep.delivery.file + old.bom.log.publish.dir + ado.quality.dirs.path + s60.getenv.path + ido.variant + overlay.files.path localFreeSpace @@ -2255,11 +2544,15 @@ build.system build.system.ebs ido.build.filter + build.logs.list + binary.sizes.rom.logs.list + binary.sizes.output.file sisFiles Creating SIS files. 
sis.config.file + sis.config.name rombuild @@ -2303,6 +2596,8 @@ publish.fileset.ids publish.release.subdir publish.release.dir + email.from + md5.current.build mobilecrash @@ -2325,6 +2620,9 @@ releasenotes.temp.props releasenotes.temp.props2 releasenotes.template + ccm.toplevel.project + project.startswith + release.notes.temp DeltaZip @@ -2366,8 +2664,10 @@ ec.history.option ec.scripts.dir ec.sysdef2make + ec.sysdef2make.cmdline ec.tools.dir ec.build.class + schema.new sbs.implicit.tools.build sbs.tools.config ec.mem.limit @@ -2423,6 +2723,7 @@ diamonds.build.id diamonds.build.url diamonds.target.list.to.log + diamonds.log.file Localisation @@ -2460,6 +2761,7 @@ ats.product.hwid ats.drop.file ats.aste.drop.file + ats.drops.count ats.aste.email.list ats.product.name ats.script.type @@ -2475,6 +2777,8 @@ eunitexerunner.flags drop.file.counter exclude.test.layers + ats.evalid.pre + ats.evalid.post Company @@ -2495,8 +2799,6 @@ Qt - qt.dir - qt.configure.log.name qt.qmake.ant.template qt.qmake.makefile.template qt.configure.args @@ -2519,6 +2821,14 @@ build.errors.limit build.id build.log.dir + archive.log.dir + compile.log.dir + roms.log.dir + localisation.log.dir + diamonds.build.output.dir + post.log.dir + prep.log.dir + test.log.dir build.signal.status.dir build.status.email.template build.log @@ -2530,6 +2840,7 @@ build.sisfiles.dir build.summary.file build.system.sbs + skip.sbs.parser.exception build.temp.dir build.type build.version @@ -2590,6 +2901,7 @@ helium.java.lib.dir helium.svn.workarea.dir helium.version + helium.revision ido.cenrep.root ido.cenrep.target ido.keep.old @@ -2690,5 +3002,32 @@ hlm.enable.asserts python.modules.path metadata.dbfile + exceptions.target + ant.loglevel + project.spec.name + + + Documentation + + last.major.release.database.url + ivy.xml.file + dependency.grph + python.dir + python.internal.dir + subcon + dependency.external.grph + + + Bom + + run.ccm + old.bom.log + symbian.version.week + symbian.version + symbian.version.year + s60.version + s60.release + bom.log + build.number diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/config/helium_distribution_test.cfg.xml --- a/buildframework/helium/config/helium_distribution_test.cfg.xml Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/config/helium_distribution_test.cfg.xml Wed Dec 23 19:29:07 2009 +0200 @@ -29,7 +29,7 @@ - + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/config/ivy/modules/python-2.5.ivy.xml --- a/buildframework/helium/config/ivy/modules/python-2.5.ivy.xml Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/config/ivy/modules/python-2.5.ivy.xml Wed Dec 23 19:29:07 2009 +0200 @@ -23,8 +23,8 @@ + revision="2.x"/> - + \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/config/java_checkstyle_config.xml --- a/buildframework/helium/config/java_checkstyle_config.xml Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/config/java_checkstyle_config.xml Wed Dec 23 19:29:07 2009 +0200 @@ -49,8 +49,15 @@ - diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/config/metadata_filter_config_default.ant.xml --- a/buildframework/helium/config/metadata_filter_config_default.ant.xml Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/config/metadata_filter_config_default.ant.xml Wed Dec 23 19:29:07 2009 +0200 @@ -21,7 +21,9 @@ ============================================================================ --> - + + Definitions of metadata filter inputs. 
+ @@ -41,6 +43,12 @@ + + + + + + @@ -54,8 +62,8 @@ - - + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/config/metadata_regex.csv --- a/buildframework/helium/config/metadata_regex.csv Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/config/metadata_regex.csv Wed Dec 23 19:29:07 2009 +0200 @@ -1,5 +1,5 @@ priority,regex,description -FATAL,.*mingw_make\.exe.*, +CRITICAL,.*Error:.*mingw_make\.exe.*, ERROR,.*\): Missing file:.*, ERROR,^(?:(?:\s*\d+\)\s*)|(?:\s*\*\*\*\s*))ERROR:.*, ERROR,.*Error:\s+.*, @@ -14,6 +14,7 @@ ERROR,^make(?:\[\d+\])?\: \*\*\*.*, ERROR,^make(?:\[\d+\])?:\s+.*\s+not\s+remade.*, ERROR,error: ((Internal fault):)$, +ERROR,.*No such file or directory$, ERROR,Exception: [A-Z0-9_]+.*, ERROR,.*target .* given more than once in the same rule.*, ERROR,^ERROR:.*, @@ -22,7 +23,14 @@ ERROR,.*no rule to make.*, ERROR,^ERROR\t.*, ERROR,.*Traceback \(most recent call last\).*, -CRITICAL,[Ww]arning:?\s+(#111-D|#1166-D|#117-D|#128-D|#1293-D|#1441-D|#170-D|#174-D|#175-D|#185-D|#186-D|#223-D|#231-D|#257-D|#284-D|#368-D|#414-D|#430-D|#47-D|#514-D|#546-D|#68-D|#69-D|#830-D|#940-D|#836-D|A1495E|L6318W|C2874W|C4127|C4355|C4530|C4702|C4786|LNK4049), +ERROR,^Application encountered an unexpected error\.\s*Stopping\.\s*, +ERROR,^Unable to write dump file .+, +ERROR,^Unable to connect to CM: .*, +ERROR,^.*: Incorrect slash in .*, +ERROR,^.*: Incorrect case for epoc32 tree in .*, +ERROR,^.*: Incorrect case versus exclusion list in .*, +ERROR,The system cannot find the path specified.*, +CRITICAL,.*[Ww]arning:?\s+(#111-D|#1166-D|#117-D|#128-D|#1293-D|#1441-D|#170-D|#174-D|#175-D|#185-D|#186-D|#223-D|#231-D|#257-D|#284-D|#368-D|#414-D|#430-D|#47-D|#514-D|#546-D|#68-D|#69-D|#830-D|#940-D|#836-D|A1495E|L6318W|C2874W|C4127|C4355|C4530|C4702|C4786|LNK4049).*, WARNING,^(\d+\))?\s.*WARNING:.*, WARNING,^MAKEDEF WARNING:.*, WARNING,.*\\\\(?)\(\d+\)\s:\sWarning:\s\(\d+\), diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/config/signaling_config_default.ant.xml --- a/buildframework/helium/config/signaling_config_default.ant.xml Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/config/signaling_config_default.ant.xml Wed Dec 23 19:29:07 2009 +0200 @@ -29,13 +29,13 @@ - - @@ -103,6 +103,10 @@ + + + + @@ -185,7 +189,12 @@ - + + + + + + @@ -195,7 +204,11 @@ - + + + + + @@ -205,7 +218,11 @@ - + + + + + @@ -215,7 +232,11 @@ - + + + + + @@ -226,7 +247,11 @@ - + + + + + @@ -236,7 +261,11 @@ - + + + + + @@ -246,7 +275,11 @@ - + + + + + @@ -260,27 +293,39 @@ - + + + + + - + - + + + + + - + - + + + + + @@ -308,11 +353,18 @@ - + - + + + + + + + + @@ -331,7 +383,11 @@ - + + + + + @@ -339,23 +395,24 @@ + + + + - + + + + + - - - - - - - @@ -366,12 +423,16 @@ - + - + + + + + - diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/config/stages_config_default.ant.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/config/stages_config_default.ant.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,59 @@ + + + + + Definitions of helium stages. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/config/version.txt --- a/buildframework/helium/config/version.txt Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/config/version.txt Wed Dec 23 19:29:07 2009 +0200 @@ -1,4 +1,4 @@ #Helium version - DO NOT EDIT -#Tue Sep 22 14:15:51 IST 2009 -last.major.helium.version=5.0 -helium.version=6.0.1 +#Fri Dec 18 15:07:03 EET 2009 +last.major.helium.version=6.0 +helium.version=7.0 diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/doc/src/api_changes.rst.ftl --- a/buildframework/helium/doc/src/api_changes.rst.ftl Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/doc/src/api_changes.rst.ftl Wed Dec 23 19:29:07 2009 +0200 @@ -93,3 +93,24 @@ * ${macro} + +Ant Tasks added +=============== +<#list doc.apiChanges.taskdef?sort as taskdef> + <#assign link = taskdef.@classname> + <#if taskdef.@state == 'added'> + <#if link?contains("com.nokia.helium")> +* `${taskdef} `_ + <#else> +* `${taskdef} `_ + + + + +Ant Tasks removed +================= +<#list doc.apiChanges.taskdef?sort as taskdef> + <#if taskdef.@state == 'removed'> +* ${taskdef} + + \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/doc/src/feature_list.rst --- a/buildframework/helium/doc/src/feature_list.rst Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/doc/src/feature_list.rst Wed Dec 23 19:29:07 2009 +0200 @@ -374,6 +374,13 @@ .. index:: single: feature - API Analysis +NSIS installer file creation +---------------------------- + +* Installer executables based on the NSIS installation software can be created. + + * Plugins include environment setting modification. + API analysis ------------ diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/doc/src/helium_overview.xml --- a/buildframework/helium/doc/src/helium_overview.xml Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/doc/src/helium_overview.xml Wed Dec 23 19:29:07 2009 +0200 @@ -77,6 +77,7 @@ svnant xmltask ivy + helium-antlib diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/doc/src/index.rst.ftl --- a/buildframework/helium/doc/src/index.rst.ftl Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/doc/src/index.rst.ftl Wed Dec 23 19:29:07 2009 +0200 @@ -78,20 +78,11 @@ tutorials/configuration/SimplestConfiguration tutorials/configuration/UseHlmTasksInConfiguration - tutorials/configuration/HowtoCreateANewSignal tutorials/rom_image tutorials/variant tutorials/qt_build <#if !ant?keys?seq_contains("sf")> nokia/blacktusk/howto-setup-blacktusk-with-helium - -* IDO_ - -.. _IDO: ido - -* TeamCI_ - -.. _TeamCI: teamci .. raw:: html @@ -112,36 +103,135 @@ :maxdepth: 1 <#if !ant?keys?seq_contains("sf")> + nokia/support nokia/nokia nokia/retrieving sf manual/running manual/configuring - manual/configuringdiamonds - manual/signaling + +.. raw:: html + + + + <#if !ant?keys?seq_contains("sf")> + + + +.. toctree:: + :maxdepth: 1 + +<#if !ant?keys?seq_contains("sf")> nokia/rndsdk_user_manual nokia/quality - manual/cruisecontrol manual/debugging metrics - manual/APIs -<#if !ant?keys?seq_contains("sf")> - api_changes + +.. raw:: html + + + + +<#if !ant?keys?seq_contains("sf")> +.. toctree:: + :maxdepth: 1 + + api_changes + nokia/releasenotes .. raw:: html +Helium Framework configuration +============================== + +.. 
toctree:: + :maxdepth: 1 + + tutorials/configuration/HowtoCreateANewSignal + manual/signaling + manual/configuringdiamonds + manual/cruisecontrol + manual/antlogging +* helium-antlib_ + +.. _helium-antlib: helium-antlib + +<#if !ant?keys?seq_contains("sf")> +Customer docs +============= + +* IDO_ + +.. _IDO: <#if ant['helium.version']?matches("^\\d+\\.0(?:\\.\\d+)?$")>../ido + +* TeamCI_ + +.. _TeamCI: <#if ant['helium.version']?matches("^\\d+\\.0(?:\\.\\d+)?$")>../teamci + + + Helium Architecture =================== diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/doc/src/manual/antlogging.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/doc/src/manual/antlogging.rst Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,105 @@ +Configuring Logging +===================== + +Features: +----------------- +* Will be initiated by the ANT Listener. +* Logging will starts whenever build starts. +* Helium logging framework offers you to configure the ant logging system for different stages of builds. +* You can log the build process into seperate ant log files for each stage. +* You can configure the log system to log different level of information (ex: debug, vebose, error). + +The configuration: +----------------- + +We can configure the stages for which helium should log the build process. + + * Stages + + * Stages are like preparation, compilation, postbuild etc.. for which we need to log build process. + * Stages will have attributes for start and end targets. + * Stages will specify from which target we need log the build process and at which target we need to end logging build process. + + .. csv-table:: + :header: "Attribute", "Description", "Required" + + "id", "Name of Stage (preparation, compilation)","Yes" + "starttarget", "Name of target to start logging.","Yes" + "endtarget", "Name of target to end logging.","Yes" + + * Stagerecord + + * Will record/log the build process from start target to end target mentioned in the Stage type. + * Need provide attributes like output log file, loglevel. + * Supports passwordfilterset datatype. If we need to filter any passwords from specific stage log files. + + .. csv-table:: + :header: "Attribute", "Description", "Required" + + "id", "ID for stage record entry.", "Yes" + "defaultoutput", "File to record main ant log file" "Yes (should not have stagerefid attribute if stage record has defaultoutput)" + "stagerefid", "Stage reference ID. Exactly as given in the Stage", "Yes" + "output", "File to record the build process.", "Yes" + "loglevel", "Loglevel to record type of information. ex: debug, info, vebose", "No, Default it will be info" + "append", "To append the logging into existing file.", "No, Default it will be false" + +Example: +----------------- +.. code-block:: xml + + + + + + + + +logreplace Task (hlm:logreplace) +----------------- +* LogReplace task will filter out the string from stage logging files. +* If we need to filter out any user passwords and specific word which should n't be logged can passed to stage logging through this task. +* Specified string will be filtered out from all the stages logging files. +* It will not be filtered our by hlm:record task. To filter out the same need to passed to hlm:record task through recorderfilterset or recordfilter. + +Example: +----------------- +This example will filter out unix password value from all the stage logging files. + +.. code-block:: xml + + + + +Record Task (hlm:record) +----------------- +* Behaviour is same ANT record task with some addon features. 
+* Filerts the logging messages which are passed through the filters to hlm:record task. +* Will stops the logging happening by listener for any stages and resumes to stage logging once hlm:record task finishes. + +Example: +----------------- + +Below example + * Will sets one recoderfilteset. + * Will record the given target/tasks into ${build.id}_stagetest.log file by filtering the regexp mentioned in the recorderfilterset and recordfilter. + +.. code-block:: xml + + + + + + + + + + + ... Call tasks you would like to record the output ... + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/doc/src/manual/product_example.rst --- a/buildframework/helium/doc/src/manual/product_example.rst Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/doc/src/manual/product_example.rst Wed Dec 23 19:29:07 2009 +0200 @@ -390,12 +390,6 @@ - - - - - - diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/doc/src/manual/signaling.rst --- a/buildframework/helium/doc/src/manual/signaling.rst Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/doc/src/manual/signaling.rst Wed Dec 23 19:29:07 2009 +0200 @@ -35,7 +35,7 @@ - + A signal will then be triggered each time the **target-name** completed. The signalInput will then defined how it should be handled. @@ -162,12 +162,12 @@ - + > - + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/doc/src/manual/stages.rst.ftl --- a/buildframework/helium/doc/src/manual/stages.rst.ftl Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/doc/src/manual/stages.rst.ftl Wed Dec 23 19:29:07 2009 +0200 @@ -229,10 +229,25 @@ The build preparation consists in two parts: - * Getting delivery content (Synergy, zips...), + * Getting delivery content (SCM, zips...), * Preparing the build area. -How to get delivery content from Synergy? +To get SCM source you just have to run:: + + hlm prep-work-area + +To create 'build of materials':: + + hlm create-bom + +Synergy +------- + +In order for the synergy commands to be executed you must define the property ccm.enabled=true in one of the your config files or on the command line. e.g. + +.. code-block:: xml + + It is possible to automatically get content from Synergy using the Helium framework. To handle that you have to configure the delivery.xml file from your family build configuration folder and reference by the property prep.delivery.file. @@ -320,9 +335,23 @@ The following properties are required: - database: the name of the synergy database you want to use. -To get synergy source you just have to run:: - - hlm prep-work-area +Mercurial +--------- + +Add to ant configuration: + +.. code-block:: xml + + + + + + + + +For more information see API_ + +.. _API: ../helium-antlib/api/doclet/index.SCM.html .. index:: single: Stage - Compilation @@ -355,7 +384,7 @@ - The order of the files is significant. If building Symbian OS, the Symbian System Definition file must come first. Here both ``fileset`` and ``pathelement`` are used. ``pathelement`` selects just one file whereas a ``fileset`` can use wildcards to select multiple files or handle problems of filenames changing across different platform releases. +The order of the files is significant. If building Symbian OS, the Symbian System Definition file must come first. Here both ``fileset`` and ``pathelement`` are used. ``pathelement`` selects just one file whereas a ``fileset`` can use wildcards to select multiple files or handle problems of filenames changing across different platform releases. 2. 
Determine if an existing build configuration in any of the build model sections of the files are suitable for what needs to be built. A build configuration typically looks something like this: @@ -377,7 +406,7 @@ - A ``unitListRef`` includes a ``unitList`` defined somewhere else as part of this configuration. The ``buildLayer`` elements define ``abld`` steps to run on each component. If an existing configuration is not sufficient a new one must be defined in a separate file (which should be included in the ``path`` type). +A ``unitListRef`` includes a ``unitList`` defined somewhere else as part of this configuration. The ``buildLayer`` elements define ``abld`` steps to run on each component. If an existing configuration is not sufficient a new one must be defined in a separate file (which should be included in the ``path`` type). 3. Define the ``sysdef.configurations`` Ant property to contain a comma-separated list of build configuration names that must match the ``name`` attribute of the ``configuration``. Each configuration will be built in turn in the ``compile-main`` Ant target. @@ -465,6 +494,26 @@ "``name``", "The name of the .pkg file to parse.", "" "``sis.name``", "The name of the .sis file to create. If omitted it will default to the name of the .pkg file.", "" "``path``", "The path where the .pkg file exists as input to building the .sis file.", "" + +Configuration enhancements +:::::::::::::::::::::::::: + +*Since Helium 7.0.* + +The configuration method above will be replaced by a more flexible approach: + +.. csv-table:: Property descriptions + :header: "Property", "Description", "Values" + + "``makesis.tool``", "The path for the makesis tool that builds a .sis file.", "" + "``signsis.tool``", "The path for the signsis tool that signs a .sis file to create a .sisx file.", "" + "``build.sisfiles.dir``", "The directory where the .sis file should be put.", "" + "``key``", "The key to use for signing.", "" + "``cert``", "The certificate to use for signing.", "" + "``input``", "The full path and filename of the input file. This can be a .pkg file, for generating a SIS file, a .sis file for signing, or a .sisx file for multiple signing.", "" + "``output``", "The full path and filename of the output file. This is only needed if the location or name needs to be different from the default, which is that the file extension changes appropriately.", "" + +Also a ``sis.config.name`` property is added that allows the name of a block to be supplied. This can be overridden to allow particular subsets of configurations to be built. Checking Stub SIS files ----------------------- @@ -522,7 +571,7 @@ .. csv-table:: Ant properties to modify :header: "Attribute", "Description", "Values" - "``regionalVariation``", "Enable regional variation switching.", "false" + "``regionalVariation``", "Enable regional variation switching. - Deprecated (always false)", "false" The imakerconfiguration supports three sub-types: @@ -537,62 +586,78 @@ Example of configuration: .. code-block:: xml - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + +Other example using product list and variable group: + +.. code-block:: xml + + + + + + + + + + + + + + + + + + + + + + + + + + .. index:: single: The iMaker Task -The imaker task ---------------- - -.. 
csv-table:: Ant properties to modify - :header: "Attribute", "Description", "Values" - - "``executor``", "Name of the build system to be used.", "ebs, helium-ec and ec" - "``name``", "Log and intermediate file differentiator.", "" - "``signal``", "Enable/disable signaling mechanism.", "true, false" - -Example: +How to configure the target +--------------------------- + +The target can be configured by defining an hlm:imakerconfigurationset element with the '''imaker.rom.config''' reference. .. code-block:: xml - - - - - - - - - - - - - - - - - - - - + + + ... + + +The other configurable element is the engine. The '''imaker.engine''' property defines the reference +to the engine configuration to use for building the roms. Helium defines two engines by default: +* imaker.engine.default: multithreaded engine (hlm:defaultEngine type) +* imaker.engine.ec: ECA engine - cluster base execution (hlm:emakeEngine type) + +If the property is not defined Helium will guess the best engine to used based on the build.system property. + .. index:: single: Legacy ROM creation @@ -1224,35 +1289,58 @@ Cenrep creation (S60 3.2.3 - 5.x) ::::::::::::::::::::::::::::::::: <#if !(ant?keys?seq_contains("sf"))> -See: http://s60wiki.nokia.com/S60Wiki/Central_Repository_Usage +See: http://configurationtools.nmp.nokia.com/builds/cone/docs/cli/generate.html?highlight=generate -The target ``configtool`` can be used to run the Configuration Tool. - -Currently supported Configuration Tool arguments are: - -command_line | name - --master_conf : master_conf --confml : confml --impl : impl --iby : iby --ignore_errors : keepgoing(true - uses -ignore_errors, otherwise not, setting - true generates cenrep incase of errors, and signals has to be configured to stop the build - in case of errors). - -Default values are: +The target ``ido-gen-cenrep`` can be used to run the ConE Tool to generate cenreps. + +* IDO can use the ido-gen-cenrep to generate the cenreps which are IDO specific. +* We should pass the sysdef.configurations.list as parameter to ido-gen-cenrep target. Else it will use the defualt one of helium. + +Example: +::::::::::::::::::::::::::::::::: + +Below example will generate the cenrep only for IDO specific confml files. + +.. code-block:: xml + + + + + + + +Below example will generate the cenreps for S60 SDK. .. code-block:: xml - - - - - - - - + + + + + + + + + + + +By using conEToolMacro you can pass any arguments which are mentioned in the above link. + +.. code-block:: xml + + + + + + + + + + + + +After running this command generated file can be found from /_cenrep_includefile.txt Running individual build commands @@ -1341,7 +1429,7 @@ Variation (S60 3.2.3 - 5.x) --------------------------- -See ../tutorials/imaker/iMakerUseCaseCustomerVariantConfml.html +See http://delivery.nmp.nokia.com/trac/imaker/wiki/iMakerUseCaseCustomerVariantConfml Variation (S60 3.2) ------------------- @@ -1594,16 +1682,16 @@ .. index:: - single: ATS3 - STIF, TEF, RTEST, MTF and EUnit - -.. _`Stage-ATS3-label`: - -Stage: ATS3 - STIF, TEF, RTEST, MTF and EUnit (also Qt) + single: ATS - STIF, TEF, RTEST, MTF and EUnit + +.. _`Stage-ATS-label`: + +Stage: ATS - STIF, TEF, RTEST, MTF and EUnit (also Qt) ======================================================= ATS testing is the automatic testing of the phone code once it has been compiled and linked to create a ROM image. -Explanation of the process for getting ATS3 (`STIF`_ and `EUnit`_) tests compiled and executed by Helium, through the use of the ``ats-test`` target. 
+Explanation of the process for getting ATS (`STIF`_ and `EUnit`_) tests compiled and executed by Helium, through the use of the ``ats-test`` target. http://developer.symbian.org/wiki/index.php/Symbian_Test_Tools @@ -1618,7 +1706,7 @@ ---------------- * `Harmonized Test Interface (HTI)`_ needs to be compiled and into the image. -* The reader is expected to already have a working ATS3 setup in which test cases can be executed. ATS3 server names, +* The reader is expected to already have a working ATS setup in which test cases can be executed. ATS server names, access rights and authentication etc. is supposed to be already taken care of. <#if !(ant?keys?seq_contains("sf"))> @@ -1672,7 +1760,7 @@ **STEP 2: Configure ATS properties in build.xml** -**(A)** Username and Password for the ATS3 should be set in the `.netrc file`_ +**(A)** Username and Password for the ATS should be set in the `.netrc file`_ .. code-block:: text @@ -1698,14 +1786,14 @@ **eunitexerunner.flags** [recommended] Flags for EUnit exerunner can be set by setting the value of this variable. The default flags are set to "/E S60AppEnv /R Off". **ats.email.list** [recommended] The property is needed if you want to get an email from ATS server after the tests are executed. There can be one to many semicolon(s) ";" separated email addresses. **ats.flashfiles.minlimit** [recommended] Limit of minimum number of flash files to execute ats-test target, otherwise ATSDrop.zip will not be generated. Default value is "2" files. - **ats.plan.name** [recommended] Modify the plan name if you have understanding of test.xml file or leave it as it is. Deafault value is "plan". + **ats.plan.name** [recommended] Modify the plan name if you have understanding of test.xml file or leave it as it is. Default value is "plan". **ats.product.hwid** [recommended] Product HardWare ID (HWID) attached to ATS. By default the value of HWID is not set. **ats.script.type** [recommended] There are two types of ats script files to send drop to ATS server, "runx" and "import"; only difference is that with "import" ATS doesn't have to have access rights to testdrop.zip file, as it is sent to the system over http and import doesn't need network shares. If that is not needed "import" should not be used. Default value is "runx" as "import" involves heavy processing on ATS server. **ats.target.platform** [recommended] Sets target platform for compiling test components. Default value is "armv5 urel". - **ats.test.timeout** [recommended] To set test commands execution time limit on ATS3 server, in seconds. Default value is "60". - **ats.testrun.name** [recommended] Modify the test-run name if you have understanding of test.xml file or leave it as it is. Deafault value is a string consist of build id, product name, major and minor versions. - **ats.trace.enabled** [recommended] Should be "True" if tracing is needed during the tests running on ATS3. Deafault value is "False", the values are case-sensitive. - **ats.ctc.enabled** [recommended] Should be "True" if coverage measurement and dynamic analysis (CTC) tool support is to be used by ATS. Deafault value is "False", the values are case-sensitive. + **ats.test.timeout** [recommended] To set test commands execution time limit on ATS server, in seconds. Default value is "60". + **ats.testrun.name** [recommended] Modify the test-run name if you have understanding of test.xml file or leave it as it is. Default value is a string consist of build id, product name, major and minor versions. 
+ **ats.trace.enabled** [recommended] Should be "True" if tracing is needed during the tests running on ATS. Default value is "False", the values are case-sensitive. See http://s60wiki.nokia.com/S60Wiki/CATS/TraceTools + **ats.ctc.enabled** [recommended] Should be "True" if coverage measurement and dynamic analysis (CTC) tool support is to be used by ATS. Default value is "False", the values are case-sensitive. **ats.ctc.host** [recommended] CTC host, provided by CATS used to create coverage measurement reports. MON.sym files are copied to this location, for example "10.0.0.1". If not given, code coverage reports are not created **ats.obey.pkgfiles.rule** [recommended] If the property is set to "True", then the only test components which will have PKG files, will be included into the test.xml as a test-set. Which means, even if there's a test component (executable) but there's no PKG file, it should not be considered as a test component and hence not included into the test.xml as a separate test. By default the property value is False. **reference.ats.flash.images** [recommended] Fileset for list of flash images (can be .fpsx, .C00, .V01 etc) It is recommended to set the fileset, default filset is given below which can be overwritten. set *dir=""* attribute of the filset to "${r'$'}{build.output.dir}/variant_images" if "variant-image-creation" target is being used. @@ -1713,7 +1801,9 @@ **tsrc.path.list** [allowed] Contains list of the tsrc directories. Gets the list from system definition layer files. Assuming that the test components are defined already in te layers.sysdef.xml files to get compiled. Not recommended, but the property value can be set if there are no system definition file(s), and tsrc directories paths to set manually. **ats.report.location** [allowed] Sets ATS reports store location. Default location is "${r'$'}{publish.dir}/${r'$'}{publish.subdir}". **ats.multiset.enabled** [allowed] Should be "True" so a set is used for each pkg file in a component, this allows tests to run in parallel on several devices. - + **ats.diamonds.signal** [allowed] Should be "true" so at end of the build diamonds is checked for test results and helium fails if tests failed. + **ats.delta.enabled** [allowed] Should be "true" so only ado's changed during do-prep-work-area are tested by ats. + **ats4.enabled** [allowed] Should be "true" if ats4 is to be used. ============================== =============== =============== @@ -1722,7 +1812,7 @@ .. code-block:: xml - + @@ -1747,10 +1837,6 @@ - - - - @@ -1758,7 +1844,7 @@ **STEP 3: Call target ats-test** -To execute the target, a property should be set(````). +To execute the target, a property should be set(````). Then call ``ats-test``, which will create the ATSDrop.zip (test package). @@ -1767,9 +1853,20 @@ CTC: ---- -CTC code coverage measurement can be created automatically by enabling property ``ats.ctc.enabled`` - -Also, property ``ats.ctc.host`` must be defined (See the description above) +CTC code coverage measurements reports can be created as part of Test Automation process. + +1. Build the src using ``build_ctc`` configuration, which is in ``build.sysdef.xml`` file, to create ``MON.sym`` files. It means that a property ``sysdef.configurations.list`` should be modified either add or replace current build configuration with ``build_ctc`` + +2. Set the property, ``ats.ctc.host``, as described above, for sending the ``MON.sym`` files to the network drive. 
*(Please contact ATS server administrator and ask for the value to set this property)* + +3. Enable CTC process by setting up property ``ats.ctc.enabled`` to "true" + +4. Test drops are sent to the ATS server, where, after executing tests ``ctcdata.txt`` files are created. ``ctcdata.txt`` and ``MON.sym`` files are then further processed to create code coverage reports. + +5. View or download the Code coverage reports by following the link provided in the ATS report email (sent after the tests are executed on ATS) + +*NOTE: After receiving the email notification, it may take a few minutes before the code coverage reports are available.* + Qt Tests: --------- @@ -1782,12 +1879,12 @@ .. _`Skip-Sending-AtsDrop-label`: -Skip Sending AtsDrop to ATS3 +Skip Sending AtsDrop to ATS ---------------------------- -By setting property of ``skip.ats.sending``, ``ats-test`` target only creates a drop file, and does not send the drop (or package) to ATS3 server. - -Customizing the test.xml in ATS3 +By setting property of ``skip.ats.sending``, ``ats-test`` target only creates a drop file, and does not send the drop (or package) to ATS server. + +Customizing the test.xml in ATS -------------------------------- The user can customize the generated test.xml with files: @@ -1887,12 +1984,12 @@ .. index:: - single: ATS3 - ASTE - -Stage: ATS3 - ASTE + single: ATS - ASTE + +Stage: ATS - ASTE =================== -Explanation of the process for getting ATS3 `ASTE`_ tests compiled and executed by Helium, through the use of the ``ats-aste`` target. +Explanation of the process for getting ATS `ASTE`_ tests compiled and executed by Helium, through the use of the ``ats-aste`` target. <#if !(ant?keys?seq_contains("sf"))> .. _`ASTE`: http://s60wiki.nokia.com/S60Wiki/ASTE @@ -1902,7 +1999,7 @@ -------------- * `Harmonized Test Interface (HTI)`_ needs to be compiled and into the image. -* The reader is expected to already have a working ATS3 setup in which test cases can be executed. ATS3 server names, access rights and authentication etc. is supposed to be already taken care of. +* The reader is expected to already have a working ATS setup in which test cases can be executed. ATS server names, access rights and authentication etc. is supposed to be already taken care of. * `SW Test Asset`_ location and type of test should be known. <#if !(ant?keys?seq_contains("sf"))> @@ -1920,7 +2017,7 @@ **STEP 1: Configure ASTE properties in build.xml** -**(A)** Username and Password for the ATS3 should be set in the `.netrc file`_ +**(A)** Username and Password for the ATS should be set in the `.netrc file`_ .. code-block:: text @@ -1943,16 +2040,16 @@ **Property Name** **Edit Status** **Description** =============================== =============== =============== **ats.server** [must] For example: "4fio00105" or "catstresrv001.cats.noklab.net:80". Default server port is "8080", but it is not allowed between intra and Noklab. Because of this we need to define server port as 80. The host can be different depending on site and/or product. - **ats.drop.location** [must] Server location (UNC path) to save the ATS3Drop file, before sending to the ATS. For example: \\\\trwsem00\\some_folder\\. In case, ``ats.script.type`` is set to "import", ATS doesn't need to have access to ats.drop.location, its value can be any local folder on build machine, for example c:/temp (no network share needed). + **ats.drop.location** [must] Server location (UNC path) to save the ATSDrop file, before sending to the ATS. For example: \\\\trwsem00\\some_folder\\. 
In case, ``ats.script.type`` is set to "import", ATS doesn't need to have access to ats.drop.location, its value can be any local folder on build machine, for example c:/temp (no network share needed). **ats.product.name** [must] Name of the product to be tested. For example: "PRODUCT". **ats.aste.testasset.location** [must] Location of SW Test Assets, if the TestAsset is not packaged then it is first compressed to a ``.zip`` file. It should be a UNC path. **ats.aste.software.release** [must] Flash images releases, for example "SPP 51.32". **ats.aste.software.version** [must] Version of the software to be tested. For example: "W810" **ats.aste.email.list** [recommended] The property is needed if you want to get an email from ATS server after the tests are executed. There can be one to many semicolon(s) ";" separated email addresses. **ats.flashfiles.minlimit** [recommended] Limit of minimum number of flash files to execute ats-test target, otherwise ATSDrop.zip will not be generated. Default value is "2" files. - **ats.aste.plan.name** [recommended] Modify the plan name if you have understanding of test.xml file or leave it as it is. Deafault value is "plan". + **ats.aste.plan.name** [recommended] Modify the plan name if you have understanding of test.xml file or leave it as it is. Default value is "plan". **ats.product.hwid** [recommended] Product HardWare ID (HWID) attached to ATS. By default the value of HWID is not set. - **ats.test.timeout** [recommended] To set test commands execution time limit on ATS3 server, in seconds. Default value is "60". + **ats.test.timeout** [recommended] To set test commands execution time limit on ATS server, in seconds. Default value is "60". **ats.aste.testrun.name** [recommended] Modify the test-run name if you have understanding of test.xml file or leave it as it is. Default value is a string consists of build id, product name, major and minor versions. **ats.aste.test.type** [recommended] Type of test to run. Default is "smoke". **ats.aste.testasset.caseids** [recommended] These are the cases that which tests should be run from the TestAsset. For example, value can be set as "100,101,102,103,105,106,". A comma is needed to separate case IDs @@ -1966,7 +2063,7 @@ .. code-block:: xml - + @@ -1989,10 +2086,6 @@ - - - - @@ -2000,14 +2093,14 @@ **STEP 2: Call target ats-aste** -To execute the target, a property should be set(````). +To execute the target, a property should be set(````). Then call ``ats-aste``, which will create the ATSDrop.zip (test package). If property ``ats.aste.email.list`` is set, an email (test report) will be sent when the tests are ready on ATS/ASTE. -Skip Sending AtsDrop to ATS3 +Skip Sending AtsDrop to ATS ------------------------------ click :ref:`Skip-Sending-AtsDrop-label`: @@ -2018,15 +2111,15 @@ Stage: MATTI ============= -MATTI testing is very similar to ATS3 testing, so for details of how it all links together see :ref:`Stage-ATS3-label`: `and the matti website`_. +MATTI testing is very similar to ATS testing, so for details of how it all links together see :ref:`Stage-ATS-label`: `and the matti website`_. <#if !(ant?keys?seq_contains("sf"))> .. _`and the matti website`: http://trmatti1.nmp.nokia.com/help/ -The set up of parameters is very similar (a few less parameters and it mostly uses ATS3 values). 
The main difference is that once the drop file has been uploaded to the ATS3 server it uses MATTI to perform the tests and not ATS3, this is achieved by calling the MATTIDrop.py script instead of the ATSE or ATS3 scripts when creating the drop file (the drop file contains the flash files and the ruby tests to be performed). - -The following parameters are the ones that are not listed in the ATS3 parameters, all other parameters required are as listed in the ATS3 section above. +The set up of parameters is very similar (a few less parameters and it mostly uses ATS values). The main difference is that once the drop file has been uploaded to the ATS server it uses MATTI to perform the tests and not ATS, this is achieved by calling the MATTIDrop.py script instead of the ATSE or ATS scripts when creating the drop file (the drop file contains the flash files and the ruby tests to be performed). + +The following parameters are the ones that are not listed in the ATS parameters, all other parameters required are as listed in the ATS section above. * [must] - must be set by user * [recommended] - should be set by user but not mandatory @@ -2053,36 +2146,25 @@ .. code-block:: xml - - + - - - - - - - - - - - - + + -In order to upload and view the test run you need to have a valid user id and password that matches that in your .netrc file. To create the account open a web browser window and enter the name of the ats.server with /ats3 at the end e.g. http://123456:80/ats3. Click on the link in the top right hand corner to create the account. To view the test run once your account is active you need to click on the 'test runs' tab. +In order to upload and view the test run you need to have a valid user id and password that matches that in your .netrc file. To create the account open a web browser window and enter the name of the ats.server with /ATS at the end e.g. http://123456:80/ATS. Click on the link in the top right hand corner to create the account. To view the test run once your account is active you need to click on the 'test runs' tab. To run the tests call the target `matti-test` (you will need to define the 'build.drive', 'build.number' and it is best to create the 'core.build.version' on the command line as well if you do not add it to the list of targets run that create the ROM image). e.g. :: hlm -Dbuild.number=001 -Dbuild.drive=z: -Dcore.build.version=001 matti-test -If it displays the message 'Testdrop created!' with the file name then the MATTIDrops.py script has done what it needs to do. The next thing to check is that the drop file has been uploaded to the ATS3 server OK. If that is performed successfully then the rest of the testing needs to be performed by the ATS3 server. There is also a test.xml file created that contains details needed for debugging any problems that might occur. To determine if the tests have run correctly you need to read the test run details from the server. +If it displays the message 'Testdrop created!' with the file name then the MATTIDrops.py script has done what it needs to do. The next thing to check is that the drop file has been uploaded to the ATS server OK. If that is performed successfully then the rest of the testing needs to be performed by the ATS server. There is also a test.xml file created that contains details needed for debugging any problems that might occur. To determine if the tests have run correctly you need to read the test run details from the server. 
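A minimal sketch may help tie the MATTI/ATS properties above together. The fragment below is a hypothetical Ant snippet, not taken from the patch: the property names come from the tables earlier in this section (``ats.server``, ``ats.drop.location``, ``ats.product.name``, ``ats.email.list``), while the project name and all values are placeholders that would need to be adapted to a real ATS setup. Credentials still come from the ``.netrc`` file as described above; only non-secret settings belong in a configuration file like this.

.. code-block:: xml

    <?xml version="1.0" encoding="UTF-8"?>
    <!-- Hypothetical example only: property names are the ones documented in the
         tables above, values are placeholders for a local ATS/MATTI setup. -->
    <project name="matti-test-config-example">
        <!-- ATS server (host:port) and the UNC drop location for the test package -->
        <property name="ats.server" value="catstresrv001.cats.noklab.net:80"/>
        <property name="ats.drop.location" value="\\trwsem00\some_folder\"/>
        <!-- Product under test and the report email recipients -->
        <property name="ats.product.name" value="PRODUCT"/>
        <property name="ats.email.list" value="builder@example.com"/>
        <!-- Build identification is normally passed on the command line, e.g.
             hlm -Dbuild.number=001 -Dbuild.drive=z: -Dcore.build.version=001 matti-test -->
    </project>

With such a file included in the build configuration, the test run would be started exactly as shown above with ``hlm ... matti-test``; the snippet only centralises the property values so they do not have to be repeated on the command line.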
diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/doc/src/sf.rst --- a/buildframework/helium/doc/src/sf.rst Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/doc/src/sf.rst Wed Dec 23 19:29:07 2009 +0200 @@ -7,9 +7,9 @@ Install the following tools: -* `ActivePython 2.5`_ +* `ActivePython 2.6`_ -.. _`ActivePython 2.5`: http://www.activestate.com/activepython/downloads +.. _`ActivePython 2.6`: http://www.activestate.com/activepython/downloads * `ActivePerl 5.6.1`_ diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/doc/src/tutorials/configuration/HowtoCreateANewSignal.rst --- a/buildframework/helium/doc/src/tutorials/configuration/HowtoCreateANewSignal.rst Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/doc/src/tutorials/configuration/HowtoCreateANewSignal.rst Wed Dec 23 19:29:07 2009 +0200 @@ -48,14 +48,14 @@ -To declare a new signal to the framework you need to define a new signalConfig reference. +To declare a new signal to the framework you need to define a new signalListenerConfig reference. You also need to create a signalInput configuration to define your signal behaviour. .. code-block:: xml - + @@ -63,13 +63,13 @@ - + -The signalListenerConfig defines which target to listen and raise signal for. The target name is defined through the **name** attribute. +The signalListenerConfig defines which target to listen and raise signal for. The target name is defined through the **target** attribute. Then the nested **targetCondition** element is used to configure how the signal should be triggered. This element accepts any nested `Ant conditions `_. -In this case the signal will get raised only id the file is not present after the execution of the **custom-action** target. +In this case the signal will get raised only if the file is not present after the execution of the **custom-action** target. The framework then uses the defined signalInput from the signalNotifierInput configuration to know how to behave when the signal is raised. In the previous example it will simply keep running and fail the build at the end. Then files defined by the nested notifierInput will be passed to the notifier. @@ -110,4 +110,4 @@ BUILD SUCCESSFUL Total time: 2 seconds - \ No newline at end of file + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/doc/src/tutorials/qt_build.rst --- a/buildframework/helium/doc/src/tutorials/qt_build.rst Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/doc/src/tutorials/qt_build.rst Wed Dec 23 19:29:07 2009 +0200 @@ -9,17 +9,6 @@ This tutorial explains how to update your configuration to enable Qt building. -Configuring Qt -============== - -Helium has a ready to use target which allows you to configure Qt. You just need to make sure the qt.dir -property is configured with the correct location of your Qt source. -Then you can run the target as follow:: - - > hlm -Dbuild.drive=Q: -Dbuild.number=1 configure-qt - ... - -This target has now been depreciated in Helium 6.0, please consider building Qt using Symbian toolchain (SBS or SBSv2). Building Qt components ====================== diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/filedisk/COPYING.TXT --- a/buildframework/helium/external/filedisk/COPYING.TXT Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,340 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 2, June 1991 - - Copyright (C) 1989, 1991 Free Software Foundation, Inc. 
- 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The licenses for most software are designed to take away your -freedom to share and change it. By contrast, the GNU General Public -License is intended to guarantee your freedom to share and change free -software--to make sure the software is free for all its users. This -General Public License applies to most of the Free Software -Foundation's software and to any other program whose authors commit to -using it. (Some other Free Software Foundation software is covered by -the GNU Library General Public License instead.) You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -this service if you wish), that you receive source code or can get it -if you want it, that you can change the software or use pieces of it -in new free programs; and that you know you can do these things. - - To protect your rights, we need to make restrictions that forbid -anyone to deny you these rights or to ask you to surrender the rights. -These restrictions translate to certain responsibilities for you if you -distribute copies of the software, or if you modify it. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must give the recipients all the rights that -you have. You must make sure that they, too, receive or can get the -source code. And you must show them these terms so they know their -rights. - - We protect your rights with two steps: (1) copyright the software, and -(2) offer you this license which gives you legal permission to copy, -distribute and/or modify the software. - - Also, for each author's protection and ours, we want to make certain -that everyone understands that there is no warranty for this free -software. If the software is modified by someone else and passed on, we -want its recipients to know that what they have is not the original, so -that any problems introduced by others will not reflect on the original -authors' reputations. - - Finally, any free program is threatened constantly by software -patents. We wish to avoid the danger that redistributors of a free -program will individually obtain patent licenses, in effect making the -program proprietary. To prevent this, we have made it clear that any -patent must be licensed for everyone's free use or not licensed at all. - - The precise terms and conditions for copying, distribution and -modification follow. - - GNU GENERAL PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. This License applies to any program or other work which contains -a notice placed by the copyright holder saying it may be distributed -under the terms of this General Public License. The "Program", below, -refers to any such program or work, and a "work based on the Program" -means either the Program or any derivative work under copyright law: -that is to say, a work containing the Program or a portion of it, -either verbatim or with modifications and/or translated into another -language. (Hereinafter, translation is included without limitation in -the term "modification".) Each licensee is addressed as "you". 
- -Activities other than copying, distribution and modification are not -covered by this License; they are outside its scope. The act of -running the Program is not restricted, and the output from the Program -is covered only if its contents constitute a work based on the -Program (independent of having been made by running the Program). -Whether that is true depends on what the Program does. - - 1. You may copy and distribute verbatim copies of the Program's -source code as you receive it, in any medium, provided that you -conspicuously and appropriately publish on each copy an appropriate -copyright notice and disclaimer of warranty; keep intact all the -notices that refer to this License and to the absence of any warranty; -and give any other recipients of the Program a copy of this License -along with the Program. - -You may charge a fee for the physical act of transferring a copy, and -you may at your option offer warranty protection in exchange for a fee. - - 2. You may modify your copy or copies of the Program or any portion -of it, thus forming a work based on the Program, and copy and -distribute such modifications or work under the terms of Section 1 -above, provided that you also meet all of these conditions: - - a) You must cause the modified files to carry prominent notices - stating that you changed the files and the date of any change. - - b) You must cause any work that you distribute or publish, that in - whole or in part contains or is derived from the Program or any - part thereof, to be licensed as a whole at no charge to all third - parties under the terms of this License. - - c) If the modified program normally reads commands interactively - when run, you must cause it, when started running for such - interactive use in the most ordinary way, to print or display an - announcement including an appropriate copyright notice and a - notice that there is no warranty (or else, saying that you provide - a warranty) and that users may redistribute the program under - these conditions, and telling the user how to view a copy of this - License. (Exception: if the Program itself is interactive but - does not normally print such an announcement, your work based on - the Program is not required to print an announcement.) - -These requirements apply to the modified work as a whole. If -identifiable sections of that work are not derived from the Program, -and can be reasonably considered independent and separate works in -themselves, then this License, and its terms, do not apply to those -sections when you distribute them as separate works. But when you -distribute the same sections as part of a whole which is a work based -on the Program, the distribution of the whole must be on the terms of -this License, whose permissions for other licensees extend to the -entire whole, and thus to each and every part regardless of who wrote it. - -Thus, it is not the intent of this section to claim rights or contest -your rights to work written entirely by you; rather, the intent is to -exercise the right to control the distribution of derivative or -collective works based on the Program. - -In addition, mere aggregation of another work not based on the Program -with the Program (or with a work based on the Program) on a volume of -a storage or distribution medium does not bring the other work under -the scope of this License. - - 3. 
You may copy and distribute the Program (or a work based on it, -under Section 2) in object code or executable form under the terms of -Sections 1 and 2 above provided that you also do one of the following: - - a) Accompany it with the complete corresponding machine-readable - source code, which must be distributed under the terms of Sections - 1 and 2 above on a medium customarily used for software interchange; or, - - b) Accompany it with a written offer, valid for at least three - years, to give any third party, for a charge no more than your - cost of physically performing source distribution, a complete - machine-readable copy of the corresponding source code, to be - distributed under the terms of Sections 1 and 2 above on a medium - customarily used for software interchange; or, - - c) Accompany it with the information you received as to the offer - to distribute corresponding source code. (This alternative is - allowed only for noncommercial distribution and only if you - received the program in object code or executable form with such - an offer, in accord with Subsection b above.) - -The source code for a work means the preferred form of the work for -making modifications to it. For an executable work, complete source -code means all the source code for all modules it contains, plus any -associated interface definition files, plus the scripts used to -control compilation and installation of the executable. However, as a -special exception, the source code distributed need not include -anything that is normally distributed (in either source or binary -form) with the major components (compiler, kernel, and so on) of the -operating system on which the executable runs, unless that component -itself accompanies the executable. - -If distribution of executable or object code is made by offering -access to copy from a designated place, then offering equivalent -access to copy the source code from the same place counts as -distribution of the source code, even though third parties are not -compelled to copy the source along with the object code. - - 4. You may not copy, modify, sublicense, or distribute the Program -except as expressly provided under this License. Any attempt -otherwise to copy, modify, sublicense or distribute the Program is -void, and will automatically terminate your rights under this License. -However, parties who have received copies, or rights, from you under -this License will not have their licenses terminated so long as such -parties remain in full compliance. - - 5. You are not required to accept this License, since you have not -signed it. However, nothing else grants you permission to modify or -distribute the Program or its derivative works. These actions are -prohibited by law if you do not accept this License. Therefore, by -modifying or distributing the Program (or any work based on the -Program), you indicate your acceptance of this License to do so, and -all its terms and conditions for copying, distributing or modifying -the Program or works based on it. - - 6. Each time you redistribute the Program (or any work based on the -Program), the recipient automatically receives a license from the -original licensor to copy, distribute or modify the Program subject to -these terms and conditions. You may not impose any further -restrictions on the recipients' exercise of the rights granted herein. -You are not responsible for enforcing compliance by third parties to -this License. - - 7. 
If, as a consequence of a court judgment or allegation of patent -infringement or for any other reason (not limited to patent issues), -conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot -distribute so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you -may not distribute the Program at all. For example, if a patent -license would not permit royalty-free redistribution of the Program by -all those who receive copies directly or indirectly through you, then -the only way you could satisfy both it and this License would be to -refrain entirely from distribution of the Program. - -If any portion of this section is held invalid or unenforceable under -any particular circumstance, the balance of the section is intended to -apply and the section as a whole is intended to apply in other -circumstances. - -It is not the purpose of this section to induce you to infringe any -patents or other property right claims or to contest validity of any -such claims; this section has the sole purpose of protecting the -integrity of the free software distribution system, which is -implemented by public license practices. Many people have made -generous contributions to the wide range of software distributed -through that system in reliance on consistent application of that -system; it is up to the author/donor to decide if he or she is willing -to distribute software through any other system and a licensee cannot -impose that choice. - -This section is intended to make thoroughly clear what is believed to -be a consequence of the rest of this License. - - 8. If the distribution and/or use of the Program is restricted in -certain countries either by patents or by copyrighted interfaces, the -original copyright holder who places the Program under this License -may add an explicit geographical distribution limitation excluding -those countries, so that distribution is permitted only in or among -countries not thus excluded. In such case, this License incorporates -the limitation as if written in the body of this License. - - 9. The Free Software Foundation may publish revised and/or new versions -of the General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - -Each version is given a distinguishing version number. If the Program -specifies a version number of this License which applies to it and "any -later version", you have the option of following the terms and conditions -either of that version or of any later version published by the Free -Software Foundation. If the Program does not specify a version number of -this License, you may choose any version ever published by the Free Software -Foundation. - - 10. If you wish to incorporate parts of the Program into other free -programs whose distribution conditions are different, write to the author -to ask for permission. For software which is copyrighted by the Free -Software Foundation, write to the Free Software Foundation; we sometimes -make exceptions for this. Our decision will be guided by the two goals -of preserving the free status of all derivatives of our free software and -of promoting the sharing and reuse of software generally. - - NO WARRANTY - - 11. 
BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY -FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN -OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES -PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED -OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS -TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE -PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, -REPAIR OR CORRECTION. - - 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR -REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, -INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING -OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED -TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY -YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER -PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE -POSSIBILITY OF SUCH DAMAGES. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -convey the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - - Copyright (C) - - This program is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation; either version 2 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program; if not, write to the Free Software - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - - -Also add information on how to contact you by electronic and paper mail. - -If the program is interactive, make it output a short notice like this -when it starts in an interactive mode: - - Gnomovision version 69, Copyright (C) year name of author - Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, the commands you use may -be called something other than `show w' and `show c'; they could even be -mouse-clicks or menu items--whatever suits your program. - -You should also get your employer (if you work as a programmer) or your -school, if any, to sign a "copyright disclaimer" for the program, if -necessary. 
Here is a sample; alter the names: - - Yoyodyne, Inc., hereby disclaims all copyright interest in the program - `Gnomovision' (which makes passes at compilers) written by James Hacker. - - , 1 April 1989 - Ty Coon, President of Vice - -This General Public License does not permit incorporating your program into -proprietary programs. If your program is a subroutine library, you may -consider it more useful to permit linking proprietary applications with the -library. If this is what you want to do, use the GNU Library General -Public License instead of this License. diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/filedisk/example.txt --- a/buildframework/helium/external/filedisk/example.txt Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,70 +0,0 @@ -C:\>filedisk -syntax: -filedisk /mount [size[k|M|G] | /ro | /cd] -filedisk /umount -filedisk /status - -filename formats: - c:\path\filedisk.img - \Device\Harddisk0\Partition1\path\filedisk.img - \\server\share\path\filedisk.img - -example: -filedisk /mount 0 c:\temp\filedisk.img 8M f: -filedisk /mount 1 c:\temp\cdimage.iso /cd i: -filedisk /umount f: -filedisk /umount i: - -C:\>filedisk /mount 0 c:\temp\filedisk.img 8M f: - -C:\>format /fs:fat f: -The type of the file system is RAW. -The new file system is FAT. - -WARNING, ALL DATA ON NON-REMOVABLE DISK -DRIVE F: WILL BE LOST! -Proceed with Format (Y/N)? y -Verifying 7M -Initializing the File Allocation Table (FAT)... -Volume label (11 characters, ENTER for none)? filedisk -Format complete. - - 8 353 792 bytes total disk space. - 8 353 792 bytes available on disk. - - 2 048 bytes in each allocation unit. - 4 079 allocation units available on disk. - - 12 bits in each FAT entry. - -Volume Serial Number is 28DF-0C81 - -C:\>dir f: - Volume in drive F is FILEDISK - Volume Serial Number is 28DF-0C81 - - Directory of F:\ - -File Not Found - -C:\>chkdsk f: -The type of the file system is FAT. -Volume FILEDISK created 2002-11-24 14:25 -Volume Serial Number is 28DF-0C81 -Windows is verifying files and folders... -File and folder verification is complete. -Windows has checked the file system and found no problems. - - 8 353 792 bytes total disk space. - 8 353 792 bytes available on disk. - - 2 048 bytes in each allocation unit. - 4 079 total allocation units on disk. - 4 079 allocation units available on disk. - -C:\>filedisk /status f: -f: \??\c:\temp\filedisk.img Size: 8388608 bytes - -C:\>filedisk /umount f: - -C:\> diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/filedisk/exe/MAKEFILE --- a/buildframework/helium/external/filedisk/exe/MAKEFILE Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,7 +0,0 @@ -# -# DO NOT EDIT THIS FILE!!! Edit .\sources. if you want to add a new source -# file to this component. This file merely indirects to the real make file -# that is shared by all the driver components of the Windows NT DDK -# - -!INCLUDE $(NTMAKEENV)\makefile.def diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/filedisk/exe/filedisk.c --- a/buildframework/helium/external/filedisk/exe/filedisk.c Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,423 +0,0 @@ -/* - Control program for a virtual disk driver for Windows NT/2000/XP. - Copyright (C) 1999, 2000, 2001, 2002 Bo Brantén. 
- This program is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation; either version 2 of the License, or - (at your option) any later version. - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - You should have received a copy of the GNU General Public License - along with this program; if not, write to the Free Software - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -*/ - -#include -#include -#include -#include -#include "filedisk.h" - -int FileDiskSyntax(void) -{ - fprintf(stderr, "syntax:\n"); - fprintf(stderr, "filedisk /mount [size[k|M|G] | /ro | /cd] \n"); - fprintf(stderr, "filedisk /umount \n"); - fprintf(stderr, "filedisk /status \n"); - fprintf(stderr, "\n"); - fprintf(stderr, "filename formats:\n"); - fprintf(stderr, " c:\\path\\filedisk.img\n"); - fprintf(stderr, " \\Device\\Harddisk0\\Partition1\\path\\filedisk.img\n"); - fprintf(stderr, " \\\\server\\share\\path\\filedisk.img\n"); - fprintf(stderr, "\n"); - fprintf(stderr, "example:\n"); - fprintf(stderr, "filedisk /mount 0 c:\\temp\\filedisk.img 8M f:\n"); - fprintf(stderr, "filedisk /mount 1 c:\\temp\\cdimage.iso /cd i:\n"); - fprintf(stderr, "filedisk /umount f:\n"); - fprintf(stderr, "filedisk /umount i:\n"); - - return -1; -} - -void PrintLastError(char* Prefix) -{ - LPVOID lpMsgBuf; - - FormatMessage( - FORMAT_MESSAGE_ALLOCATE_BUFFER | - FORMAT_MESSAGE_FROM_SYSTEM | - FORMAT_MESSAGE_IGNORE_INSERTS, - NULL, - GetLastError(), - 0, - (LPTSTR) &lpMsgBuf, - 0, - NULL - ); - - fprintf(stderr, "%s %s", Prefix, (LPTSTR) lpMsgBuf); - - LocalFree(lpMsgBuf); -} - -int -FileDiskMount( - int DeviceNumber, - POPEN_FILE_INFORMATION OpenFileInformation, - char DriveLetter, - BOOLEAN CdImage -) -{ - char VolumeName[] = "\\\\.\\ :"; - char DeviceName[255]; - HANDLE Device; - DWORD BytesReturned; - - VolumeName[4] = DriveLetter; - - Device = CreateFile( - VolumeName, - GENERIC_READ | GENERIC_WRITE, - FILE_SHARE_READ | FILE_SHARE_WRITE, - NULL, - OPEN_EXISTING, - FILE_FLAG_NO_BUFFERING, - NULL - ); - - if (Device != INVALID_HANDLE_VALUE) - { - SetLastError(ERROR_BUSY); - PrintLastError(&VolumeName[4]); - return -1; - } - - if (CdImage) - { - sprintf(DeviceName, DEVICE_NAME_PREFIX "Cd" "%u", DeviceNumber); - } - else - { - sprintf(DeviceName, DEVICE_NAME_PREFIX "%u", DeviceNumber); - } - - if (!DefineDosDevice( - DDD_RAW_TARGET_PATH, - &VolumeName[4], - DeviceName - )) - { - PrintLastError(&VolumeName[4]); - return -1; - } - - Device = CreateFile( - VolumeName, - GENERIC_READ | GENERIC_WRITE, - FILE_SHARE_READ | FILE_SHARE_WRITE, - NULL, - OPEN_EXISTING, - FILE_FLAG_NO_BUFFERING, - NULL - ); - - if (Device == INVALID_HANDLE_VALUE) - { - PrintLastError(&VolumeName[4]); - DefineDosDevice(DDD_REMOVE_DEFINITION, &VolumeName[4], NULL); - return -1; - } - - if (!DeviceIoControl( - Device, - IOCTL_FILE_DISK_OPEN_FILE, - OpenFileInformation, - sizeof(OPEN_FILE_INFORMATION) + OpenFileInformation->FileNameLength - 1, - NULL, - 0, - &BytesReturned, - NULL - )) - { - PrintLastError("FileDisk:"); - DefineDosDevice(DDD_REMOVE_DEFINITION, &VolumeName[4], NULL); - return -1; - } - - return 0; -} - -int FileDiskUmount(char DriveLetter) -{ - char VolumeName[] = "\\\\.\\ :"; - HANDLE Device; - DWORD BytesReturned; - - VolumeName[4] = 
DriveLetter; - - Device = CreateFile( - VolumeName, - GENERIC_READ | GENERIC_WRITE, - FILE_SHARE_READ | FILE_SHARE_WRITE, - NULL, - OPEN_EXISTING, - FILE_FLAG_NO_BUFFERING, - NULL - ); - - if (Device == INVALID_HANDLE_VALUE) - { - PrintLastError(&VolumeName[4]); - return -1; - } - - if (!DeviceIoControl( - Device, - FSCTL_LOCK_VOLUME, - NULL, - 0, - NULL, - 0, - &BytesReturned, - NULL - )) - { - PrintLastError(&VolumeName[4]); - return -1; - } - - if (!DeviceIoControl( - Device, - IOCTL_FILE_DISK_CLOSE_FILE, - NULL, - 0, - NULL, - 0, - &BytesReturned, - NULL - )) - { - PrintLastError("FileDisk:"); - return -1; - } - - if (!DeviceIoControl( - Device, - FSCTL_DISMOUNT_VOLUME, - NULL, - 0, - NULL, - 0, - &BytesReturned, - NULL - )) - { - PrintLastError(&VolumeName[4]); - return -1; - } - - if (!DeviceIoControl( - Device, - FSCTL_UNLOCK_VOLUME, - NULL, - 0, - NULL, - 0, - &BytesReturned, - NULL - )) - { - PrintLastError(&VolumeName[4]); - return -1; - } - - CloseHandle(Device); - - if (!DefineDosDevice( - DDD_REMOVE_DEFINITION, - &VolumeName[4], - NULL - )) - { - PrintLastError(&VolumeName[4]); - return -1; - } - - return 0; -} - -int FileDiskStatus(char DriveLetter) -{ - char VolumeName[] = "\\\\.\\ :"; - HANDLE Device; - POPEN_FILE_INFORMATION OpenFileInformation; - DWORD BytesReturned; - - VolumeName[4] = DriveLetter; - - Device = CreateFile( - VolumeName, - GENERIC_READ, - FILE_SHARE_READ | FILE_SHARE_WRITE, - NULL, - OPEN_EXISTING, - FILE_FLAG_NO_BUFFERING, - NULL - ); - - if (Device == INVALID_HANDLE_VALUE) - { - PrintLastError(&VolumeName[4]); - return -1; - } - - OpenFileInformation = malloc(sizeof(OPEN_FILE_INFORMATION) + MAX_PATH); - - if (!DeviceIoControl( - Device, - IOCTL_FILE_DISK_QUERY_FILE, - NULL, - 0, - OpenFileInformation, - sizeof(OPEN_FILE_INFORMATION) + MAX_PATH, - &BytesReturned, - NULL - )) - { - PrintLastError(&VolumeName[4]); - return -1; - } - - if (BytesReturned < sizeof(OPEN_FILE_INFORMATION)) - { - SetLastError(ERROR_INSUFFICIENT_BUFFER); - PrintLastError(&VolumeName[4]); - return -1; - } - - printf("%c: %.*s Size: %I64u bytes%s\n", - DriveLetter, - OpenFileInformation->FileNameLength, - OpenFileInformation->FileName, - OpenFileInformation->FileSize, - OpenFileInformation->ReadOnly ? 
", ReadOnly" : "" - ); - - return 0; -} - -int __cdecl main(int argc, char* argv[]) -{ - char* Command; - int DeviceNumber; - char* FileName; - char* Option; - char DriveLetter; - BOOLEAN CdImage = FALSE; - POPEN_FILE_INFORMATION OpenFileInformation; - - Command = argv[1]; - - if ((argc == 5 || argc == 6) && !strcmp(Command, "/mount")) - { - DeviceNumber = atoi(argv[2]); - FileName = argv[3]; - - if (strlen(FileName) < 2) - { - return FileDiskSyntax(); - } - - OpenFileInformation = - malloc(sizeof(OPEN_FILE_INFORMATION) + strlen(FileName) + 7); - - memset( - OpenFileInformation, - 0, - sizeof(OPEN_FILE_INFORMATION) + strlen(FileName) + 7 - ); - - if (FileName[0] == '\\') - { - if (FileName[1] == '\\') - // \\server\share\path\filedisk.img - { - strcpy(OpenFileInformation->FileName, "\\??\\UNC"); - strcat(OpenFileInformation->FileName, FileName + 1); - } - else - // \Device\Harddisk0\Partition1\path\filedisk.img - { - strcpy(OpenFileInformation->FileName, FileName); - } - } - else - // c:\path\filedisk.img - { - strcpy(OpenFileInformation->FileName, "\\??\\"); - strcat(OpenFileInformation->FileName, FileName); - } - - OpenFileInformation->FileNameLength = - (USHORT) strlen(OpenFileInformation->FileName); - - if (argc > 5) - { - Option = argv[4]; - DriveLetter = argv[5][0]; - - if (!strcmp(Option, "/ro")) - { - OpenFileInformation->ReadOnly = TRUE; - } - else if (!strcmp(Option, "/cd")) - { - CdImage = TRUE; - } - else - { - if (Option[strlen(Option) - 1] == 'G') - { - OpenFileInformation->FileSize.QuadPart = - _atoi64(Option) * 1024 * 1024 * 1024; - } - else if (Option[strlen(Option) - 1] == 'M') - { - OpenFileInformation->FileSize.QuadPart = - _atoi64(Option) * 1024 * 1024; - } - else if (Option[strlen(Option) - 1] == 'k') - { - OpenFileInformation->FileSize.QuadPart = - _atoi64(Option) * 1024; - } - else - { - OpenFileInformation->FileSize.QuadPart = - _atoi64(Option); - } - } - } - else - { - DriveLetter = argv[4][0]; - } - return FileDiskMount(DeviceNumber, OpenFileInformation, DriveLetter, CdImage); - } - else if (argc == 3 && !strcmp(Command, "/umount")) - { - DriveLetter = argv[2][0]; - return FileDiskUmount(DriveLetter); - } - else if (argc == 3 && !strcmp(Command, "/status")) - { - DriveLetter = argv[2][0]; - return FileDiskStatus(DriveLetter); - } - else - { - return FileDiskSyntax(); - } -} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/filedisk/exe/filedisk.rc --- a/buildframework/helium/external/filedisk/exe/filedisk.rc Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,104 +0,0 @@ -//Microsoft Developer Studio generated resource script. -// - -#define APSTUDIO_READONLY_SYMBOLS -///////////////////////////////////////////////////////////////////////////// -// -// Generated from the TEXTINCLUDE 2 resource. -// -#include "afxres.h" - -///////////////////////////////////////////////////////////////////////////// -#undef APSTUDIO_READONLY_SYMBOLS - -///////////////////////////////////////////////////////////////////////////// -// English (U.S.) 
resources - -#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU) -#ifdef _WIN32 -LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US -#pragma code_page(1252) -#endif //_WIN32 - -#ifndef _MAC -///////////////////////////////////////////////////////////////////////////// -// -// Version -// - -VS_VERSION_INFO VERSIONINFO - FILEVERSION 1,0,0,11 - PRODUCTVERSION 1,0,0,11 - FILEFLAGSMASK 0x3fL -#ifdef _DEBUG - FILEFLAGS 0x1L -#else - FILEFLAGS 0x0L -#endif - FILEOS 0x40004L - FILETYPE 0x1L - FILESUBTYPE 0x0L -BEGIN - BLOCK "StringFileInfo" - BEGIN - BLOCK "040904b0" - BEGIN - VALUE "CompanyName", "Bo Brantén\0" - VALUE "FileDescription", "FileDisk Virtual Disk Driver\0" - VALUE "FileVersion", "1.0.0.11\0" - VALUE "InternalName", "filedisk\0" - VALUE "LegalCopyright", "Copyright © 1999-2002 Bo Brantén\0" - VALUE "OriginalFilename", "filedisk.exe\0" - VALUE "ProductName", "filedisk\0" - VALUE "ProductVersion", "1.0.0.11\0" - END - END - BLOCK "VarFileInfo" - BEGIN - VALUE "Translation", 0x409, 1200 - END -END - -#endif // !_MAC - - -#ifdef APSTUDIO_INVOKED -///////////////////////////////////////////////////////////////////////////// -// -// TEXTINCLUDE -// - -1 TEXTINCLUDE DISCARDABLE -BEGIN - "resource.h\0" -END - -2 TEXTINCLUDE DISCARDABLE -BEGIN - "#include ""afxres.h""\r\n" - "\0" -END - -3 TEXTINCLUDE DISCARDABLE -BEGIN - "\r\n" - "\0" -END - -#endif // APSTUDIO_INVOKED - -#endif // English (U.S.) resources -///////////////////////////////////////////////////////////////////////////// - - - -#ifndef APSTUDIO_INVOKED -///////////////////////////////////////////////////////////////////////////// -// -// Generated from the TEXTINCLUDE 3 resource. -// - - -///////////////////////////////////////////////////////////////////////////// -#endif // not APSTUDIO_INVOKED - diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/filedisk/exe/sources --- a/buildframework/helium/external/filedisk/exe/sources Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,6 +0,0 @@ -TARGETNAME=filedisk -TARGETPATH=obj -TARGETTYPE=PROGRAM -UMTYPE=console -INCLUDES=..\sys\inc -SOURCES=filedisk.c filedisk.rc diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/filedisk/filedisk.exe Binary file buildframework/helium/external/filedisk/filedisk.exe has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/filedisk/filedisk.reg --- a/buildframework/helium/external/filedisk/filedisk.reg Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,18 +0,0 @@ -REGEDIT4 - -[HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Services\FileDisk] - -"ErrorControl"=dword:00000001 - -# -# When to start the driver: -# At boot: Start=1 -# Manually: Start=3 -# -"Start"=dword:00000001 - -"Type"=dword:00000001 - -[HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Services\FileDisk\Parameters] - -"NumberOfDevices"=dword:00000004 diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/filedisk/filedisk.sys Binary file buildframework/helium/external/filedisk/filedisk.sys has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/filedisk/install.txt --- a/buildframework/helium/external/filedisk/install.txt Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,11 +0,0 @@ -Copy the driver (filedisk.sys) to %systemroot%\system32\drivers\. - -Optionally edit filedisk.reg for automatic/manually start and -number of devices. - -Import filedisk.reg to the Registry. - -Reboot. 
- -Use the program filedisk.exe to mount/umount files, for an example -of use see the file example.txt. diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/filedisk/readme.txt --- a/buildframework/helium/external/filedisk/readme.txt Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,82 +0,0 @@ - - This is a virtual disk driver for Windows NT/2000/XP that uses - one or more files to emulate physical disks. - Copyright (C) 1999-2006 Bo Brantén. - This program is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation; either version 2 of the License, or - (at your option) any later version. - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - You should have received a copy of the GNU General Public License - along with this program; if not, write to the Free Software - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - - The GNU General Public License is also available from: - http://www.gnu.org/copyleft/gpl.html - - Windows and Windows NT are either registered trademarks or trademarks of - Microsoft Corporation in the United States and/or other countries. - - Please send comments, corrections and contributions to bosse@acc.umu.se - - The most recent version of this program is available from: - http://www.acc.umu.se/~bosse/ - - Revision history: - - 14. 2006-01-05 - Updated impersonation so that use of image files stored on network - drives work on Windows 2000 SP4, Windows XP SP2 and - Windows Server 2003 SP1. - - 13. 2004-06-09 - Small fix in file size handling. - - 12. 2004-05-27 - Possible fix for memory leak. - - 11. 2002-11-30 - Added ioctl to query information about mounted disk image files by - request from developer of GUI. - - 10. 2002-11-24 - Added a check so that FileDisk doesn't use compressed or encrypted - images. For an explanation why this doesn't work see comment in the - source code. - - 9. 2002-08-26 - Corrected the share access for read-only FileDisk images. - - 8. 2002-08-11 - Updated the control application to support UNC paths. - Changed the handling of CD-ROM device objects to avoid some problems on - Windows XP. - Corrected the handling of file sizes so that FileDisk images can be - sparse files. - - 7. 2002-02-28 - Added support for CD-images. - - 6. 2002-01-21 - Added support for impersonation so that FileDisk images can be stored - on network drives. - - 5. 2002-01-18 - Updated for Windows XP by Robert A. Rose. - - 4. 2001-07-08 - Formating to FAT on Windows 2000 now works. - - 3. 2001-05-14 - Corrected the error messages from the usermode control application. - - 2. 2000-03-15 - Added handling of IOCTL_DISK_CHECK_VERIFY to make the driver work on - Windows 2000 (tested on beta 3, build 2031). Formating to FAT still - doesn't work but formating to NTFS does. - - 1. 1999-06-09 - Initial release. diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/filedisk/sys/inc/filedisk.h --- a/buildframework/helium/external/filedisk/sys/inc/filedisk.h Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,50 +0,0 @@ -/* - This is a virtual disk driver for Windows NT/2000/XP that uses one or more - files to emulate physical disks. - Copyright (C) 1999, 2000, 2001, 2002 Bo Brantén. 
- This program is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation; either version 2 of the License, or - (at your option) any later version. - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - You should have received a copy of the GNU General Public License - along with this program; if not, write to the Free Software - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -*/ - -#ifndef _FILE_DISK_ -#define _FILE_DISK_ - -#ifndef __T -#ifdef _NTDDK_ -#define __T(x) L ## x -#else -#define __T(x) x -#endif -#endif - -#ifndef _T -#define _T(x) __T(x) -#endif - -#define DEVICE_BASE_NAME _T("\\FileDisk") -#define DEVICE_DIR_NAME _T("\\Device") DEVICE_BASE_NAME -#define DEVICE_NAME_PREFIX DEVICE_DIR_NAME DEVICE_BASE_NAME - -#define FILE_DEVICE_FILE_DISK 0x8000 - -#define IOCTL_FILE_DISK_OPEN_FILE CTL_CODE(FILE_DEVICE_FILE_DISK, 0x800, METHOD_BUFFERED, FILE_READ_ACCESS | FILE_WRITE_ACCESS) -#define IOCTL_FILE_DISK_CLOSE_FILE CTL_CODE(FILE_DEVICE_FILE_DISK, 0x801, METHOD_BUFFERED, FILE_READ_ACCESS | FILE_WRITE_ACCESS) -#define IOCTL_FILE_DISK_QUERY_FILE CTL_CODE(FILE_DEVICE_FILE_DISK, 0x802, METHOD_BUFFERED, FILE_READ_ACCESS) - -typedef struct _OPEN_FILE_INFORMATION { - LARGE_INTEGER FileSize; - BOOLEAN ReadOnly; - USHORT FileNameLength; - UCHAR FileName[1]; -} OPEN_FILE_INFORMATION, *POPEN_FILE_INFORMATION; - -#endif diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/filedisk/sys/obj/i386/checked/filedisk.sys Binary file buildframework/helium/external/filedisk/sys/obj/i386/checked/filedisk.sys has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/filedisk/sys/obj/i386/free/filedisk.sys Binary file buildframework/helium/external/filedisk/sys/obj/i386/free/filedisk.sys has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/filedisk/sys/src/MAKEFILE --- a/buildframework/helium/external/filedisk/sys/src/MAKEFILE Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,7 +0,0 @@ -# -# DO NOT EDIT THIS FILE!!! Edit .\sources. if you want to add a new source -# file to this component. This file merely indirects to the real make file -# that is shared by all the driver components of the Windows NT DDK -# - -!INCLUDE $(NTMAKEENV)\makefile.def diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/filedisk/sys/src/Sources --- a/buildframework/helium/external/filedisk/sys/src/Sources Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,5 +0,0 @@ -TARGETNAME=filedisk -TARGETPATH=..\obj -TARGETTYPE=DRIVER -INCLUDES=..\inc -SOURCES=filedisk.c filedisk.rc diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/filedisk/sys/src/filedisk.c --- a/buildframework/helium/external/filedisk/sys/src/filedisk.c Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1537 +0,0 @@ -/* - This is a virtual disk driver for Windows NT/2000/XP that uses - one or more files to emulate physical disks. - Copyright (C) 1999-2006 Bo Brantén. 
- This program is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation; either version 2 of the License, or - (at your option) any later version. - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - You should have received a copy of the GNU General Public License - along with this program; if not, write to the Free Software - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -*/ - -#include -#include -#include -#include - -// -// We include some stuff from newer DDK:s here so that one -// version of the driver for all versions of Windows can -// be compiled with the Windows NT 4.0 DDK. -// -#if (VER_PRODUCTBUILD < 2195) - -#define FILE_DEVICE_MASS_STORAGE 0x0000002d -#define IOCTL_STORAGE_CHECK_VERIFY2 CTL_CODE(IOCTL_STORAGE_BASE, 0x0200, METHOD_BUFFERED, FILE_ANY_ACCESS) -#define FILE_ATTRIBUTE_ENCRYPTED 0x00004000 - -#endif - -#if (VER_PRODUCTBUILD < 2600) - -#define IOCTL_DISK_GET_PARTITION_INFO_EX CTL_CODE(IOCTL_DISK_BASE, 0x0012, METHOD_BUFFERED, FILE_ANY_ACCESS) -#define IOCTL_DISK_GET_LENGTH_INFO CTL_CODE(IOCTL_DISK_BASE, 0x0017, METHOD_BUFFERED, FILE_READ_ACCESS) - -typedef enum _PARTITION_STYLE { - PARTITION_STYLE_MBR, - PARTITION_STYLE_GPT -} PARTITION_STYLE; - -typedef unsigned __int64 ULONG64, *PULONG64; - -typedef struct _PARTITION_INFORMATION_MBR { - UCHAR PartitionType; - BOOLEAN BootIndicator; - BOOLEAN RecognizedPartition; - ULONG HiddenSectors; -} PARTITION_INFORMATION_MBR, *PPARTITION_INFORMATION_MBR; - -typedef struct _PARTITION_INFORMATION_GPT { - GUID PartitionType; - GUID PartitionId; - ULONG64 Attributes; - WCHAR Name[36]; -} PARTITION_INFORMATION_GPT, *PPARTITION_INFORMATION_GPT; - -typedef struct _PARTITION_INFORMATION_EX { - PARTITION_STYLE PartitionStyle; - LARGE_INTEGER StartingOffset; - LARGE_INTEGER PartitionLength; - ULONG PartitionNumber; - BOOLEAN RewritePartition; - union { - PARTITION_INFORMATION_MBR Mbr; - PARTITION_INFORMATION_GPT Gpt; - }; -} PARTITION_INFORMATION_EX, *PPARTITION_INFORMATION_EX; - -typedef struct _GET_LENGTH_INFORMATION { - LARGE_INTEGER Length; -} GET_LENGTH_INFORMATION, *PGET_LENGTH_INFORMATION; - -#endif // (VER_PRODUCTBUILD < 2600) - -// -// We include some stuff from ntifs.h here so that -// the driver can be compiled with only the DDK. 
-// - -#define TOKEN_SOURCE_LENGTH 8 - -typedef enum _TOKEN_TYPE { - TokenPrimary = 1, - TokenImpersonation -} TOKEN_TYPE; - -typedef struct _TOKEN_SOURCE { - CCHAR SourceName[TOKEN_SOURCE_LENGTH]; - LUID SourceIdentifier; -} TOKEN_SOURCE, *PTOKEN_SOURCE; - -typedef struct _TOKEN_CONTROL { - LUID TokenId; - LUID AuthenticationId; - LUID ModifiedId; - TOKEN_SOURCE TokenSource; -} TOKEN_CONTROL, *PTOKEN_CONTROL; - -typedef struct _SECURITY_CLIENT_CONTEXT { - SECURITY_QUALITY_OF_SERVICE SecurityQos; - PACCESS_TOKEN ClientToken; - BOOLEAN DirectlyAccessClientToken; - BOOLEAN DirectAccessEffectiveOnly; - BOOLEAN ServerIsRemote; - TOKEN_CONTROL ClientTokenControl; -} SECURITY_CLIENT_CONTEXT, *PSECURITY_CLIENT_CONTEXT; - -#define PsDereferenceImpersonationToken(T) \ - {if (ARGUMENT_PRESENT(T)) { \ - (ObDereferenceObject((T))); \ - } else { \ - ; \ - } \ -} - -#define PsDereferencePrimaryToken(T) (ObDereferenceObject((T))) - -NTKERNELAPI -VOID -PsRevertToSelf ( - VOID -); - -NTKERNELAPI -NTSTATUS -SeCreateClientSecurity ( - IN PETHREAD Thread, - IN PSECURITY_QUALITY_OF_SERVICE QualityOfService, - IN BOOLEAN RemoteClient, - OUT PSECURITY_CLIENT_CONTEXT ClientContext -); - -#define SeDeleteClientSecurity(C) { \ - if (SeTokenType((C)->ClientToken) == TokenPrimary) { \ - PsDereferencePrimaryToken( (C)->ClientToken ); \ - } else { \ - PsDereferenceImpersonationToken( (C)->ClientToken ); \ - } \ -} - -NTKERNELAPI -VOID -SeImpersonateClient ( - IN PSECURITY_CLIENT_CONTEXT ClientContext, - IN PETHREAD ServerThread OPTIONAL -); - -NTKERNELAPI -TOKEN_TYPE -SeTokenType ( - IN PACCESS_TOKEN Token -); - -#ifndef SE_IMPERSONATE_PRIVILEGE -#define SE_IMPERSONATE_PRIVILEGE (29L) -#endif - -#define TOKEN_ASSIGN_PRIMARY (0x0001) -#define TOKEN_DUPLICATE (0x0002) -#define TOKEN_IMPERSONATE (0x0004) -#define TOKEN_QUERY (0x0008) -#define TOKEN_QUERY_SOURCE (0x0010) -#define TOKEN_ADJUST_PRIVILEGES (0x0020) -#define TOKEN_ADJUST_GROUPS (0x0040) -#define TOKEN_ADJUST_DEFAULT (0x0080) - -#define TOKEN_ALL_ACCESS (STANDARD_RIGHTS_REQUIRED |\ - TOKEN_ASSIGN_PRIMARY |\ - TOKEN_DUPLICATE |\ - TOKEN_IMPERSONATE |\ - TOKEN_QUERY |\ - TOKEN_QUERY_SOURCE |\ - TOKEN_ADJUST_PRIVILEGES |\ - TOKEN_ADJUST_GROUPS |\ - TOKEN_ADJUST_DEFAULT) - -typedef struct _TOKEN_PRIVILEGES { - ULONG PrivilegeCount; - LUID_AND_ATTRIBUTES Privileges[1]; -} TOKEN_PRIVILEGES, *PTOKEN_PRIVILEGES; - -NTSYSAPI -NTSTATUS -NTAPI -ZwOpenProcessToken ( - IN HANDLE ProcessHandle, - IN ACCESS_MASK DesiredAccess, - OUT PHANDLE TokenHandle -); - -NTSYSAPI -NTSTATUS -NTAPI -NtAdjustPrivilegesToken ( - IN HANDLE TokenHandle, - IN BOOLEAN DisableAllPrivileges, - IN PTOKEN_PRIVILEGES NewState, - IN ULONG BufferLength, - OUT PTOKEN_PRIVILEGES PreviousState OPTIONAL, - OUT PULONG ReturnLength -); - -// -// For backward compatibility with Windows NT 4.0 by Bruce Engle. 
-// -#ifndef MmGetSystemAddressForMdlSafe -#define MmGetSystemAddressForMdlSafe(MDL, PRIORITY) MmGetSystemAddressForMdlPrettySafe(MDL) - -PVOID -MmGetSystemAddressForMdlPrettySafe ( - PMDL Mdl - ) -{ - CSHORT MdlMappingCanFail; - PVOID MappedSystemVa; - - MdlMappingCanFail = Mdl->MdlFlags & MDL_MAPPING_CAN_FAIL; - - Mdl->MdlFlags |= MDL_MAPPING_CAN_FAIL; - - MappedSystemVa = MmGetSystemAddressForMdl(Mdl); - - if (MdlMappingCanFail == 0) - { - Mdl->MdlFlags &= ~MDL_MAPPING_CAN_FAIL; - } - - return MappedSystemVa; -} -#endif - -#include "filedisk.h" - -#define PARAMETER_KEY L"\\Parameters" - -#define NUMBEROFDEVICES_VALUE L"NumberOfDevices" - -#define DEFAULT_NUMBEROFDEVICES 4 - -#define SECTOR_SIZE 512 - -#define TOC_DATA_TRACK 0x04 - -HANDLE dir_handle; - -typedef struct _DEVICE_EXTENSION { - BOOLEAN media_in_device; - HANDLE file_handle; - ANSI_STRING file_name; - LARGE_INTEGER file_size; - BOOLEAN read_only; - PSECURITY_CLIENT_CONTEXT security_client_context; - LIST_ENTRY list_head; - KSPIN_LOCK list_lock; - KEVENT request_event; - PVOID thread_pointer; - BOOLEAN terminate_thread; -} DEVICE_EXTENSION, *PDEVICE_EXTENSION; - -NTSTATUS -DriverEntry ( - IN PDRIVER_OBJECT DriverObject, - IN PUNICODE_STRING RegistryPath -); - -NTSTATUS -FileDiskCreateDevice ( - IN PDRIVER_OBJECT DriverObject, - IN ULONG Number, - IN DEVICE_TYPE DeviceType -); - -VOID -FileDiskUnload ( - IN PDRIVER_OBJECT DriverObject -); - -PDEVICE_OBJECT -FileDiskDeleteDevice ( - IN PDEVICE_OBJECT DeviceObject -); - -NTSTATUS -FileDiskCreateClose ( - IN PDEVICE_OBJECT DeviceObject, - IN PIRP Irp -); - -NTSTATUS -FileDiskReadWrite ( - IN PDEVICE_OBJECT DeviceObject, - IN PIRP Irp -); - -NTSTATUS -FileDiskDeviceControl ( - IN PDEVICE_OBJECT DeviceObject, - IN PIRP Irp -); - -VOID -FileDiskThread ( - IN PVOID Context -); - -NTSTATUS -FileDiskOpenFile ( - IN PDEVICE_OBJECT DeviceObject, - IN PIRP Irp -); - -NTSTATUS -FileDiskCloseFile ( - IN PDEVICE_OBJECT DeviceObject, - IN PIRP Irp -); - -NTSTATUS -FileDiskAdjustPrivilege ( - IN ULONG Privilege, - IN BOOLEAN Enable -); - -int swprintf(wchar_t *, const wchar_t *, ...); - -#pragma code_seg("INIT") - -NTSTATUS -DriverEntry ( - IN PDRIVER_OBJECT DriverObject, - IN PUNICODE_STRING RegistryPath - ) -{ - UNICODE_STRING parameter_path; - RTL_QUERY_REGISTRY_TABLE query_table[2]; - ULONG n_devices; - NTSTATUS status; - UNICODE_STRING device_dir_name; - OBJECT_ATTRIBUTES object_attributes; - ULONG n; - USHORT n_created_devices; - - parameter_path.Length = 0; - - parameter_path.MaximumLength = RegistryPath->Length + sizeof(PARAMETER_KEY); - - parameter_path.Buffer = (PWSTR) ExAllocatePool(PagedPool, parameter_path.MaximumLength); - - if (parameter_path.Buffer == NULL) - { - return STATUS_INSUFFICIENT_RESOURCES; - } - - RtlCopyUnicodeString(¶meter_path, RegistryPath); - - RtlAppendUnicodeToString(¶meter_path, PARAMETER_KEY); - - RtlZeroMemory(&query_table[0], sizeof(query_table)); - - query_table[0].Flags = RTL_QUERY_REGISTRY_DIRECT | RTL_QUERY_REGISTRY_REQUIRED; - query_table[0].Name = NUMBEROFDEVICES_VALUE; - query_table[0].EntryContext = &n_devices; - - status = RtlQueryRegistryValues( - RTL_REGISTRY_ABSOLUTE, - parameter_path.Buffer, - &query_table[0], - NULL, - NULL - ); - - ExFreePool(parameter_path.Buffer); - - if (!NT_SUCCESS(status)) - { - KdPrint(("FileDisk: Query registry failed, using default values.\n")); - n_devices = DEFAULT_NUMBEROFDEVICES; - } - - RtlInitUnicodeString(&device_dir_name, DEVICE_DIR_NAME); - - InitializeObjectAttributes( - &object_attributes, - 
&device_dir_name, - OBJ_PERMANENT, - NULL, - NULL - ); - - status = ZwCreateDirectoryObject( - &dir_handle, - DIRECTORY_ALL_ACCESS, - &object_attributes - ); - - if (!NT_SUCCESS(status)) - { - return status; - } - - ZwMakeTemporaryObject(dir_handle); - - for (n = 0, n_created_devices = 0; n < n_devices; n++) - { - status = FileDiskCreateDevice(DriverObject, n, FILE_DEVICE_DISK); - - if (NT_SUCCESS(status)) - { - n_created_devices++; - } - } - - for (n = 0; n < n_devices; n++) - { - status = FileDiskCreateDevice(DriverObject, n, FILE_DEVICE_CD_ROM); - - if (NT_SUCCESS(status)) - { - n_created_devices++; - } - } - - if (n_created_devices == 0) - { - ZwClose(dir_handle); - return status; - } - - DriverObject->MajorFunction[IRP_MJ_CREATE] = FileDiskCreateClose; - DriverObject->MajorFunction[IRP_MJ_CLOSE] = FileDiskCreateClose; - DriverObject->MajorFunction[IRP_MJ_READ] = FileDiskReadWrite; - DriverObject->MajorFunction[IRP_MJ_WRITE] = FileDiskReadWrite; - DriverObject->MajorFunction[IRP_MJ_DEVICE_CONTROL] = FileDiskDeviceControl; - - DriverObject->DriverUnload = FileDiskUnload; - - return STATUS_SUCCESS; -} - -NTSTATUS -FileDiskCreateDevice ( - IN PDRIVER_OBJECT DriverObject, - IN ULONG Number, - IN DEVICE_TYPE DeviceType - ) -{ - WCHAR device_name_buffer[MAXIMUM_FILENAME_LENGTH]; - UNICODE_STRING device_name; - NTSTATUS status; - PDEVICE_OBJECT device_object; - PDEVICE_EXTENSION device_extension; - HANDLE thread_handle; - - ASSERT(DriverObject != NULL); - - if (DeviceType == FILE_DEVICE_CD_ROM) - { - swprintf( - device_name_buffer, - DEVICE_NAME_PREFIX L"Cd" L"%u", - Number - ); - } - else - { - swprintf( - device_name_buffer, - DEVICE_NAME_PREFIX L"%u", - Number - ); - } - - RtlInitUnicodeString(&device_name, device_name_buffer); - - status = IoCreateDevice( - DriverObject, - sizeof(DEVICE_EXTENSION), - &device_name, - DeviceType, - 0, - FALSE, - &device_object - ); - - if (!NT_SUCCESS(status)) - { - return status; - } - - device_object->Flags |= DO_DIRECT_IO; - - device_extension = (PDEVICE_EXTENSION) device_object->DeviceExtension; - - device_extension->media_in_device = FALSE; - - if (DeviceType == FILE_DEVICE_CD_ROM) - { - device_object->Characteristics |= FILE_READ_ONLY_DEVICE; - device_extension->read_only = TRUE; - } - - InitializeListHead(&device_extension->list_head); - - KeInitializeSpinLock(&device_extension->list_lock); - - KeInitializeEvent( - &device_extension->request_event, - SynchronizationEvent, - FALSE - ); - - device_extension->terminate_thread = FALSE; - - status = PsCreateSystemThread( - &thread_handle, - (ACCESS_MASK) 0L, - NULL, - NULL, - NULL, - FileDiskThread, - device_object - ); - - if (!NT_SUCCESS(status)) - { - IoDeleteDevice(device_object); - return status; - } - - status = ObReferenceObjectByHandle( - thread_handle, - THREAD_ALL_ACCESS, - NULL, - KernelMode, - &device_extension->thread_pointer, - NULL - ); - - if (!NT_SUCCESS(status)) - { - ZwClose(thread_handle); - - device_extension->terminate_thread = TRUE; - - KeSetEvent( - &device_extension->request_event, - (KPRIORITY) 0, - FALSE - ); - - IoDeleteDevice(device_object); - - return status; - } - - ZwClose(thread_handle); - - return STATUS_SUCCESS; -} - -#pragma code_seg("PAGE") - -VOID -FileDiskUnload ( - IN PDRIVER_OBJECT DriverObject - ) -{ - PDEVICE_OBJECT device_object; - - PAGED_CODE(); - - device_object = DriverObject->DeviceObject; - - while (device_object) - { - device_object = FileDiskDeleteDevice(device_object); - } - - ZwClose(dir_handle); -} - -PDEVICE_OBJECT -FileDiskDeleteDevice ( - IN 
PDEVICE_OBJECT DeviceObject - ) -{ - PDEVICE_EXTENSION device_extension; - PDEVICE_OBJECT next_device_object; - - PAGED_CODE(); - - ASSERT(DeviceObject != NULL); - - device_extension = (PDEVICE_EXTENSION) DeviceObject->DeviceExtension; - - device_extension->terminate_thread = TRUE; - - KeSetEvent( - &device_extension->request_event, - (KPRIORITY) 0, - FALSE - ); - - KeWaitForSingleObject( - device_extension->thread_pointer, - Executive, - KernelMode, - FALSE, - NULL - ); - - ObDereferenceObject(device_extension->thread_pointer); - - if (device_extension->security_client_context != NULL) - { - SeDeleteClientSecurity(device_extension->security_client_context); - ExFreePool(device_extension->security_client_context); - } - - next_device_object = DeviceObject->NextDevice; - - IoDeleteDevice(DeviceObject); - - return next_device_object; -} - -NTSTATUS -FileDiskCreateClose ( - IN PDEVICE_OBJECT DeviceObject, - IN PIRP Irp - ) -{ - PAGED_CODE(); - - Irp->IoStatus.Status = STATUS_SUCCESS; - Irp->IoStatus.Information = FILE_OPENED; - - IoCompleteRequest(Irp, IO_NO_INCREMENT); - - return STATUS_SUCCESS; -} - -#pragma code_seg() - -NTSTATUS -FileDiskReadWrite ( - IN PDEVICE_OBJECT DeviceObject, - IN PIRP Irp - ) -{ - PDEVICE_EXTENSION device_extension; - PIO_STACK_LOCATION io_stack; - - device_extension = (PDEVICE_EXTENSION) DeviceObject->DeviceExtension; - - if (!device_extension->media_in_device) - { - Irp->IoStatus.Status = STATUS_NO_MEDIA_IN_DEVICE; - Irp->IoStatus.Information = 0; - - IoCompleteRequest(Irp, IO_NO_INCREMENT); - - return STATUS_NO_MEDIA_IN_DEVICE; - } - - io_stack = IoGetCurrentIrpStackLocation(Irp); - - if (io_stack->Parameters.Read.Length == 0) - { - Irp->IoStatus.Status = STATUS_SUCCESS; - Irp->IoStatus.Information = 0; - - IoCompleteRequest(Irp, IO_NO_INCREMENT); - - return STATUS_SUCCESS; - } - - IoMarkIrpPending(Irp); - - ExInterlockedInsertTailList( - &device_extension->list_head, - &Irp->Tail.Overlay.ListEntry, - &device_extension->list_lock - ); - - KeSetEvent( - &device_extension->request_event, - (KPRIORITY) 0, - FALSE - ); - - return STATUS_PENDING; -} - -NTSTATUS -FileDiskDeviceControl ( - IN PDEVICE_OBJECT DeviceObject, - IN PIRP Irp - ) -{ - PDEVICE_EXTENSION device_extension; - PIO_STACK_LOCATION io_stack; - NTSTATUS status; - - device_extension = (PDEVICE_EXTENSION) DeviceObject->DeviceExtension; - - io_stack = IoGetCurrentIrpStackLocation(Irp); - - if (!device_extension->media_in_device && - io_stack->Parameters.DeviceIoControl.IoControlCode != - IOCTL_FILE_DISK_OPEN_FILE) - { - Irp->IoStatus.Status = STATUS_NO_MEDIA_IN_DEVICE; - Irp->IoStatus.Information = 0; - - IoCompleteRequest(Irp, IO_NO_INCREMENT); - - return STATUS_NO_MEDIA_IN_DEVICE; - } - - switch (io_stack->Parameters.DeviceIoControl.IoControlCode) - { - case IOCTL_FILE_DISK_OPEN_FILE: - { - SECURITY_QUALITY_OF_SERVICE security_quality_of_service; - - if (device_extension->media_in_device) - { - KdPrint(("FileDisk: IOCTL_FILE_DISK_OPEN_FILE: Media already opened\n")); - - status = STATUS_INVALID_DEVICE_REQUEST; - Irp->IoStatus.Information = 0; - break; - } - - if (io_stack->Parameters.DeviceIoControl.InputBufferLength < - sizeof(OPEN_FILE_INFORMATION)) - { - status = STATUS_INVALID_PARAMETER; - Irp->IoStatus.Information = 0; - break; - } - - if (io_stack->Parameters.DeviceIoControl.InputBufferLength < - sizeof(OPEN_FILE_INFORMATION) + - ((POPEN_FILE_INFORMATION)Irp->AssociatedIrp.SystemBuffer)->FileNameLength - - sizeof(UCHAR)) - { - status = STATUS_INVALID_PARAMETER; - Irp->IoStatus.Information = 0; - 
break; - } - - if (device_extension->security_client_context != NULL) - { - SeDeleteClientSecurity(device_extension->security_client_context); - } - else - { - device_extension->security_client_context = - ExAllocatePool(NonPagedPool, sizeof(SECURITY_CLIENT_CONTEXT)); - } - - RtlZeroMemory(&security_quality_of_service, sizeof(SECURITY_QUALITY_OF_SERVICE)); - - security_quality_of_service.Length = sizeof(SECURITY_QUALITY_OF_SERVICE); - security_quality_of_service.ImpersonationLevel = SecurityImpersonation; - security_quality_of_service.ContextTrackingMode = SECURITY_STATIC_TRACKING; - security_quality_of_service.EffectiveOnly = FALSE; - - SeCreateClientSecurity( - PsGetCurrentThread(), - &security_quality_of_service, - FALSE, - device_extension->security_client_context - ); - - IoMarkIrpPending(Irp); - - ExInterlockedInsertTailList( - &device_extension->list_head, - &Irp->Tail.Overlay.ListEntry, - &device_extension->list_lock - ); - - KeSetEvent( - &device_extension->request_event, - (KPRIORITY) 0, - FALSE - ); - - status = STATUS_PENDING; - - break; - } - - case IOCTL_FILE_DISK_CLOSE_FILE: - { - IoMarkIrpPending(Irp); - - ExInterlockedInsertTailList( - &device_extension->list_head, - &Irp->Tail.Overlay.ListEntry, - &device_extension->list_lock - ); - - KeSetEvent( - &device_extension->request_event, - (KPRIORITY) 0, - FALSE - ); - - status = STATUS_PENDING; - - break; - } - - case IOCTL_FILE_DISK_QUERY_FILE: - { - POPEN_FILE_INFORMATION open_file_information; - - if (io_stack->Parameters.DeviceIoControl.OutputBufferLength < - sizeof(OPEN_FILE_INFORMATION) + device_extension->file_name.Length - sizeof(UCHAR)) - { - status = STATUS_BUFFER_TOO_SMALL; - Irp->IoStatus.Information = 0; - break; - } - - open_file_information = (POPEN_FILE_INFORMATION) Irp->AssociatedIrp.SystemBuffer; - - open_file_information->FileSize.QuadPart = device_extension->file_size.QuadPart; - open_file_information->ReadOnly = device_extension->read_only; - open_file_information->FileNameLength = device_extension->file_name.Length; - - RtlCopyMemory( - open_file_information->FileName, - device_extension->file_name.Buffer, - device_extension->file_name.Length - ); - - status = STATUS_SUCCESS; - Irp->IoStatus.Information = sizeof(OPEN_FILE_INFORMATION) + - open_file_information->FileNameLength - sizeof(UCHAR); - - break; - } - - case IOCTL_DISK_CHECK_VERIFY: - case IOCTL_CDROM_CHECK_VERIFY: - case IOCTL_STORAGE_CHECK_VERIFY: - case IOCTL_STORAGE_CHECK_VERIFY2: - { - status = STATUS_SUCCESS; - Irp->IoStatus.Information = 0; - break; - } - - case IOCTL_DISK_GET_DRIVE_GEOMETRY: - case IOCTL_CDROM_GET_DRIVE_GEOMETRY: - { - PDISK_GEOMETRY disk_geometry; - ULONGLONG length; - - if (io_stack->Parameters.DeviceIoControl.OutputBufferLength < - sizeof(DISK_GEOMETRY)) - { - status = STATUS_BUFFER_TOO_SMALL; - Irp->IoStatus.Information = 0; - break; - } - - disk_geometry = (PDISK_GEOMETRY) Irp->AssociatedIrp.SystemBuffer; - - length = device_extension->file_size.QuadPart; - - disk_geometry->Cylinders.QuadPart = length / SECTOR_SIZE / 32 / 2; - disk_geometry->MediaType = FixedMedia; - disk_geometry->TracksPerCylinder = 2; - disk_geometry->SectorsPerTrack = 32; - disk_geometry->BytesPerSector = SECTOR_SIZE; - - status = STATUS_SUCCESS; - Irp->IoStatus.Information = sizeof(DISK_GEOMETRY); - - break; - } - - case IOCTL_DISK_GET_LENGTH_INFO: - { - PGET_LENGTH_INFORMATION get_length_information; - - if (io_stack->Parameters.DeviceIoControl.OutputBufferLength < - sizeof(GET_LENGTH_INFORMATION)) - { - status = STATUS_BUFFER_TOO_SMALL; - 
Irp->IoStatus.Information = 0; - break; - } - - get_length_information = (PGET_LENGTH_INFORMATION) Irp->AssociatedIrp.SystemBuffer; - - get_length_information->Length.QuadPart = device_extension->file_size.QuadPart; - - status = STATUS_SUCCESS; - Irp->IoStatus.Information = sizeof(GET_LENGTH_INFORMATION); - - break; - } - - case IOCTL_DISK_GET_PARTITION_INFO: - { - PPARTITION_INFORMATION partition_information; - ULONGLONG length; - - if (io_stack->Parameters.DeviceIoControl.OutputBufferLength < - sizeof(PARTITION_INFORMATION)) - { - status = STATUS_BUFFER_TOO_SMALL; - Irp->IoStatus.Information = 0; - break; - } - - partition_information = (PPARTITION_INFORMATION) Irp->AssociatedIrp.SystemBuffer; - - length = device_extension->file_size.QuadPart; - - partition_information->StartingOffset.QuadPart = 0; - partition_information->PartitionLength.QuadPart = length; - partition_information->HiddenSectors = 1; - partition_information->PartitionNumber = 0; - partition_information->PartitionType = 0; - partition_information->BootIndicator = FALSE; - partition_information->RecognizedPartition = FALSE; - partition_information->RewritePartition = FALSE; - - status = STATUS_SUCCESS; - Irp->IoStatus.Information = sizeof(PARTITION_INFORMATION); - - break; - } - - case IOCTL_DISK_GET_PARTITION_INFO_EX: - { - PPARTITION_INFORMATION_EX partition_information_ex; - ULONGLONG length; - - if (io_stack->Parameters.DeviceIoControl.OutputBufferLength < - sizeof(PARTITION_INFORMATION_EX)) - { - status = STATUS_BUFFER_TOO_SMALL; - Irp->IoStatus.Information = 0; - break; - } - - partition_information_ex = (PPARTITION_INFORMATION_EX) Irp->AssociatedIrp.SystemBuffer; - - length = device_extension->file_size.QuadPart; - - partition_information_ex->PartitionStyle = PARTITION_STYLE_MBR; - partition_information_ex->StartingOffset.QuadPart = 0; - partition_information_ex->PartitionLength.QuadPart = length; - partition_information_ex->PartitionNumber = 0; - partition_information_ex->RewritePartition = FALSE; - partition_information_ex->Mbr.PartitionType = 0; - partition_information_ex->Mbr.BootIndicator = FALSE; - partition_information_ex->Mbr.RecognizedPartition = FALSE; - partition_information_ex->Mbr.HiddenSectors = 1; - - status = STATUS_SUCCESS; - Irp->IoStatus.Information = sizeof(PARTITION_INFORMATION_EX); - - break; - } - - case IOCTL_DISK_IS_WRITABLE: - { - if (!device_extension->read_only) - { - status = STATUS_SUCCESS; - } - else - { - status = STATUS_MEDIA_WRITE_PROTECTED; - } - Irp->IoStatus.Information = 0; - break; - } - - case IOCTL_DISK_MEDIA_REMOVAL: - case IOCTL_STORAGE_MEDIA_REMOVAL: - { - status = STATUS_SUCCESS; - Irp->IoStatus.Information = 0; - break; - } - - case IOCTL_CDROM_READ_TOC: - { - PCDROM_TOC cdrom_toc; - - if (io_stack->Parameters.DeviceIoControl.OutputBufferLength < - sizeof(CDROM_TOC)) - { - status = STATUS_BUFFER_TOO_SMALL; - Irp->IoStatus.Information = 0; - break; - } - - cdrom_toc = (PCDROM_TOC) Irp->AssociatedIrp.SystemBuffer; - - RtlZeroMemory(cdrom_toc, sizeof(CDROM_TOC)); - - cdrom_toc->FirstTrack = 1; - cdrom_toc->LastTrack = 1; - cdrom_toc->TrackData[0].Control = TOC_DATA_TRACK; - - status = STATUS_SUCCESS; - Irp->IoStatus.Information = sizeof(CDROM_TOC); - - break; - } - - case IOCTL_DISK_SET_PARTITION_INFO: - { - if (device_extension->read_only) - { - status = STATUS_MEDIA_WRITE_PROTECTED; - Irp->IoStatus.Information = 0; - break; - } - - if (io_stack->Parameters.DeviceIoControl.InputBufferLength < - sizeof(SET_PARTITION_INFORMATION)) - { - status = STATUS_INVALID_PARAMETER; - 
Irp->IoStatus.Information = 0; - break; - } - - status = STATUS_SUCCESS; - Irp->IoStatus.Information = 0; - - break; - } - - case IOCTL_DISK_VERIFY: - { - PVERIFY_INFORMATION verify_information; - - if (io_stack->Parameters.DeviceIoControl.InputBufferLength < - sizeof(VERIFY_INFORMATION)) - { - status = STATUS_INVALID_PARAMETER; - Irp->IoStatus.Information = 0; - break; - } - - verify_information = (PVERIFY_INFORMATION) Irp->AssociatedIrp.SystemBuffer; - - status = STATUS_SUCCESS; - Irp->IoStatus.Information = verify_information->Length; - - break; - } - - default: - { - KdPrint(( - "FileDisk: Unknown IoControlCode %#x\n", - io_stack->Parameters.DeviceIoControl.IoControlCode - )); - - status = STATUS_INVALID_DEVICE_REQUEST; - Irp->IoStatus.Information = 0; - } - } - - if (status != STATUS_PENDING) - { - Irp->IoStatus.Status = status; - - IoCompleteRequest(Irp, IO_NO_INCREMENT); - } - - return status; -} - -VOID -FileDiskThread ( - IN PVOID Context - ) -{ - PDEVICE_OBJECT device_object; - PDEVICE_EXTENSION device_extension; - PLIST_ENTRY request; - PIRP irp; - PIO_STACK_LOCATION io_stack; - PUCHAR system_buffer; - PUCHAR buffer; - - ASSERT(Context != NULL); - - device_object = (PDEVICE_OBJECT) Context; - - device_extension = (PDEVICE_EXTENSION) device_object->DeviceExtension; - - KeSetPriorityThread(KeGetCurrentThread(), LOW_REALTIME_PRIORITY); - - FileDiskAdjustPrivilege(SE_IMPERSONATE_PRIVILEGE, TRUE); - - for (;;) - { - KeWaitForSingleObject( - &device_extension->request_event, - Executive, - KernelMode, - FALSE, - NULL - ); - - if (device_extension->terminate_thread) - { - PsTerminateSystemThread(STATUS_SUCCESS); - } - - while (request = ExInterlockedRemoveHeadList( - &device_extension->list_head, - &device_extension->list_lock - )) - { - irp = CONTAINING_RECORD(request, IRP, Tail.Overlay.ListEntry); - - io_stack = IoGetCurrentIrpStackLocation(irp); - - switch (io_stack->MajorFunction) - { - case IRP_MJ_READ: - system_buffer = (PUCHAR) MmGetSystemAddressForMdlSafe(irp->MdlAddress, NormalPagePriority); - if (system_buffer == NULL) - { - irp->IoStatus.Status = STATUS_INSUFFICIENT_RESOURCES; - irp->IoStatus.Information = 0; - break; - } - buffer = (PUCHAR) ExAllocatePool(PagedPool, io_stack->Parameters.Read.Length); - if (buffer == NULL) - { - irp->IoStatus.Status = STATUS_INSUFFICIENT_RESOURCES; - irp->IoStatus.Information = 0; - break; - } - ZwReadFile( - device_extension->file_handle, - NULL, - NULL, - NULL, - &irp->IoStatus, - buffer, - io_stack->Parameters.Read.Length, - &io_stack->Parameters.Read.ByteOffset, - NULL - ); - RtlCopyMemory(system_buffer, buffer, io_stack->Parameters.Read.Length); - ExFreePool(buffer); - break; - - case IRP_MJ_WRITE: - if ((io_stack->Parameters.Write.ByteOffset.QuadPart + - io_stack->Parameters.Write.Length) > - device_extension->file_size.QuadPart) - { - irp->IoStatus.Status = STATUS_INVALID_PARAMETER; - irp->IoStatus.Information = 0; - } - ZwWriteFile( - device_extension->file_handle, - NULL, - NULL, - NULL, - &irp->IoStatus, - MmGetSystemAddressForMdlSafe(irp->MdlAddress, NormalPagePriority), - io_stack->Parameters.Write.Length, - &io_stack->Parameters.Write.ByteOffset, - NULL - ); - break; - - case IRP_MJ_DEVICE_CONTROL: - switch (io_stack->Parameters.DeviceIoControl.IoControlCode) - { - case IOCTL_FILE_DISK_OPEN_FILE: - - SeImpersonateClient(device_extension->security_client_context, NULL); - - irp->IoStatus.Status = FileDiskOpenFile(device_object, irp); - - PsRevertToSelf(); - - break; - - case IOCTL_FILE_DISK_CLOSE_FILE: - irp->IoStatus.Status = 
FileDiskCloseFile(device_object, irp); - break; - - default: - irp->IoStatus.Status = STATUS_DRIVER_INTERNAL_ERROR; - } - break; - - default: - irp->IoStatus.Status = STATUS_DRIVER_INTERNAL_ERROR; - } - - IoCompleteRequest( - irp, - (CCHAR) (NT_SUCCESS(irp->IoStatus.Status) ? - IO_DISK_INCREMENT : IO_NO_INCREMENT) - ); - } - } -} - -#pragma code_seg("PAGE") - -NTSTATUS -FileDiskOpenFile ( - IN PDEVICE_OBJECT DeviceObject, - IN PIRP Irp - ) -{ - PDEVICE_EXTENSION device_extension; - POPEN_FILE_INFORMATION open_file_information; - UNICODE_STRING ufile_name; - NTSTATUS status; - OBJECT_ATTRIBUTES object_attributes; - FILE_END_OF_FILE_INFORMATION file_eof; - FILE_BASIC_INFORMATION file_basic; - FILE_STANDARD_INFORMATION file_standard; - FILE_ALIGNMENT_INFORMATION file_alignment; - - PAGED_CODE(); - - ASSERT(DeviceObject != NULL); - ASSERT(Irp != NULL); - - device_extension = (PDEVICE_EXTENSION) DeviceObject->DeviceExtension; - - open_file_information = (POPEN_FILE_INFORMATION) Irp->AssociatedIrp.SystemBuffer; - - if (DeviceObject->DeviceType != FILE_DEVICE_CD_ROM) - { - device_extension->read_only = open_file_information->ReadOnly; - } - - device_extension->file_name.Length = open_file_information->FileNameLength; - device_extension->file_name.MaximumLength = open_file_information->FileNameLength; - device_extension->file_name.Buffer = ExAllocatePool(NonPagedPool, open_file_information->FileNameLength); - - RtlCopyMemory( - device_extension->file_name.Buffer, - open_file_information->FileName, - open_file_information->FileNameLength - ); - - status = RtlAnsiStringToUnicodeString( - &ufile_name, - &device_extension->file_name, - TRUE - ); - - if (!NT_SUCCESS(status)) - { - ExFreePool(device_extension->file_name.Buffer); - Irp->IoStatus.Status = status; - Irp->IoStatus.Information = 0; - return status; - } - - InitializeObjectAttributes( - &object_attributes, - &ufile_name, - OBJ_CASE_INSENSITIVE, - NULL, - NULL - ); - - status = ZwCreateFile( - &device_extension->file_handle, - device_extension->read_only ? GENERIC_READ : GENERIC_READ | GENERIC_WRITE, - &object_attributes, - &Irp->IoStatus, - NULL, - FILE_ATTRIBUTE_NORMAL, - device_extension->read_only ? 
FILE_SHARE_READ : 0, - FILE_OPEN, - FILE_NON_DIRECTORY_FILE | - FILE_RANDOM_ACCESS | - FILE_NO_INTERMEDIATE_BUFFERING | - FILE_SYNCHRONOUS_IO_NONALERT, - NULL, - 0 - ); - - if (status == STATUS_OBJECT_NAME_NOT_FOUND || status == STATUS_NO_SUCH_FILE) - { - if (device_extension->read_only || open_file_information->FileSize.QuadPart == 0) - { - ExFreePool(device_extension->file_name.Buffer); - RtlFreeUnicodeString(&ufile_name); - - Irp->IoStatus.Status = STATUS_NO_SUCH_FILE; - Irp->IoStatus.Information = 0; - - return STATUS_NO_SUCH_FILE; - } - else - { - status = ZwCreateFile( - &device_extension->file_handle, - GENERIC_READ | GENERIC_WRITE, - &object_attributes, - &Irp->IoStatus, - &open_file_information->FileSize, - FILE_ATTRIBUTE_NORMAL, - 0, - FILE_OPEN_IF, - FILE_NON_DIRECTORY_FILE | - FILE_RANDOM_ACCESS | - FILE_NO_INTERMEDIATE_BUFFERING | - FILE_SYNCHRONOUS_IO_NONALERT, - NULL, - 0 - ); - - if (!NT_SUCCESS(status)) - { - ExFreePool(device_extension->file_name.Buffer); - RtlFreeUnicodeString(&ufile_name); - return status; - } - - if (Irp->IoStatus.Information == FILE_CREATED) - { - file_eof.EndOfFile.QuadPart = open_file_information->FileSize.QuadPart; - - status = ZwSetInformationFile( - device_extension->file_handle, - &Irp->IoStatus, - &file_eof, - sizeof(FILE_END_OF_FILE_INFORMATION), - FileEndOfFileInformation - ); - - if (!NT_SUCCESS(status)) - { - ExFreePool(device_extension->file_name.Buffer); - RtlFreeUnicodeString(&ufile_name); - ZwClose(device_extension->file_handle); - return status; - } - } - } - } - else if (!NT_SUCCESS(status)) - { - ExFreePool(device_extension->file_name.Buffer); - RtlFreeUnicodeString(&ufile_name); - return status; - } - - RtlFreeUnicodeString(&ufile_name); - - status = ZwQueryInformationFile( - device_extension->file_handle, - &Irp->IoStatus, - &file_basic, - sizeof(FILE_BASIC_INFORMATION), - FileBasicInformation - ); - - if (!NT_SUCCESS(status)) - { - ExFreePool(device_extension->file_name.Buffer); - ZwClose(device_extension->file_handle); - return status; - } - -/* - // - // The NT cache manager can deadlock if a filesystem that is using the cache - // manager is used in a virtual disk that stores its file on a filesystem - // that is also using the cache manager, this is why we open the file with - // FILE_NO_INTERMEDIATE_BUFFERING above, however if the file is compressed - // or encrypted NT will not honor this request and cache it anyway since it - // need to store the decompressed/unencrypted data somewhere, therefor we put - // an extra check here and don't alow disk images to be compressed/encrypted. 
- // - if (file_basic.FileAttributes & (FILE_ATTRIBUTE_COMPRESSED | FILE_ATTRIBUTE_ENCRYPTED)) - { - ExFreePool(device_extension->file_name.Buffer); - ZwClose(device_extension->file_handle); - Irp->IoStatus.Status = STATUS_ACCESS_DENIED; - Irp->IoStatus.Information = 0; - return STATUS_ACCESS_DENIED; - } -*/ - - status = ZwQueryInformationFile( - device_extension->file_handle, - &Irp->IoStatus, - &file_standard, - sizeof(FILE_STANDARD_INFORMATION), - FileStandardInformation - ); - - if (!NT_SUCCESS(status)) - { - ExFreePool(device_extension->file_name.Buffer); - ZwClose(device_extension->file_handle); - return status; - } - - device_extension->file_size.QuadPart = file_standard.EndOfFile.QuadPart; - - status = ZwQueryInformationFile( - device_extension->file_handle, - &Irp->IoStatus, - &file_alignment, - sizeof(FILE_ALIGNMENT_INFORMATION), - FileAlignmentInformation - ); - - if (!NT_SUCCESS(status)) - { - ExFreePool(device_extension->file_name.Buffer); - ZwClose(device_extension->file_handle); - return status; - } - - DeviceObject->AlignmentRequirement = file_alignment.AlignmentRequirement; - - if (device_extension->read_only) - { - DeviceObject->Characteristics |= FILE_READ_ONLY_DEVICE; - } - else - { - DeviceObject->Characteristics &= ~FILE_READ_ONLY_DEVICE; - } - - device_extension->media_in_device = TRUE; - - Irp->IoStatus.Status = STATUS_SUCCESS; - Irp->IoStatus.Information = 0; - - return STATUS_SUCCESS; -} - -NTSTATUS -FileDiskCloseFile ( - IN PDEVICE_OBJECT DeviceObject, - IN PIRP Irp - ) -{ - PDEVICE_EXTENSION device_extension; - - PAGED_CODE(); - - ASSERT(DeviceObject != NULL); - ASSERT(Irp != NULL); - - device_extension = (PDEVICE_EXTENSION) DeviceObject->DeviceExtension; - - ExFreePool(device_extension->file_name.Buffer); - - ZwClose(device_extension->file_handle); - - device_extension->media_in_device = FALSE; - - Irp->IoStatus.Status = STATUS_SUCCESS; - Irp->IoStatus.Information = 0; - - return STATUS_SUCCESS; -} - -NTSTATUS -FileDiskAdjustPrivilege ( - IN ULONG Privilege, - IN BOOLEAN Enable - ) -{ - NTSTATUS status; - HANDLE token_handle; - TOKEN_PRIVILEGES token_privileges; - - PAGED_CODE(); - - status = ZwOpenProcessToken( - NtCurrentProcess(), - TOKEN_ALL_ACCESS, - &token_handle - ); - - if (!NT_SUCCESS(status)) - { - return status; - } - - token_privileges.PrivilegeCount = 1; - token_privileges.Privileges[0].Luid = RtlConvertUlongToLuid(Privilege); - token_privileges.Privileges[0].Attributes = Enable ? SE_PRIVILEGE_ENABLED : 0; - - // - // Normaly one would use ZwAdjustPrivilegesToken but it is only available - // on Windows 2000 and later versions, however since we are in a system - // thread does ExGetPreviousMode always return KernelMode and therefore - // can NtAdjustPrivilegesToken be used directly. - // - status = NtAdjustPrivilegesToken( - token_handle, - FALSE, - &token_privileges, - sizeof(token_privileges), - NULL, - NULL - ); - - ZwClose(token_handle); - - return status; -} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/filedisk/sys/src/filedisk.rc --- a/buildframework/helium/external/filedisk/sys/src/filedisk.rc Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,104 +0,0 @@ -//Microsoft Developer Studio generated resource script. -// - -#define APSTUDIO_READONLY_SYMBOLS -///////////////////////////////////////////////////////////////////////////// -// -// Generated from the TEXTINCLUDE 2 resource. 
-// -#include "afxres.h" - -///////////////////////////////////////////////////////////////////////////// -#undef APSTUDIO_READONLY_SYMBOLS - -///////////////////////////////////////////////////////////////////////////// -// English (U.S.) resources - -#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU) -#ifdef _WIN32 -LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US -#pragma code_page(1252) -#endif //_WIN32 - -#ifndef _MAC -///////////////////////////////////////////////////////////////////////////// -// -// Version -// - -VS_VERSION_INFO VERSIONINFO - FILEVERSION 1,0,0,14 - PRODUCTVERSION 1,0,0,14 - FILEFLAGSMASK 0x3fL -#ifdef _DEBUG - FILEFLAGS 0x1L -#else - FILEFLAGS 0x0L -#endif - FILEOS 0x40004L - FILETYPE 0x3L - FILESUBTYPE 0x0L -BEGIN - BLOCK "StringFileInfo" - BEGIN - BLOCK "040904b0" - BEGIN - VALUE "CompanyName", "Bo Brantén\0" - VALUE "FileDescription", "FileDisk Virtual Disk Driver\0" - VALUE "FileVersion", "1.0.0.14\0" - VALUE "InternalName", "filedisk\0" - VALUE "LegalCopyright", "Copyright © 1999-2006 Bo Brantén\0" - VALUE "OriginalFilename", "filedisk.sys\0" - VALUE "ProductName", "filedisk\0" - VALUE "ProductVersion", "1.0.0.14\0" - END - END - BLOCK "VarFileInfo" - BEGIN - VALUE "Translation", 0x409, 1200 - END -END - -#endif // !_MAC - - -#ifdef APSTUDIO_INVOKED -///////////////////////////////////////////////////////////////////////////// -// -// TEXTINCLUDE -// - -1 TEXTINCLUDE DISCARDABLE -BEGIN - "resource.h\0" -END - -2 TEXTINCLUDE DISCARDABLE -BEGIN - "#include ""afxres.h""\r\n" - "\0" -END - -3 TEXTINCLUDE DISCARDABLE -BEGIN - "\r\n" - "\0" -END - -#endif // APSTUDIO_INVOKED - -#endif // English (U.S.) resources -///////////////////////////////////////////////////////////////////////////// - - - -#ifndef APSTUDIO_INVOKED -///////////////////////////////////////////////////////////////////////////// -// -// Generated from the TEXTINCLUDE 3 resource. 
-// - - -///////////////////////////////////////////////////////////////////////////// -#endif // not APSTUDIO_INVOKED - diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/bin/helium-core.jar Binary file buildframework/helium/external/helium-antlib/bin/helium-core.jar has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/bin/helium-diamonds.jar Binary file buildframework/helium/external/helium-antlib/bin/helium-diamonds.jar has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/bin/helium-imaker.jar Binary file buildframework/helium/external/helium-antlib/bin/helium-imaker.jar has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/bin/helium-logging.jar Binary file buildframework/helium/external/helium-antlib/bin/helium-logging.jar has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/bin/helium-metadata.jar Binary file buildframework/helium/external/helium-antlib/bin/helium-metadata.jar has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/bin/helium-quality.jar Binary file buildframework/helium/external/helium-antlib/bin/helium-quality.jar has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/bin/helium-sbs.jar Binary file buildframework/helium/external/helium-antlib/bin/helium-sbs.jar has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/bin/helium-scm.jar Binary file buildframework/helium/external/helium-antlib/bin/helium-scm.jar has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/bin/helium-signaling.jar Binary file buildframework/helium/external/helium-antlib/bin/helium-signaling.jar has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/bin/helium-sysdef.jar Binary file buildframework/helium/external/helium-antlib/bin/helium-sysdef.jar has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/build.xml --- a/buildframework/helium/external/helium-antlib/build.xml Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/build.xml Wed Dec 23 19:29:07 2009 +0200 @@ -24,7 +24,9 @@ Helium Antlib build file. - + + + @@ -52,23 +54,41 @@ + + - + + - + + + + + + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/EmailDataSender.java --- a/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/EmailDataSender.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/EmailDataSender.java Wed Dec 23 19:29:07 2009 +0200 @@ -150,15 +150,16 @@ toListLength = toAddressList.length; InternetAddress[] addressList = new InternetAddress[toListLength]; try { - log.debug("EmailDataSender:getToAddressList:length: " + log.debug("getToAddressList:length: " + toListLength); for (int i = 0; i < toListLength; i++) { - log.debug("EmailDataSender:getToAddressList:address:" + log.debug("getToAddressList:address:" + toAddressList[i]); addressList[i] = new InternetAddress(toAddressList[i]); } } catch (Exception aex) { - log.error("EmailDataSender:AddressException: " + aex); + // We are Ignoring the errors as no need to fail the build. 
+ log.error("AddressException: " + aex); } return addressList; } @@ -217,10 +218,9 @@ public void sendData(String purpose, String fileToSend, String mimeType, String subject, String header, boolean compressData) { try { - log.debug("EmailDataSender:sendData:file: " + fileToSend); - log.debug("EmailDataSender:sendData:mimetype: " + mimeType); + log.debug("sendData:Send file: " + fileToSend + " and mimetype: " + mimeType); if (fileToSend != null) { - log.debug("EmailDataSender:sendData:smtp address: " + log.debug("sendData:smtp address: " + smtpServerAddress); InternetAddress[] toAddresses = getToAddressList(); Properties props = new Properties(); @@ -235,15 +235,14 @@ MimeMultipart multipart = new MimeMultipart("related"); BodyPart messageBodyPart = new MimeBodyPart(); ByteArrayDataSource dataSrc = null; - log.debug("EmailDataSender:sendData:Send file: " + fileToSend); String fileName = new File(fileToSend).getName(); if (compressData) { - log.debug("EmailDataSender: Sending compressed data"); + log.debug("Sending compressed data"); dataSrc = compressFile(fileToSend); dataSrc.setName(fileName + ".gz"); messageBodyPart.setFileName(fileName + ".gz"); } else { - log.debug("EmailDataSender: Sending uncompressed data:"); + log.debug("Sending uncompressed data:"); dataSrc = new ByteArrayDataSource(new FileInputStream( new File(fileToSend)), mimeType); @@ -267,15 +266,21 @@ InternetAddress fromAddress = getFromAddress(); message.setFrom(fromAddress); } catch (Exception e) { - log.debug("Error retrieving current user email address: " + e.getMessage()); + // We are Ignoring the errors as no need to fail the build. + log.debug("Error retrieving current user email address: " + e.getMessage(), e); } message.addRecipients(Message.RecipientType.TO, toAddresses); log.info("Sending email alert: " + subject); Transport.send(message); } } catch (Exception e) { - log.info("Not sending e-mail signal because of errors"); - log.debug("Failed sending e-mail: " + purpose + ": ", e); + String errorMessage = e.getMessage(); + String fullErrorMessage = "Failed sending e-mail: " + purpose; + if (errorMessage != null) { + fullErrorMessage += " " + errorMessage; + } + // We are Ignoring the errors as no need to fail the build. 
+ log.info(fullErrorMessage); } } @@ -342,12 +347,11 @@ if (en.hasMore()) { SearchResult sr = en.next(); String email = (String) sr.getAttributes().get("mail").get(); - log.debug("EmailDataSender:getUserEmail:" + email); + log.debug("getUserEmail:" + email); return email; } } catch (javax.naming.NameNotFoundException ex) { - log.debug("EmailDataSender:username:" + username + "finding error"); - throw new HlmAntLibException("Error find user email"); + throw new HlmAntLibException("Error finding user email for " + username ); } throw new HlmAntLibException("Could not find user email in LDAP."); } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/TemplateProcessor.java --- a/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/TemplateProcessor.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/TemplateProcessor.java Wed Dec 23 19:29:07 2009 +0200 @@ -68,15 +68,12 @@ } } } catch (java.io.IOException e) { - log.warn("I/O Error during template conversion: " + e.getMessage()); throw new TemplateProcessorException( "I/O Error during template conversion: " + e.getMessage()); } catch (org.xml.sax.SAXException e1) { - log.warn("XML parser error: " + e1); throw new TemplateProcessorException("XML parser error: " + e1.getMessage()); } catch (javax.xml.parsers.ParserConfigurationException e3) { - log.warn("Parser error: " + e3); throw new TemplateProcessorException("Parser error: " + e3.getMessage()); } @@ -111,7 +108,6 @@ convertTemplate(templateFile.getParent(), templateFile.getName(), outputFile.toString(), sourceList); } else { - log.error("Template file is not defined."); throw new TemplateProcessorException("Template file not defined."); } } @@ -145,16 +141,10 @@ .debug("diamonds:TemplateProcessor:adding template directory loader: " + templateDir); } catch (java.io.IOException ie) { - log.debug("Template directory configuring error."); - log.info("Template directory configuring error."); throw new TemplateProcessorException( "Template directory configuring error: " + ie); } } else { - log.debug("Template directory does not exist: " - + templateDirFile.getAbsolutePath()); - log.info("Template directory does not exist: " - + templateDirFile.getAbsolutePath()); throw new TemplateProcessorException( "Template directory does not exist: " + templateDirFile.getAbsolutePath()); @@ -164,14 +154,11 @@ Map templateMap = getTemplateMap(sourceList); template.process(templateMap, new FileWriter(outputFile)); } catch (freemarker.core.InvalidReferenceException ivx) { - log.warn("Invalid reference in configuration: ", ivx); throw new TemplateProcessorException( "Invalid reference in config: " + ivx); } catch (freemarker.template.TemplateException e2) { - log.warn("TemplateException: ", e2); throw new TemplateProcessorException("TemplateException: " + e2); } catch (java.io.IOException e) { - log.warn("I/O Error during template conversion: " + e.getMessage()); throw new TemplateProcessorException( "I/O Error during template conversion: " + e); } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/HeliumExecutor.java --- a/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/HeliumExecutor.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/HeliumExecutor.java Wed Dec 23 19:29:07 2009 +0200 @@ -67,7 +67,7 
@@ */ public void executeTargets(Project project, String[] targetNames) { this.project = project; - log.debug("[HeliumExecutor] Running executeTargets"); + log.debug("Running executeTargets"); BuildException failure = null; try { loadModules(project); @@ -75,6 +75,7 @@ super.executeTargets(project, targetNames); } catch (BuildException e) { // Saving current issue + // We are Ignoring the errors as no need to fail the build. failure = e; } @@ -119,7 +120,7 @@ if (file == null) { return; } - log.debug("[HeliumExecutor] Loading " + moduleLib.getName()); + log.debug("Loading " + moduleLib.getName()); ImportTask task = new ImportTask(); Target target = new Target(); target.setName(""); @@ -152,13 +153,13 @@ if (tempExceptionDefList != null) { exceptionHandlers.put(moduleName, tempExceptionDefList); } - log.debug("HeliumExecutor:loadModule:pre-opsize" + log.debug("loadModule:pre-opsize" + preOperations.size()); - log.debug("HeliumExecutor:loadModule:post-opsize" + log.debug("loadModule:post-opsize" + postOperations.size()); - log.debug("HeliumExecutor:loadModule:exception-opsize" + log.debug("loadModule:exception-opsize" + exceptionHandlers.size()); - log.debug("[HeliumExecutor] Checking " + moduleLib); + log.debug("Checking " + moduleLib); } } @@ -195,7 +196,7 @@ URL url = findHeliumAntlibXml(moduleLib); if (url == null) return null; - log.debug("[HeliumExecutor] Getting " + url); + log.debug("Getting " + url); JarURLConnection jarConnection = (JarURLConnection) url .openConnection(); @@ -213,10 +214,10 @@ } writer.close(); reader.close(); - log.debug("[HeliumExecutor] Temp file " + file.getAbsolutePath()); + log.debug("Temp file " + file.getAbsolutePath()); return file.getAbsolutePath(); } catch (Exception ex) { - log.error("[HeliumExecutor] Error: ", ex); + log.error("Error: " + ex.getMessage(), ex); return null; } } @@ -224,9 +225,9 @@ private void doOperations( HashMap> operations, Project prj, String[] targetNames) { - log.debug("HeliumExecutor:doOperations: start"); + log.debug("doOperations: start"); for (String moduleName : operations.keySet()) { - log.debug("HeliumExecutor:doOperations: module" + moduleName); + log.debug("doOperations: module" + moduleName); for (HlmDefinition definition : operations.get(moduleName)) { definition.execute(prj, moduleName, targetNames); } @@ -234,9 +235,8 @@ } private void handleExceptions(Project prj, Exception e) { - log.debug("HeliumExecutor:handleExceptions: start"); for (String moduleName : this.exceptionHandlers.keySet()) { - log.debug("HeliumExecutor:handleExceptions: module" + moduleName); + log.debug("handleExceptions: module" + moduleName); for (HlmExceptionHandler exceptionHandler : this.exceptionHandlers .get(moduleName)) { exceptionHandler.handleException(prj, moduleName, e); diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/VariableIFImpl.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/VariableIFImpl.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,40 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. 
+* +* Contributors: +* +* Description: +* +*/ + +package com.nokia.helium.core.ant; + +import org.apache.tools.ant.types.DataType; +import com.nokia.helium.core.ant.types.Variable; +import java.util.Collection; + +/** + * Interface to get the list of variables of type VariableSet + */ +public class VariableIFImpl extends DataType +{ + + + /** + * Get the name of the variable. + * @return name. + */ + public Collection getVariables() { + //Implemented by sub class + return null; + } + +} \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/VariableInterface.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/VariableInterface.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,36 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ + +package com.nokia.helium.core.ant; + +import com.nokia.helium.core.ant.types.Variable; +import java.util.Collection; + +/** + * Interface to get the list of variables of type VariableSet + */ +public interface VariableInterface +{ + + + /** + * Get the name of the variable. + * @return name. + */ + Collection getVariables(); + +} \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/antlib.xml --- a/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/antlib.xml Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/antlib.xml Wed Dec 23 19:29:07 2009 +0200 @@ -24,10 +24,14 @@ + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/conditions/XMLLogCondition.java --- a/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/conditions/XMLLogCondition.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/conditions/XMLLogCondition.java Wed Dec 23 19:29:07 2009 +0200 @@ -87,8 +87,7 @@ if (severity == null) throw new BuildException("'severity' attribute is not defined"); - this.log("Looking for severity '" + severity + "' under '" - + fileName.getAbsolutePath() + "'"); + //this.log("Looking for severity '" + severity + "' under '" + fileName.getAbsolutePath() + "'"); SAXParserFactory factory = SAXParserFactory.newInstance(); try { SAXParser saxParser = factory.newSAXParser(); diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/helium.antlib.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/helium.antlib.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,33 @@ + + + + + Ant task definition declarations. 
+ + + + + + + + \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/taskdefs/GetValueFromVariableSet.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/taskdefs/GetValueFromVariableSet.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,126 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ + +package com.nokia.helium.core.ant.taskdefs; + +import org.apache.tools.ant.BuildException; +import java.util.Vector; +import java.util.Collection; +import com.nokia.helium.core.ant.VariableIFImpl; +import org.apache.tools.ant.Task; +import com.nokia.helium.core.ant.types.Variable; + + +/** + * To retrive a variable value from a collection of variable set based on name, which contains property-value in pair. + * + *
+ * Example:
+ * 
+ * <hlm:argSet id="test.variableSet">
+ *     <variable name="v1" value="the_value_1"/>
+ *     <variable name="v2" value="the_value_2"/>
+ *     <variable name="v3" value="the_value_3"/>
+ * </hlm:argSet>
+ * 
+ * <hlm:getVariableValue name="v3" property="v3.value">
+ *     <hlm:argSet refid="test.variableSet"/>
+ * </hlm:getVariableValue>
+ * 
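+ * After the task above has run, the property named by the property
+ * attribute holds the value of the matching variable (the_value_3 in
+ * this example). If no variable matches and failonerror is true (the
+ * default), the build fails.
+ * 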
+ * @ant.task name="getVariableValue" + */ +public class GetValueFromVariableSet extends Task { + private String name; + private String property; + private boolean failOnError = true; + + private Vector variableIntefaces = new Vector(); + + public void setName(String name) { + this.name = name; + } + + /** + * Helper function to set failonerror attribute for the task. + * @param failStatus, if true will fail the build if no variable is found for + * matching name. + */ + public void setFailOnError(boolean failStatus) { + failOnError = failStatus; + } + + /** + * Helper function to store the name of the property where the value to be stored + * @param property name of the property where the result to be stored + */ + public void setProperty(String property) { + this.property = property; + } + + /** + * Helper function to create the VariableIFImpl object. + * @return created VariableIFImpl instance + */ + public VariableIFImpl createVariableIFImpl() { + VariableIFImpl var = new VariableIFImpl(); + add(var); + return var; + } + + + /** + * Helper function to add the newly created variable set. Called by ant. + * @param vs variable set to be added. + */ + public void add(VariableIFImpl vs) { + variableIntefaces.add(vs); + } + + public VariableIFImpl getVariableInterface() { + if (variableIntefaces.isEmpty()) { + throw new BuildException("variable interface cannot be null"); + } + if (variableIntefaces.size() > 1 ) { + throw new BuildException("maximum one variable interface can be set"); + } + return variableIntefaces.elementAt(0); + } + + + /** + * Task to get the name / value pair + * @return return the name / value pair for the variable set. + */ + public void execute() { + if (name == null) + throw new BuildException("'name' attribute has not been defined."); + if (property == null) + throw new BuildException( + "'property' attribute has not been defined."); + VariableIFImpl varInterface = getVariableInterface(); + Collection variables = varInterface.getVariables(); + for (Variable var : variables) { + if (var.getName().equals(name)) { + getProject().setProperty(property, var.getValue()); + return; + } + } + if (failOnError) { + throw new BuildException("Could not find '" + name + "' variable."); + } + } +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/taskdefs/ValidateUserLogin.java --- a/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/taskdefs/ValidateUserLogin.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/taskdefs/ValidateUserLogin.java Wed Dec 23 19:29:07 2009 +0200 @@ -18,6 +18,7 @@ package com.nokia.helium.core.ant.taskdefs; import org.apache.tools.ant.Task; +import org.apache.tools.ant.Project; import org.apache.tools.ant.BuildException; import javax.naming.*; import javax.naming.directory.*; @@ -138,6 +139,8 @@ DirContext authContext = new InitialDirContext(env); return true; } catch (NamingException e) { + // We are Ignoring the errors as no need to fail the build. + log("Not able to validate the user. 
" + e.getMessage(), Project.MSG_DEBUG); return false; } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/types/HlmFinalTargetDef.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/types/HlmFinalTargetDef.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,54 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ + + +package com.nokia.helium.core.ant.types; + +import org.apache.tools.ant.Project; +import org.apache.tools.ant.Target; +import java.util.Hashtable; +import org.apache.log4j.Logger; + + +/** + * Class to execute the final target as post operation. + */ +public class HlmFinalTargetDef extends HlmPostDefImpl +{ + private Logger log = Logger.getLogger(HlmFinalTargetDef.class); + + /** + * This post action will execute the final target if any to be executed. + * @param prj + * @param module + * @param targetNames + * + */ + public void execute(Project prj, String module, String[] targetNames) { + String finalTargetName = prj.getProperty("hlm.target.final"); + log.debug("Calling final target" + finalTargetName); + if (finalTargetName != null) { + Hashtable targets = prj.getTargets(); + Target finalTarget = (Target)targets.get(finalTargetName); + if (finalTarget == null) { + log.info("The final target : " + finalTargetName + " not available skipping"); + return; + } + finalTarget.execute(); + } + } +} \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/types/HlmImportDef.java --- a/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/types/HlmImportDef.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/types/HlmImportDef.java Wed Dec 23 19:29:07 2009 +0200 @@ -45,8 +45,7 @@ * Will import the given file. */ public void execute(Project prj, String module, String[] targetNames) { - log.debug("importdef:fileName" + file.toString()); - log.debug("importdef:prj name" + prj.getName()); + log.debug("importdef:prj name" + prj.getName() + ". fileName" + file.toString()); ImportTask task = new ImportTask(); Target target = new Target(); target.setName(""); diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/types/HlmListenerDef.java --- a/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/types/HlmListenerDef.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/types/HlmListenerDef.java Wed Dec 23 19:29:07 2009 +0200 @@ -20,6 +20,7 @@ import org.apache.tools.ant.Project; import org.apache.tools.ant.BuildListener; +import org.apache.log4j.Logger; /** * This class implements a listener registration action. 
@@ -29,6 +30,7 @@ public class HlmListenerDef extends HlmPreDefImpl { private String classname; + private Logger log = Logger.getLogger(HlmListenerDef.class); public void setClassname(String classname) { this.classname = classname; @@ -44,11 +46,11 @@ .newInstance(); prj.addBuildListener(listener); } catch (ClassNotFoundException ex) { - ex.printStackTrace(); + log.debug("Class not found exception:" + ex.getMessage(), ex); } catch (InstantiationException ex1) { - ex1.printStackTrace(); + log.debug("Class Instantiation exception:" + ex1.getMessage(), ex1); } catch (IllegalAccessException ex1) { - ex1.printStackTrace(); + log.debug("Illegal Class Access exception:" + ex1.getMessage(), ex1); } } } \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/types/HlmPostDefImpl.java --- a/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/types/HlmPostDefImpl.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/types/HlmPostDefImpl.java Wed Dec 23 19:29:07 2009 +0200 @@ -35,6 +35,6 @@ * Do nothing. */ public void execute(Project prj, String module, String[] targetNames) { - log.debug("HlmPostDefImpl:execute:" + module); + // Empty method. Implemented by extending classes. } } \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/types/Variable.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/types/Variable.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,130 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ + +package com.nokia.helium.core.ant.types; + +import org.apache.tools.ant.types.DataType; +import org.apache.tools.ant.BuildException; +import org.apache.log4j.Logger; + +/** + * Helper class to store the command line variables + * with name / value pair. + * @ant.type name="arg" + * @ant.type name="makeOption" + */ +public class Variable extends DataType +{ + private static Logger log = Logger.getLogger(Variable.class); + private String name; + private String value; + private String cmdLine; + + + public Variable() { + } + + /** + * Set the name of the variable. + * @param name + */ + public void setName(String nm) { + name = nm; + } + + + /** + * Get the name of the variable. + * @return name. + */ + public String getName() { + if ( cmdLine == null) { + if (name == null ) { + throw new BuildException( "name should not be null"); + } + if (value == null) { + throw new BuildException( "value should not be null"); + } + return name; + } else { + if (name != null && value != null) { + throw new BuildException( "you can define either name, value or line attribute and not both"); + } + String cmdPart = cmdLine.trim(); + String[] cmdArgs = cmdPart.split(" "); + return cmdArgs[0]; + } + } + + /** + * Set the value of the variable. 
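+     * Note: a variable is expected to carry either a name/value pair or a
+     * command line string set via setLine, not both.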
+ * @param value + */ + public void setValue(String vlue) { + value = vlue; + } + + /** + * Helper function to set the command line string + * @param line, string as input to command line. + */ + public void setLine(String line) { + cmdLine = line; + } + + /** + * Get the value of the variable. + * @return value. + */ + public String getValue() { + if ( cmdLine == null) { + if (name == null ) { + throw new BuildException( "name should not be null"); + } + if (value == null) { + throw new BuildException( "value should not be null"); + } + return value; + } else { + if (name != null && value != null) { + throw new BuildException( "you can define either name, value or line attribute and not both"); + } + return cmdLine; + } + } + /** + * Get the command line parameter + * @return command line string. + */ + public String getParameter() { + if ( cmdLine == null) { + if (name == null ) { + throw new BuildException( "name should not be null"); + } + if (value == null) { + throw new BuildException( "value should not be null"); + } + return name + "=" + value; + } else { + if (name != null && value != null) { + throw new BuildException( "you can define either name, value or line attribute and not both"); + } + return cmdLine; + } + } + +} \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/types/VariableSet.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/ant/types/VariableSet.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,160 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. 
+* +* Contributors: +* +* Description: +* +*/ + +package com.nokia.helium.core.ant.types; + +import java.util.Vector; +import java.util.HashMap; +import java.util.List; +import com.nokia.helium.core.ant.VariableIFImpl; +import java.util.Collection; +import org.apache.tools.ant.BuildException; +import org.apache.tools.ant.types.Reference; +import java.util.ArrayList; +import org.apache.log4j.Logger; + +/** + * Helper class to store the variable set (list of variables + * with name / value pair) + * @ant.type name="argSet" + */ +public class VariableSet extends VariableIFImpl { + + private static Logger log = Logger.getLogger(VariableSet.class); + + private HashMap variablesMap = new HashMap(); + + private List variables = new ArrayList (); + + private Vector varSets = new Vector(); + + private VariableSet currentSet; + + /** + * Constructor + */ + public VariableSet() { + } + + /** + * Helper function to add the created varset + * @param filter to be added to the varset + */ + public void add(VariableSet varSet) { + currentSet = null; + if (varSet != null) { + varSets.add(varSet); + } + } + + /** + * Creates an empty variable element and adds + * it to the variables list + * @return empty Variable pair + */ + public VariableSet createArgSet() { + VariableSet varSet = new VariableSet(); + add(varSet); + return varSet; + } + + /** + * Creates an empty variable element and adds + * it to the variables list + * @return empty Variable pair + */ + public Variable createArg() { + Variable var = new Variable(); + add(var); + return var; + } + + private void addVariable(Variable var) { + variables.add(var); + } + + /** + * Add a given variable to the list + * @param var variable to add + */ + public void add(Variable var) { + if ( currentSet == null) { + currentSet = new VariableSet(); + varSets.add(currentSet); + } + currentSet.addVariable(var); + } + + /** + * Helper function to get the list of variables defined for this set. + * @return variable list for this set. + */ + public List getVariablesList() { + return variables; + } + + public List getVariableSets() { + return varSets; + } + + + /** + * Helper function to return the list of variables and its references + * @return variable list for this set and its references. 
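+     * <p>
+     * A minimal usage sketch (illustrative only; in a real build the set is
+     * normally populated through nested arg elements or a refid):
+     * <pre>
+     * VariableSet varSet = new VariableSet();
+     * Variable var = varSet.createArg();
+     * var.setName("v1");
+     * var.setValue("the_value_1");
+     * for (Variable variable : varSet.getVariables()) {
+     *     System.out.println(variable.getParameter());
+     * }
+     * </pre>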
+ */ + public Collection getVariables() { + HashMap varMap = getVariablesMap(); + //if (varMap.isEmpty()) { + // throw new BuildException("Variable should not be empty and should contain one arg"); + //} + return getVariablesMap().values(); + } + /** + * Returns the list of variables available in the VariableSet + * @return variable list + */ + public HashMap getVariablesMap() { + HashMap allVariables = new HashMap(); + // Then filters as reference in filterset + Reference refId = getRefid(); + Object varSetObject = null; + if (refId != null) { + try { + varSetObject = refId.getReferencedObject(); + } catch ( Exception ex) { + log.debug("exception in getting variable", ex); + throw new BuildException("Not found: " + ex.getMessage()); + } + if (varSetObject != null && varSetObject instanceof VariableSet) { + HashMap varSetMap = ((VariableSet)varSetObject).getVariablesMap(); + allVariables.putAll(varSetMap); + } + } + if (varSets != null && (!varSets.isEmpty())) { + for (VariableSet varSet : varSets) { + HashMap variablesMap = varSet.getVariablesMap(); + allVariables.putAll(variablesMap); + } + } + if (variables != null && !variables.isEmpty()) { + for (Variable var : variables) { + allVariables.put(var.getName(), var); + } + } + return allVariables; + } +} \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/plexus/AntStreamConsumer.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/plexus/AntStreamConsumer.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,62 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.core.plexus; + +import org.apache.tools.ant.Project; +import org.apache.tools.ant.Task; +import org.codehaus.plexus.util.cli.StreamConsumer; + +/** + * Handle the output lines and redirect them to Ant logging. + * The logging level is configurable. + * + */ +public class AntStreamConsumer implements StreamConsumer { + + private Task task; + private int level = Project.MSG_INFO; + + /** + * Initialize the consumer with the task that will be used to + * redirect the consumed lines. Default logging level will be + * Project.MSG_INFO. + * @param task an Ant task + */ + public AntStreamConsumer(Task task) { + this.task = task; + } + + /** + * Initialize the consumer with the task that will be used to + * redirect the consumed lines, and the level of logging. + * @param task ant Ant task. + * @param level ant logging level to use. 
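+     * <p>
+     * A possible wiring sketch (the surrounding Ant task and the cmd object,
+     * a CommandBase subclass from this package, are assumptions of the example):
+     * <pre>
+     * cmd.addOutputLineHandler(new AntStreamConsumer(this, Project.MSG_DEBUG));
+     * cmd.addErrorLineHandler(new AntStreamConsumer(this, Project.MSG_ERR));
+     * </pre>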
+ */ + public AntStreamConsumer(Task task, int level) { + this.task = task; + this.level = level; + } + + /** + * {@inheritDoc} + */ + @Override + public void consumeLine(String line) { + task.log(line, level); + } + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/plexus/CommandBase.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/plexus/CommandBase.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,214 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.core.plexus; + +import java.io.File; +import java.util.Map; +import java.util.Vector; + +import org.apache.log4j.Logger; +import org.codehaus.plexus.util.Os; +import org.codehaus.plexus.util.cli.CommandLineException; +import org.codehaus.plexus.util.cli.CommandLineUtils; +import org.codehaus.plexus.util.cli.Commandline; +import org.codehaus.plexus.util.cli.StreamConsumer; + +/** + * This abstract class implements some basic support to execute commands and + * redirect outputs to StreamConsumer. You can have common stream consumers or + * execution base streamconsumers. The exception type raised + * can be controlled by the implementing class. + * + * @param + */ +public abstract class CommandBase { + private final Logger log = Logger.getLogger(getClass()); + private Vector outputHandlers = new Vector(); + private Vector errorHandlers = new Vector(); + + /** + * Get the executable name. + * @return + */ + protected abstract String getExecutable(); + + /** + * Throw an exception with message and cause. + * @param message + * @param t + * @throws T + */ + protected abstract void throwException(String message, Throwable t) throws T; + + /** + * Throw an exception with message only. + * @param message + * @throws T + */ + protected void throwException(String message) throws T { + throwException(message, null); + } + + /** + * Location where to execute the command. The default location + * is the current directory. + * @return a File object pointing to a directory. + */ + public File getWorkingDir() { + return new File("."); + } + + /** + * Add a LineHandler to the CommandBase instance. + * LineHandlers could be used to record/log the output stream + * command invocation. + * @param lineHandler a lineHandle instance + */ + public void addOutputLineHandler(StreamConsumer lineHandler) { + if (lineHandler != null) { + outputHandlers.add(lineHandler); + } + } + + /** + * Add a LineHandler to the CommandBase instance. + * LineHandlers could be used to record/log the output error stream + * command invocation. + * @param lineHandler a lineHandle instance + */ + public void addErrorLineHandler(StreamConsumer lineHandler) { + if (lineHandler != null) { + errorHandlers.add(lineHandler); + } + } + + /** + * Execute the command defined by getExecutable with args as list of arguments. 
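+     * <p>
+     * For example (a sketch only; cmd stands for some concrete CommandBase
+     * implementation, which this class does not provide by itself):
+     * <pre>
+     * cmd.addOutputLineHandler(new StreamRecorder());
+     * cmd.execute(new String[] { "--version" });
+     * </pre>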
+ * + * @param args + * @throws T extends + */ + public void execute(String[] args) throws T { + execute(args, null); + } + + /** + * Execute the command given by getExecutable with args as list of arguments and custom StreamConsumer. + * + * @param args + * an array representing blocks arguments + * @param output + * the StreamConsumer to analyze the output with. If null it is + * ignored. + * @throws T + */ + public void execute(String[] args, StreamConsumer output) throws T { + execute(args, null, output); + } + + /** + * Execute the command given by getExecutable with args as list of arguments and custom StreamConsumer. + * Also env content will be added to the environment. + * + * @param args + * an array representing blocks arguments + * @param env + * additional key to add the environment + * @param output + * the StreamConsumer to analyze the output with. If null it is + * ignored. + * @throws T + */ + public void executeCmdLine(String argLine, Map env, StreamConsumer output) + throws T { + Commandline cmdLine = new Commandline(); + cmdLine.createArg().setValue(getExecutable()); + if (argLine != null) { + cmdLine.createArg().setLine(argLine); + } + executeCmd(cmdLine, env, output); + } + + private void executeCmd(Commandline cmdLine, Map env, StreamConsumer output) throws T { + if (env != null) { + for (Map.Entry entry : env.entrySet()) { + cmdLine.addEnvironment(entry.getKey(), entry.getValue()); + } + } + cmdLine.setWorkingDirectory(getWorkingDir()); + + // This is only needed on windows. + if (Os.isFamily(Os.FAMILY_WINDOWS)) { + cmdLine.createArg().setLine("&& exit %%ERRORLEVEL%%"); + } + + + StreamMultiplexer inputMux = new StreamMultiplexer(); + if (output != null) { + inputMux.addHandler(output); + } + for (StreamConsumer lh : outputHandlers) { + inputMux.addHandler(lh); + } + + StreamMultiplexer errorMux = new StreamMultiplexer(); + StreamRecorder errorRecorder = new StreamRecorder(); + errorMux.addHandler(errorRecorder); + for (StreamConsumer lh : errorHandlers) { + errorMux.addHandler(lh); + } + + try { + int err = CommandLineUtils.executeCommandLine(cmdLine, inputMux, + errorMux); + // check its exit value + log.debug("Execution of " + getExecutable() + " returned: " + err); + if (err != 0) { + throwException(errorRecorder.getBuffer() + " (return code: " + err + + ")"); + } + } catch (CommandLineException e) { + throwException( + "Error executing " + getExecutable() + ": " + + e.toString()); + } + } + + /** + * Execute the command given by getExecutable with args as list of arguments and custom StreamConsumer. + * Also env content will be added to the environment. + * + * @param args + * an array representing blocks arguments + * @param env + * additional key to add the environment + * @param output + * the StreamConsumer to analyze the output with. If null it is + * ignored. 
+ * @throws T + */ + public void execute(String[] args, Map env, StreamConsumer output) + throws T { + Commandline cmdLine = new Commandline(); + cmdLine.createArg().setValue(getExecutable()); + if (args != null) { + cmdLine.addArguments(args); + } + executeCmd(cmdLine, env, output); + } + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/plexus/FileStreamConsumer.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/plexus/FileStreamConsumer.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,70 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.core.plexus; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.io.Writer; + +import org.apache.log4j.Logger; +import org.codehaus.plexus.util.cli.StreamConsumer; + +/** + * Record a stream into a file. + * + */ +public class FileStreamConsumer implements StreamConsumer { + private Logger log = Logger.getLogger(getClass()); + private Writer writer; + + /** + * Create a FileStreamConsumer which will record content to + * the output file. + * @param output the file to write the output to. + * @throws FileNotFoundException if an error occur while opening the file. + */ + public FileStreamConsumer(File output) throws FileNotFoundException { + writer = new OutputStreamWriter(new FileOutputStream(output)); + } + + /** + * {@inheritDoc} + */ + @Override + public synchronized void consumeLine(String line) { + try { + writer.append(line + "\n"); + } catch (IOException e) { + log.error("Error while writing to file: " + e.getMessage(), e); + } + } + + /** + * Closing the file. + */ + public void close() { + try { + writer.flush(); + writer.close(); + } catch (IOException e) { + log.error("Error while writing to file: " + e.getMessage(), e); + } + } +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/plexus/StreamMultiplexer.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/plexus/StreamMultiplexer.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,50 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.core.plexus; + +import java.util.Vector; + +import org.codehaus.plexus.util.cli.StreamConsumer; + +/** + * Get the line consumed by a set of StreamConsumer. 
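+ * <p>
+ * A small illustrative sketch, assuming the handlers come from this package:
+ * <pre>
+ * StreamMultiplexer mux = new StreamMultiplexer();
+ * StreamRecorder recorder = new StreamRecorder();
+ * mux.addHandler(recorder);
+ * mux.consumeLine("this line reaches every registered handler");
+ * </pre>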
+ * + */ +public class StreamMultiplexer implements StreamConsumer { + + private Vector handlers = new Vector(); + + /** + * Add an StreamConsumer to the multiplexing. + * @param handler the StreamConsumer to add. + */ + public void addHandler(StreamConsumer handler) { + handlers.add(handler); + } + + /** + * {@inheritDoc} + */ + @Override + public void consumeLine(String line) { + for (StreamConsumer handler : handlers) { + handler.consumeLine(line); + } + } + + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/plexus/StreamRecorder.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/core/src/com/nokia/helium/core/plexus/StreamRecorder.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,69 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.core.plexus; + +import org.apache.log4j.Logger; +import org.codehaus.plexus.util.cli.StreamConsumer; + +/** + * Record the consumed lines into a StringBuffer. + * + */ +public class StreamRecorder implements StreamConsumer { + private Logger log = Logger.getLogger(this.getClass()); + private StringBuffer buffer = new StringBuffer(); + + /** + * Default constructor. + */ + public StreamRecorder() { + } + + /** + * This constructor allows you to set a custom + * buffer. + * @param buffer custom buffer object. + */ + public StreamRecorder(StringBuffer buffer) { + this.buffer = buffer; + } + + /** + * Get the current buffer. + * @return the current buffer object + */ + public StringBuffer getBuffer() { + return buffer; + } + + /** + * Set the buffer object. + * @param buffer custom buffer object. 
+ */ + public synchronized void setBuffer(StringBuffer buffer) { + this.buffer = buffer; + } + + /** + * {@inheritDoc} + */ + @Override + public synchronized void consumeLine(String line) { + log.debug(line); + buffer.append(line + "\n"); + } +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/tests/bld.sh --- a/buildframework/helium/external/helium-antlib/core/tests/bld.sh Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/core/tests/bld.sh Wed Dec 23 19:29:07 2009 +0200 @@ -19,5 +19,5 @@ module load java/1.6.0 -setenv ANT_ARGS "-lib ../../antlibs -lib ../../bin/helium-core.jar -lib ../../lib" +setenv ANT_ARGS "-lib ../../antlibs -lib ../lib -lib ../../lib -lib ../../bin/helium-core.jar " ant -Dant.executor.class="com.nokia.helium.core.ant.HeliumExecutor" $* diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/tests/build.bat --- a/buildframework/helium/external/helium-antlib/core/tests/build.bat Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/core/tests/build.bat Wed Dec 23 19:29:07 2009 +0200 @@ -21,6 +21,6 @@ set TESTED_JAVA=C:\Apps\j2sdk_1.6.0_02 ) ELSE set TESTED_JAVA=%JAVA_6_HOME% if exist %TESTED_JAVA% (set JAVA_HOME=%TESTED_JAVA%) -set ANT_ARGS=-lib %CD%\lib -lib %CD%\..\..\lib -lib %CD%\..\..\antlibs -lib %CD%\..\..\bin\helium-core.jar +set ANT_ARGS=-lib %CD%\..\lib -lib %CD%\..\..\lib -lib %CD%\..\..\antlibs -lib %CD%\..\..\bin\helium-core.jar ant -Dant.executor.class=com.nokia.helium.core.ant.HeliumExecutor %* endlocal \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/tests/build.xml --- a/buildframework/helium/external/helium-antlib/core/tests/build.xml Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/core/tests/build.xml Wed Dec 23 19:29:07 2009 +0200 @@ -22,13 +22,13 @@ --> Helium antlib core tests. + + + - - - - + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/tests/core/executor/test_executor.ant.xml --- a/buildframework/helium/external/helium-antlib/core/tests/core/executor/test_executor.ant.xml Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/core/tests/core/executor/test_executor.ant.xml Wed Dec 23 19:29:07 2009 +0200 @@ -23,24 +23,59 @@ Helium antlib core executor tests. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/tests/core/variableset/build.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/core/tests/core/variableset/build.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,30 @@ + + + + Helium antlib core executor test config. + + + Running target. + + + \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/tests/core/variableset/test_variableset.ant.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/core/tests/core/variableset/test_variableset.ant.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,127 @@ + + + + Helium antlib core variableset tests. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/tests/lib/ant-antunit-1.0.jar Binary file buildframework/helium/external/helium-antlib/core/tests/lib/ant-antunit-1.0.jar has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/tests/src/com/nokia/helium/core/plexus/tests/TestAntStreamConsumer.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/core/tests/src/com/nokia/helium/core/plexus/tests/TestAntStreamConsumer.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,110 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.core.plexus.tests; + +import org.apache.tools.ant.BuildEvent; +import org.apache.tools.ant.BuildListener; +import org.apache.tools.ant.Project; +import org.apache.tools.ant.taskdefs.Echo; +import org.junit.Test; +import static org.junit.Assert.*; +import com.nokia.helium.core.plexus.AntStreamConsumer; + +public class TestAntStreamConsumer { + + public class AntBuildListener implements BuildListener { + private StringBuffer log = new StringBuffer(); + + public StringBuffer getLog() { + return log; + } + + @Override + public void buildFinished(BuildEvent arg0) { + } + + @Override + public void buildStarted(BuildEvent arg0) { + } + + @Override + public void messageLogged(BuildEvent arg0) { + log.append(arg0.getMessage()); + } + + @Override + public void targetFinished(BuildEvent arg0) { + } + + @Override + public void targetStarted(BuildEvent arg0) { + } + + @Override + public void taskFinished(BuildEvent arg0) { + } + + @Override + public void taskStarted(BuildEvent arg0) { + } + + } + + @Test + public void testLoggingThroughAnt() { + // Setting up an Ant task + Project project = new Project(); + AntBuildListener listener = new AntBuildListener(); + project.addBuildListener(listener); + project.init(); + Echo echo = new Echo(); + echo.setProject(project); + echo.setMessage("From the echo task"); + + + // Configuring the Ant consumer + AntStreamConsumer consumer = new AntStreamConsumer(echo); + consumer.consumeLine("consumed line!"); + echo.execute(); + assertTrue(listener.getLog().toString().contains("From the echo task")); + assertTrue(listener.getLog().toString().contains("consumed line!")); + + } + + @Test + public void testLoggingThroughAntAsError() { + // Setting up an Ant task + Project project = new Project(); + AntBuildListener listener = new AntBuildListener(); + project.addBuildListener(listener); + project.init(); + Echo echo = new Echo(); + echo.setProject(project); + echo.setMessage("From the echo task"); + + + // Configuring the Ant consumer + AntStreamConsumer consumer = new AntStreamConsumer(echo, Project.MSG_ERR); + consumer.consumeLine("consumed line!"); + echo.execute(); + assertTrue(listener.getLog().toString().contains("From the echo task")); + 
assertTrue(listener.getLog().toString().contains("consumed line!")); + + } + + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/tests/src/com/nokia/helium/core/plexus/tests/TestCommandBase.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/core/tests/src/com/nokia/helium/core/plexus/tests/TestCommandBase.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,121 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.core.plexus.tests; + +import hidden.org.codehaus.plexus.interpolation.os.Os; + +import java.util.Hashtable; + +import org.junit.Test; +import static org.junit.Assert.*; + +import com.nokia.helium.core.plexus.CommandBase; +import com.nokia.helium.core.plexus.StreamRecorder; + +/** + * Unittests for the CommandBase class. + * + */ +public class TestCommandBase { + + /** + * The simplest possible implementation possible. + * + */ + public class CommandImpl extends CommandBase { + + @Override + protected String getExecutable() { + return "echo"; + } + + @Override + protected void throwException(String message, Throwable t) + throws Exception { + throw new Exception(message, t); + } + + } + + @Test + public void simpleExecution() { + CommandImpl cmd = new CommandImpl(); + try { + cmd.execute(null); + } catch (Exception e) { + fail("Exception should not happen."); + } + + } + + @Test + public void simpleExecutionWithArgs() { + CommandImpl cmd = new CommandImpl(); + String args[] = new String[2]; + args[0] = "foo"; + args[1] = "bar"; + try { + cmd.execute(args); + } catch (Exception e) { + fail("Exception should not happen."); + } + } + + @Test + public void simpleExecutionWithArgsAndRecorder() throws Exception { + CommandImpl cmd = new CommandImpl(); + String args[] = new String[2]; + args[0] = "foo"; + args[1] = "bar"; + StreamRecorder rec = new StreamRecorder(); + cmd.execute(args, rec); + assertTrue(rec.getBuffer().toString().startsWith("foo bar")); + } + + @Test + public void simpleExecutionWithArgsAndRecorderAsOutputHandler() throws Exception { + CommandImpl cmd = new CommandImpl(); + StreamRecorder rec = new StreamRecorder(); + cmd.addOutputLineHandler(rec); + String args[] = new String[2]; + args[0] = "foo"; + args[1] = "bar"; + cmd.execute(args, rec); + assertTrue(rec.getBuffer().toString().startsWith("foo bar")); + } + + @Test + public void simpleExecutionWithEnv() throws Exception { + CommandImpl cmd = new CommandImpl(); + StreamRecorder rec = new StreamRecorder(); + cmd.addOutputLineHandler(rec); + String args[] = new String[2]; + if (Os.isFamily(Os.FAMILY_WINDOWS)) { + args[0] = "%TEST_FOO%"; + args[1] = "%TEST_BAR%"; + } else { + args[0] = "$TEST_FOO"; + args[1] = "$TEST_BAR"; + } + Hashtable env = new Hashtable(); + env.put("TEST_FOO", "foo"); + env.put("TEST_BAR", "bar"); + cmd.execute(args, env, rec); + assertTrue(rec.getBuffer().toString().startsWith("foo bar")); + } + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/tests/src/com/nokia/helium/core/plexus/tests/TestFileStreamConsumer.java --- /dev/null Thu Jan 
01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/core/tests/src/com/nokia/helium/core/plexus/tests/TestFileStreamConsumer.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,41 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.core.plexus.tests; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; + +import org.junit.Test; +import static org.junit.Assert.*; +import com.nokia.helium.core.plexus.FileStreamConsumer; + +public class TestFileStreamConsumer { + + @Test + public void testContentGoesToFile() throws FileNotFoundException, IOException { + // Setting up an Ant task + File temp = File.createTempFile("temp_",".log"); + temp.deleteOnExit(); + FileStreamConsumer consumer = new FileStreamConsumer(temp); + consumer.consumeLine("Hello World!"); + consumer.consumeLine("Bonjour monde!"); + consumer.close(); + assertTrue(temp.length() == 28); + } + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/tests/src/com/nokia/helium/core/plexus/tests/TestStreamMultiplexer.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/core/tests/src/com/nokia/helium/core/plexus/tests/TestStreamMultiplexer.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,58 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.core.plexus.tests; + +import static org.junit.Assert.assertTrue; + +import org.junit.Test; + +import com.nokia.helium.core.plexus.StreamMultiplexer; +import com.nokia.helium.core.plexus.StreamRecorder; + +/** + * Testing the StreamMultiplexer class. + * + */ +public class TestStreamMultiplexer { + + /** + * Having an empty list of handler should not cause any problem. + */ + @Test + public void noHandler() { + StreamMultiplexer mux = new StreamMultiplexer(); + mux.consumeLine("1st line"); + mux.consumeLine("2nd line"); + } + + /** + * The two recorders should record the same stuff. 
+ */ + @Test + public void recordSomeLines() { + StreamMultiplexer mux = new StreamMultiplexer(); + StreamRecorder rec = new StreamRecorder(); + StreamRecorder rec2 = new StreamRecorder(); + mux.addHandler(rec); + mux.addHandler(rec2); + mux.consumeLine("1st line"); + mux.consumeLine("2nd line"); + assertTrue(rec.getBuffer().toString().equals("1st line\n2nd line\n")); + assertTrue(rec2.getBuffer().toString().equals("1st line\n2nd line\n")); + } + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/core/tests/src/com/nokia/helium/core/plexus/tests/TestStreamRecorder.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/core/tests/src/com/nokia/helium/core/plexus/tests/TestStreamRecorder.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,56 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.core.plexus.tests; + +import static org.junit.Assert.*; + +import org.junit.Test; + +import com.nokia.helium.core.plexus.StreamRecorder; + +/** + * Unittests for the TestStreamRecorder class. + * + */ +public class TestStreamRecorder { + + @Test + public void recordSomeLines() { + StreamRecorder rec = new StreamRecorder(); + rec.consumeLine("1st line"); + rec.consumeLine("2nd line"); + assertTrue(rec.getBuffer().toString().equals("1st line\n2nd line\n")); + } + + @Test + public void recordSomeLinesWithOwnBuffer() { + StringBuffer buffer = new StringBuffer(); + StreamRecorder rec = new StreamRecorder(buffer); + assertTrue(rec.getBuffer() == buffer); + rec.consumeLine("1st line"); + rec.consumeLine("2nd line"); + assertTrue(rec.getBuffer().toString().equals("1st line\n2nd line\n")); + } + + @Test + public void setGetBuffer() { + StringBuffer buffer = new StringBuffer(); + StreamRecorder rec = new StreamRecorder(); + assertTrue(rec.getBuffer() != buffer); + rec.setBuffer(buffer); + } +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/diamonds/lib/dom4j-1.6.1.jar Binary file buildframework/helium/external/helium-antlib/diamonds/lib/dom4j-1.6.1.jar has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/DiamondsClient.java --- a/buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/DiamondsClient.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/DiamondsClient.java Wed Dec 23 19:29:07 2009 +0200 @@ -71,10 +71,8 @@ try { result = httpClient.executeMethod(postMethod); } catch (IOException e) { - log.info("IOException while sending http request"); - log.debug("IOException while sending http request: ", e); isRecordOnly = true; - throw new DiamondsException("Exception in executeMethod"); + throw new DiamondsException("IOException while sending http request." 
+ e.getMessage()); // e.printStackTrace(); } return result; @@ -89,8 +87,7 @@ } catch (Exception ex) { // ex.printStackTrace(); isRecordOnly = true; - log.info("Failed to check url, defaulting to input."); - throw new DiamondsException("Exception verifying URL"); + throw new DiamondsException("Failed to check url, defaulting to input. " + ex.getMessage()); } return retURL; } @@ -175,23 +172,19 @@ log.debug("diamondsBuildID: " + diamondsBuildID); } else { isRecordOnly = true; - log.debug("Connection Failed."); - log - .info("Diamonds data not sent, because of connection failure."); + log.error("Diamonds data not sent, because of connection failure."); //throw new DiamondsException("Connection Failed"); } } } catch (HttpException ex) { isRecordOnly = true; - log.debug("Connection Failed", ex); - log.info("Diamonds data not sent, because of httpexception."); + log.error("Diamonds data not sent, because of httpexception.", ex); // log.error("Failed: " + ex.toString()); //throw new DiamondsException("Http Exception see the logs: " // + ex.getMessage()); } catch (IOException ex1) { isRecordOnly = true; - log.debug("Connection Failed. ", ex1); - log.info("Diamonds data not sent, because of io exception."); + log.error("Diamonds data not sent, because of io exception.", ex1); // log.error("Failed: " + ex.toString()); //throw new DiamondsException("Network error, see the logs: " // + ex1.getMessage()); @@ -215,8 +208,7 @@ result = processPostMethodResult(httpClient .executeMethod(postMethod)); } catch (Exception e) { - log.debug("DiamondsClient:sendData by Http: ", e); - log.info("The final data via http not sent because errors: ", e); + log.error("sendData:The final data via http not sent because errors: ", e); } } return result; @@ -233,8 +225,7 @@ "[DIAMONDS_DATA]", null); log.debug("DiamondsClient:sendDataByEmail:succeeds"); } catch (Exception e) { - log.debug("DiamondsClient:sendDataByEmail: ", e); - log.info("The final data via http not sent because errors: ", e); + log.error("sendDataByMail:The final data via http not sent because errors: ", e); return -1; } return 0; diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/DiamondsConfig.java --- a/buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/DiamondsConfig.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/DiamondsConfig.java Wed Dec 23 19:29:07 2009 +0200 @@ -65,11 +65,11 @@ SAXReader saxReader = new SAXReader(); Document document = null; try { - log.debug("diamonds:DiamondsConfig:reading configuration"); + log.debug("Reading diamonds configuration."); document = saxReader.read(configFile); } catch (Exception e) { - log.debug("diamonds:DiamondsConfig:", e); - throw new DiamondsException("Diamonds configuration parsing error: " + // No need to fail the build due to internal Helium configuration errors. 
+ log.debug("Diamonds configuration parsing error: " + e.getMessage()); } parseConfig(document); diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/DiamondsExceptionStatusUpdate.java --- a/buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/DiamondsExceptionStatusUpdate.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/DiamondsExceptionStatusUpdate.java Wed Dec 23 19:29:07 2009 +0200 @@ -91,7 +91,7 @@ if (SignalStatusList.getDeferredSignalList().hasSignalInList()) { Vector signalList = SignalStatusList.getDeferredSignalList().getSignalStatusList(); timeFormat = new SimpleDateFormat(DiamondsConfig.getDiamondsProperties().getProperty("tstampformat")); - log.debug("[DiamondsExceptionStatusUpdate] => Build Status = " + buildStatus); + log.debug("Build Status = " + buildStatus); int i = 0; for (SignalStatus status : signalList) { signalInformation.put("diamond.signal.name." + i, status.getName()); @@ -108,12 +108,12 @@ templateProcessor.convertTemplate(templateDir, templateFile, outputFile,sourceList); /* send the generated XML file for diamonds client to update the signals information into Diamonds */ - log.debug("[DiamondsExceptionStatusUpdate] => sending data to diamonds ..." + outputFile); + log.debug("sending data to diamonds ..." + outputFile); diamondsClient.sendData(outputFile, DiamondsConfig.getDiamondsProperties().getDiamondsBuildID()); try { DiamondsListenerImpl.mergeToFullResults(new File(outputFile)); } catch (DiamondsException de) { - log.debug("[DiamondsExceptionStatusUpdate] => " + de.getMessage()); + log.error("Not able to merge into full results XML file " + de.getMessage(), de); } } /* Check, is the signal is in now signal list? @@ -124,7 +124,7 @@ Vector signalList = SignalStatusList.getNowSignalList().getSignalStatusList(); buildStatus = "failed"; timeFormat = new SimpleDateFormat(DiamondsConfig.getDiamondsProperties().getProperty("tstampformat")); - log.debug("[DiamondsExceptionStatusUpdate] => Build Status = " + buildStatus); + log.debug("Build Status = " + buildStatus); int i = 0; for (SignalStatus status : signalList) { signalInformation.put("diamond.signal.name." + i, status.getName()); @@ -141,12 +141,12 @@ templateProcessor.convertTemplate(templateDir, templateFile, outputFile,sourceList); /* send the generated XML file for diamonds client to update the signals information into Diamonds */ - log.debug("[DiamondsExceptionStatusUpdate] => sending data to diamonds ..." + outputFile); + log.debug("sending data to diamonds ..." 
+ outputFile); diamondsClient.sendData(outputFile, DiamondsConfig.getDiamondsProperties().getDiamondsBuildID()); try { DiamondsListenerImpl.mergeToFullResults(new File(outputFile)); } catch (DiamondsException de) { - log.debug("[DiamondsExceptionStatusUpdate] => " + de.getMessage()); + log.error("Not able to merge into full results XML file " + de.getMessage(), de); } } /* Generate the build status XML file required for diamonds to update the build status information, @@ -165,7 +165,7 @@ try { DiamondsListenerImpl.mergeToFullResults(new File(outputFile)); } catch (DiamondsException de) { - log.debug("[DiamondsExceptionStatusUpdate] => " + de.getMessage()); + log.error("Not able to merge into full results XML file " + de.getMessage(), de); } } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/DiamondsListenerImpl.java --- a/buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/DiamondsListenerImpl.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/DiamondsListenerImpl.java Wed Dec 23 19:29:07 2009 +0200 @@ -106,7 +106,6 @@ .getTemplateDir(), finishTemplateFile, output, sourceList); } catch (Exception e) { - log.debug("diamonds:DiamondsListenerImpl:exception: ", e); throw new DiamondsException( "failed to convert the build finish template: " + e.getMessage()); @@ -118,7 +117,6 @@ diamondsClient.sendData(output, DiamondsConfig .getDiamondsProperties().getDiamondsBuildID()); } catch (Exception e) { - log.debug("diamonds:DiamondsListenerImpl: exception: ", e); throw new DiamondsException("Failed to send data to diamonds: " + e.getMessage()); } @@ -149,7 +147,7 @@ fullResultsFile.getAbsolutePath(), smtpServer, ldapServer); } catch (Exception e) { - log.debug("Error sending diamonds final log: ", e); + log.error("Error sending diamonds final log: ", e); } } } @@ -278,7 +276,6 @@ } isInitialized = true; } catch (Exception e) { - log.debug("diamonds:DiamondsListenerImpl: ", e); throw new DiamondsException("failed to connect to diamonds: " + e.getMessage()); } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/DiamondsPostBuildStatusUpdate.java --- a/buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/DiamondsPostBuildStatusUpdate.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/DiamondsPostBuildStatusUpdate.java Wed Dec 23 19:29:07 2009 +0200 @@ -92,7 +92,7 @@ try { DiamondsListenerImpl.mergeToFullResults(new File(outputFile)); } catch (DiamondsException de) { - log.debug("[DiamondsExceptionStatusUpdate] => " + de.getMessage()); + log.error("Not able to merge into full results XML file " + de.getMessage(), de); } } } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/StageDiamondsListener.java --- a/buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/StageDiamondsListener.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/StageDiamondsListener.java Wed Dec 23 19:29:07 2009 +0200 @@ -80,6 +80,7 @@ initStageTargetsMap(); isTargetMapInitialized = true; } + log.debug("targetBegin targetName: " + targetName + " - currentStartTargetName:" + currentStartTargetName); if 
(currentStartTargetName == null) { findAndSetStartTimeForTargetInStageList(targetName); } @@ -158,10 +159,8 @@ getDiamondsClient().sendData(output, DiamondsConfig .getDiamondsProperties().getDiamondsBuildID()); } catch (com.nokia.helium.core.TemplateProcessorException e1) { - log.debug("diamonds:StageDiamondsListener:exception: ", e1); - throw new DiamondsException( - "template conversion error while sending data for stage: " - + stageName + " : " + e1.getMessage()); + throw new DiamondsException("template conversion error while sending data for stage: " + + stageName + " : " + e1.getMessage()); } } } @@ -199,11 +198,13 @@ log.debug("Diamonds target missing: ", be); } if (arrayList != null) { + log.debug(" + Stage definition: " + stage.getStageName()); Enumeration targetEnum = arrayList.elements(); while (targetEnum.hasMoreElements()) { // fast lookup Target target = targetEnum.nextElement(); stageMap.put(target.getName(), INVALID_DATE); + log.debug(" - Start target: " + target.getName()); } stageTargetBeginList.add(stageMap); @@ -211,6 +212,7 @@ String endTargetName = stage.getEndTargetName(); // fast lookup stageTargetEndMap.put(endTargetName, stage); + log.debug(" - End target: " + endTargetName); } } } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/TargetDiamondsListener.java --- a/buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/TargetDiamondsListener.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/TargetDiamondsListener.java Wed Dec 23 19:29:07 2009 +0200 @@ -166,17 +166,13 @@ } } else { log - .debug("diamonds:TargetDiamondsListener:sendTargetData: exists(" + .debug("sendTargetData: exists(" + templateFile.getAbsolutePath() + ") => false"); } } catch (com.nokia.helium.core.TemplateProcessorException e1) { - log.debug( - "diamonds:TargetDiamondsListener:sendTargetData:exception", - e1); - throw new DiamondsException( - "template conversion error while sending data for target:" - + target + ":" + e1.getMessage()); + throw new DiamondsException("template conversion error while sending data for target:" + + target + ":" + e1.getMessage()); } File outputFile = new File(output); if (outputFile.exists()) { diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/XMLMerger.java --- a/buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/XMLMerger.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/XMLMerger.java Wed Dec 23 19:29:07 2009 +0200 @@ -84,7 +84,6 @@ mergeNode(root, dataRoot); write(); } catch (DocumentException e) { - log.debug("Error merging: " + e); throw new XMLMergerException(e.getMessage()); } } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/ant/HeliumListener.java --- a/buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/ant/HeliumListener.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/diamonds/src/com/nokia/helium/diamonds/ant/HeliumListener.java Wed Dec 23 19:29:07 2009 +0200 @@ -121,8 +121,7 @@ if (errorMessage == null) { errorMessage = ""; } - log - .info("Diamonds Error, might not be logged properly, see debug log. 
" + log.error("Diamonds Error, might not be logged properly, see debug log. " + errorMessage); } if (diamondsListeners != null) { @@ -135,8 +134,7 @@ if (errorMessage == null) { errorMessage = ""; } - log - .info("Diamonds Error, might not be logged properly, see debug log. " + log.error("Diamonds Error, might not be logged properly, see debug log. " + errorMessage); } } @@ -158,7 +156,6 @@ templateProcessor.convertTemplate(configFile, outputFile.toString(), sourceList); } catch (Exception e) { - log.debug("Error: ", e); throw new BuildException( "Diamonds configuration pre-parsing error: " + e.getMessage()); @@ -166,7 +163,6 @@ try { DiamondsConfig.parseConfiguration(outputFile.toString()); } catch (Exception e) { - log.debug("Error: ", e); throw new BuildException("Diamonds configuration parsing error: " + e.getMessage()); } @@ -199,8 +195,7 @@ if (errorMessage == null) { errorMessage = ""; } - log - .info("Diamonds Error, might not be logged properly, see debug log. " + log.error("Diamonds Error, might not be logged properly, see debug log. " + errorMessage); } @@ -217,8 +212,7 @@ try { diamondsListener.buildEnd(event); } catch (Exception e) { - log.debug("Error: ", e); - log.info("Failed to log in diamonds: " + e); + log.error("Failed to log in diamonds: " + e); } } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/diamonds/tests/src/com/nokia/helium/diamonds/tests/TestXMLMerger.java --- a/buildframework/helium/external/helium-antlib/diamonds/tests/src/com/nokia/helium/diamonds/tests/TestXMLMerger.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/diamonds/tests/src/com/nokia/helium/diamonds/tests/TestXMLMerger.java Wed Dec 23 19:29:07 2009 +0200 @@ -25,6 +25,7 @@ import java.io.BufferedReader; import java.io.IOException; import com.nokia.helium.diamonds.XMLMerger; +import com.nokia.helium.diamonds.XMLMerger.XMLMergerException; import org.junit.*; import static org.junit.Assert.*; @@ -142,6 +143,35 @@ } /** + * Test the XMLMerger with xml file with no Root node. + */ + @Test(expected=XMLMergerException.class) + public void test_mergeWithNoRootNode() throws Exception{ + File merge = createTextFile("\n"); + XMLMerger merger = new XMLMerger(merge); + } + /** + * Test the XMLMerger with xml files with different root nodes to merge. + */ + @Test(expected=XMLMergerException.class) + public void test_mergeWithDifferentRootNodes() throws Exception{ + File merge = createTextFile("\n"); + File toBeMerged = createTextFile("\n\n"); + XMLMerger merger = new XMLMerger(merge); + merger.merge(toBeMerged); + } + /** + * Test the XMLMerger with xml files with Wrong xml format + */ + @Test(expected=XMLMergerException.class) + public void test_mergeWithWrongXML() throws Exception{ + File merge = createTextFile("\n"); + File toBeMerged = createTextFile("\n\n"); + XMLMerger merger = new XMLMerger(merge); + merger.merge(toBeMerged); + } + + /** * Load file content into a string. 
* @param fullPathFilename + * @return the file content as a string @@ -188,4 +218,4 @@ } } -} \ No newline at end of file +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/doc/src/build.rst --- a/buildframework/helium/external/helium-antlib/doc/src/build.rst Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/doc/src/build.rst Wed Dec 23 19:29:07 2009 +0200 @@ -76,6 +76,11 @@ [echo] Cleaning helium-signaling [delete] Deleting directory C:\development\workspace\helium-antlib-dragonfly-trunk\sf\build\signaling\classes [delete] Deleting: C:\development\workspace\helium-antlib-dragonfly-trunk\sf\bin\helium-signaling.jar + + clean: + [echo] Cleaning helium-logging + [delete] Deleting directory C:\development\workspace\helium-antlib-dragonfly-trunk\sf\build\logging\classes + [delete] Deleting: C:\development\workspace\helium-antlib-dragonfly-trunk\sf\bin\helium-logging.jar BUILD SUCCESSFUL Total time: 1 second diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/doc/src/structure.rst --- a/buildframework/helium/external/helium-antlib/doc/src/structure.rst Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/doc/src/structure.rst Wed Dec 23 19:29:07 2009 +0200 @@ -125,9 +125,29 @@ Logging ------- -Developer must use standard Ant logging for any user log output. +Developers should preferably use standard Ant logging for any user log output. Internal debug logging must be implemented using Log4J framework. + * Ant listeners must use the Log4J logging framework - using the Ant logging system might cause looping issues. + * Ant Types and Tasks must use the Ant logging mechanism to report to the user. + * Generic framework code (the part of the code which does not link to Ant directly) must use Log4J. + * Usage of System.out.println should be avoided. + * All unhandled exceptions should be considered errors and reported as such: + * use log("message", Project.MSG_ERR) under Ant + * log.error() otherwise. + * Exceptions to this rule must be clearly commented in the code. + * Debug information: + * The Log4J framework (log.debug()) must be used to push information to the Helium debug log - so debug information is not + directly visible to the user. + * The Ant logging framework can also be used to log Type/Task debug info (but Log4J is preferred). + * The printStackTrace method should only be used in the following scenarios: + * When an unknown exception occurs. + * With exceptions other than BuildException. + * When the issue is difficult to debug from Exception.getMessage() alone. + * While debugging a complex issue (the call should not remain in the code after development). + * When all the information about the occurring exception needs to be printed. + + This is an example on how to use logging: :: @@ -144,3 +164,17 @@ Please find more information on Log4J from the online manual: http://logging.apache.org/log4j/1.2/manual.html. + + +Exception +--------- + +Exceptional event reporting and handling is crucial in software development. Developers must make sure it is done according +to the framework they are currently using: + + * To report a build failure under Ant the BuildException must be used, as illustrated in the sketch below. + Keep in mind that a BuildException is unchecked because it derives from RuntimeException. + So be careful with those and limit their purpose to the original usage: Ant build failure.
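A minimal sketch of the logging and exception conventions above, assuming a custom Ant task (the ExampleTask name and its work are illustrative)::

    import java.io.IOException;

    import org.apache.log4j.Logger;
    import org.apache.tools.ant.BuildException;
    import org.apache.tools.ant.Project;
    import org.apache.tools.ant.Task;

    public class ExampleTask extends Task {
        // Log4J logger: goes to the Helium debug log, not directly visible to the user.
        private Logger log = Logger.getLogger(ExampleTask.class);

        @Override
        public void execute() {
            log.debug("Starting the example task.");
            // User-visible output goes through the Ant logging mechanism.
            log("Doing some work.", Project.MSG_INFO);
            try {
                doWork();
            } catch (IOException e) {
                log("Work could not be completed: " + e.getMessage(), Project.MSG_ERR);
                // Report the build failure to Ant.
                throw new BuildException(e.getMessage(), e);
            }
        }

        private void doWork() throws IOException {
            // The real work would happen here.
        }
    }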
+ * It is preferable to have meaningful exception type like: FileNotFoundException. + * Developer should try to avoid as much as possible the throw or catch raw type of exception like Exception, RuntimeError. + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/bld.bat --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/bld.bat Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,60 @@ +@echo off + +rem +rem Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +rem All rights reserved. +rem This component and the accompanying materials are made available +rem under the terms of the License "Eclipse Public License v1.0" +rem which accompanies this distribution, and is available +rem at the URL "http://www.eclipse.org/legal/epl-v10.html". +rem +rem Initial Contributors: +rem Nokia Corporation - initial contribution. +rem +rem Contributors: +rem +rem Description: +rem + +setlocal +if not defined JAVA_6_HOME ( +set TESTED_JAVA=C:\Apps\j2sdk_1.6.0_02 +) ELSE set TESTED_JAVA=%JAVA_6_HOME% +if exist %TESTED_JAVA% (set JAVA_HOME=%TESTED_JAVA%) + +REM Configure Ant +if not defined TESTED_ANT_HOME ( +set TESTED_ANT_HOME=C:\Apps\ant_1.7 +) +if exist %TESTED_ANT_HOME% (set ANT_HOME=%TESTED_ANT_HOME%) + +REM Configure the expected Ant Version details below +SET expMajorVer=1 +SET expMinorVer=7 + +rem *** Verify Ant Version *** +rem -- Run the 'ant -version' command and capture the output to a variable +for /f "tokens=*" %%a in ('ant -version') do (set antversion=%%a) +echo *** Installed Version : %antversion% + +rem -- Parse the version string obtained above and get the version number +for /f "tokens=4 delims= " %%a in ("%antversion%") do set val=%%a +rem -- Parse the version number delimiting the '.' and set the major and +rem minor versions +for /f "tokens=1-2 delims=." %%a in ("%val%") do ( +set /A majorVersion=%%a +set /A minorVersion=%%b +) +rem -- Check whether major version is greater than or equal to the expected. +if %majorVersion% geq %expMajorVer% ( +rem -- if major version is valid, check minor version. If minor version is less +rem than expected display message and abort the execution. +if %minorVersion% lss %expMinorVer% (echo *** Incorrect version of Ant found. Please check you have atleast Ant 1.7.0 & goto :errorstop ) +) + +ant %* +endlocal + +:errorstop +@echo *** Build aborted with error +exit /b 1 \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/build.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/build.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,37 @@ + + + + Helium Antlib iMaker build file. + + + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/demo/build.bat --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/demo/build.bat Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,27 @@ +@echo off + +rem +rem Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +rem All rights reserved. +rem This component and the accompanying materials are made available +rem under the terms of the License "Eclipse Public License v1.0" +rem which accompanies this distribution, and is available +rem at the URL "http://www.eclipse.org/legal/epl-v10.html". +rem +rem Initial Contributors: +rem Nokia Corporation - initial contribution. 
+rem +rem Contributors: +rem +rem Description: +rem + +setlocal +if not defined JAVA_6_HOME ( +set TESTED_JAVA=C:\Apps\j2sdk_1.6.0_02 +) ELSE set TESTED_JAVA=%JAVA_6_HOME% +if exist %TESTED_JAVA% (set JAVA_HOME=%TESTED_JAVA%) +set ANT_ARGS=-lib %CD%\..\lib -lib %CD%\..\..\lib -lib %CD%\..\..\bin\helium-core.jar -lib %CD%\..\..\bin\helium-imaker.jar -lib %CD%\..\..\antlibs +ant %* +endlocal + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/demo/build.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/demo/build.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,66 @@ + + + + Helium Antlib imaker demo. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/HelpConfigStreamConsumer.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/HelpConfigStreamConsumer.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,65 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.imaker; + +import java.io.File; +import java.util.ArrayList; +import java.util.List; + +import org.codehaus.plexus.util.cli.StreamConsumer; + +/** + * This class implements the help-config output parser for iMaker. + * The list of configuration will be stored into an internal list + * object. + * + */ +public class HelpConfigStreamConsumer implements StreamConsumer { + private List configurations = new ArrayList(); + + /** + * {@inheritDoc} + * Only list starting with '/' and ending with '.mk' will be considered. + */ + @Override + public void consumeLine(String line) { + line = line.trim(); + if (line.startsWith("/") && line.endsWith(".mk")) { + configurations.add(line); + } + } + + /** + * Get the list of configurations as File objects. + * @return + */ + public List getConfigurations(File epocroot) { + List confs = new ArrayList(); + for (String config : configurations) { + confs.add(new File(epocroot, config)); + } + return confs; + } + + /** + * Get the list of configuration as strings. + * @return the list of configurations + */ + public List getConfigurations() { + return configurations; + } +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/HelpTargetListStreamConsumer.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/HelpTargetListStreamConsumer.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,52 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". 
+* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.imaker; + +import java.util.ArrayList; +import java.util.List; + +import org.codehaus.plexus.util.cli.StreamConsumer; + +/** + * Helper class to parse the output form help-target-*-list. + * + */ +public class HelpTargetListStreamConsumer implements StreamConsumer { + + private List targets = new ArrayList(); + + /** + * {@inheritDoc} + * iMaker targets should match the following patterns to be selected: [A-za-z0-9\\-_%]+. + */ + @Override + public void consumeLine(String line) { + line = line.trim(); + if (line.matches("^[A-za-z0-9\\-_%]+$")) { + targets.add(line); + } + } + + /** + * Get the list of found targets. + * @return the target list. + */ + public List getTargets() { + return targets; + } + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/IMaker.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/IMaker.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,190 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.imaker; + +import java.io.File; +import java.io.IOException; +import java.util.List; + + +import com.nokia.helium.core.plexus.CommandBase; +import com.nokia.helium.core.plexus.StreamRecorder; + +import org.apache.log4j.Logger; + +/** + * This class implements a wrapper around iMaker. + * It helps to introspect: + *
  • variables + *
  • targets + *
  • configurations + * + */ +public class IMaker extends CommandBase { + private static final String TEMP_ROMBUILD_DIR = "epoc32/rombuild/temp"; + private final Logger log = Logger.getLogger(getClass()); + private File epocroot; + + /** + * Create an iMaker wrapper class with a specific epocroot. + * @param epocroot + */ + public IMaker(File epocroot) { + this.epocroot = epocroot; + + } + + /** + * Creates a temp working dir for the rom image creation. + * @return + * @throws IOException + */ + public File createWorkDir() throws IMakerException { + try { + File tempRootDir = new File(getEpocroot(), TEMP_ROMBUILD_DIR); + tempRootDir.mkdirs(); + File tempDir = File.createTempFile("helium-imaker", "", tempRootDir); + tempDir.delete(); + tempDir.mkdirs(); + return tempDir; + } catch (IOException e) { + throw new IMakerException(e.getMessage(), e); + } + } + + /** + * Epocroot location. + * @return the epocroot location + */ + public File getEpocroot() { + return epocroot; + } + + /** + * Get the iMaker version. + * @return the current iMaker version. + * @throws IMakerException is thrown in case of an iMaker execution error. + */ + public String getVersion() throws IMakerException { + log.debug("getVersion"); + String[] args = new String[1]; + args[0] = "version"; + StreamRecorder rec = new StreamRecorder(); + execute(args, rec); + return rec.getBuffer().toString().trim(); + } + + /** + * Get the value of a particular variable from iMaker configuration. + * @param name the variable name + * @return the value or null if the variable does not exist. + * @throws IMakerException + */ + public String getVariable(String name) throws IMakerException { + log.debug("getVariable: " + name); + String[] args = new String[1]; + args[0] = "print-" + name; + PrintVarSteamConsumer consumer = new PrintVarSteamConsumer(name); + execute(args, consumer); + return consumer.getValue(); + } + + /** + * Get the value of a particular variable from iMaker configuration for a particular + * configuration. + * @param name the variable name + * @return the value or null if the variable does not exist. + * @throws IMakerException + */ + public String getVariable(String name, File configuration) throws IMakerException { + log.debug("getVariable: " + name + " - " + configuration); + String[] args = new String[3]; + args[0] = "-f"; + args[1] = configuration.getAbsolutePath(); + args[2] = "print-" + name; + PrintVarSteamConsumer consumer = new PrintVarSteamConsumer(name); + execute(args, consumer); + return consumer.getValue(); + } + + /** + * Get the list of available iMaker configurations. + * @return a list of configurations + * @throws IMakerException + */ + public List getConfigurations() throws IMakerException { + log.debug("getConfigurations"); + String[] args = new String[1]; + args[0] = "help-config"; + HelpConfigStreamConsumer consumer = new HelpConfigStreamConsumer(); + execute(args, consumer); + return consumer.getConfigurations(); + } + + /** + * Get the a list of target supported by a specific configuration. + * @param configuration the configuration to use + * @return the list of targets. 
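Put together, the IMaker wrapper lets a caller query an iMaker installation without dealing with process handling directly. A hedged usage sketch (the epocroot path, the WORKDIR variable and the configuration path are illustrative, and raw List types are kept because the element types are not visible in the patch text)::

    import java.io.File;
    import java.util.List;

    import com.nokia.helium.imaker.IMaker;
    import com.nokia.helium.imaker.IMakerException;

    public class IMakerQuerySketch {
        public static void main(String[] args) throws IMakerException {
            // The wrapper runs "imaker" with epocroot as its working directory.
            IMaker imaker = new IMaker(new File("C:/s60"));

            String version = imaker.getVersion();           // runs "imaker version"
            String workDir = imaker.getVariable("WORKDIR"); // runs "imaker print-WORKDIR"

            // "imaker help-config": lines starting with '/' and ending with '.mk'.
            List configurations = imaker.getConfigurations();

            // "imaker -f <configuration> help-target-*-list" for one configuration.
            List targets = imaker.getTargets("/epoc32/rom/config/platform/product/image_conf_product.mk");
        }
    }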
+ * @throws IMakerException + */ + public List getTargets(String configuration) throws IMakerException { + log.debug("getConfigurations"); + String[] args = new String[3]; + args[0] = "-f"; + args[1] = configuration; + args[2] = "help-target-*-list"; + HelpTargetListStreamConsumer consumer = new HelpTargetListStreamConsumer(); + execute(args, consumer); + return consumer.getTargets(); + } + + /** + * Get the target list for the configuration. + * @param configuration a File object representing the configuration location. + * @return a list of targets. + * @throws IMakerException + */ + public List getTargets(File configuration) throws IMakerException { + return getTargets(configuration.getAbsolutePath()); + } + + /** + * {@inheritDoc} + */ + @Override + protected String getExecutable() { + return "imaker"; + } + + /** + * {@inheritDoc} + */ + @Override + public File getWorkingDir() { + return getEpocroot(); + } + + /** + * {@inheritDoc} + */ + @Override + protected void throwException(String message, Throwable t) + throws IMakerException { + throw new IMakerException(message, t); + } + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/IMakerException.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/IMakerException.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,43 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.imaker; + +/** + * Exception raise by the iMaker framework. + * + */ +public class IMakerException extends Exception { + + private static final long serialVersionUID = -6918895304070211899L; + + /** + * An exception with message. + * @param message + */ + public IMakerException(String message) { + super(message); + } + + /** + * An exception with message and cause. + * @param message + */ + public IMakerException(String message, Throwable t) { + super(message, t); + } + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/PrintVarSteamConsumer.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/PrintVarSteamConsumer.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,67 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.imaker; + +import org.codehaus.plexus.util.cli.StreamConsumer; + +/** + * Parsing iMaker printvar calls. 
+ * Output from iMaker should match: + * NAME = `some content' + * + */ +public class PrintVarSteamConsumer implements StreamConsumer { + + private String name; + private String value; + private boolean inParsing; + + /** + * Construct a PrintVarSteamConsumer for a variable named by name. + * @param name + */ + public PrintVarSteamConsumer(String name) { + this.name = name; + } + + /** + * {@inheritDoc} + */ + @Override + public void consumeLine(String line) { + String varPrefix = name + " = `"; + if (!inParsing && line.startsWith(varPrefix)) { + value = line.substring(varPrefix.length()); + inParsing = true; + } else if (inParsing) { + value += "\n" + line; + } + if (value != null && value.endsWith("'")) { + value = value.substring(0, value.length() - 1); + inParsing = false; + } + } + + /** + * Get the variable value returned by iMaker. + * @return the variable content return by iMaker. + */ + public String getValue() { + return value; + } + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/Command.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/Command.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,148 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.imaker.ant; + +import java.util.ArrayList; +import java.util.Hashtable; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +/** + * Abstract a command call. The default command will be imaker. + * + */ +public class Command { + // default command is iMaker + private String command = "imaker"; + private List args = new ArrayList(); + private Map variables = new Hashtable(); + private String target = ""; + + /** + * Get the target name. + * @return the target name. + */ + public String getTarget() { + return target; + } + + /** + * Set the command name + * @param target the command name. + */ + public void setTarget(String target) { + this.target = target; + } + + /** + * Get the command name. + * @return the command name. + */ + public String getCommand() { + return command; + } + + /** + * Set the command name + * @param command the command name. + */ + public void setCommand(String command) { + this.command = command; + } + + /** + * Set the list of arguments based on a list of String. + * @param args the arg list. + */ + public void setArguments(List args) { + this.args.clear(); + this.args.addAll(args); + } + + /** + * Append an argument to the argument list. + * @param arg the argument to add. + */ + public void addArgument(String arg) { + this.args.add(arg); + } + + /** + * Get the list of arguments. + * @return the list of arguments + */ + public List getArguments() { + return args; + } + + /** + * Get the map of variables. + * @return a map representing variables for current object. + */ + public Map getVariables() { + return variables; + } + + /** + * Set variables using vars set of variables. 
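The PrintVarSteamConsumer above only starts recording once it sees the NAME = `...' prefix and keeps appending lines until the closing quote, which is how multi-line iMaker variables are captured. A small sketch (the WORKDIR name and the path are illustrative)::

    import com.nokia.helium.imaker.PrintVarSteamConsumer;

    public class PrintVarSketch {
        public static void main(String[] args) {
            PrintVarSteamConsumer consumer = new PrintVarSteamConsumer("WORKDIR");
            // Typical line printed by "imaker print-WORKDIR":
            consumer.consumeLine("WORKDIR = `/epoc32/rombuild/temp'");
            // The value is returned without the surrounding quotes.
            String value = consumer.getValue(); // "/epoc32/rombuild/temp"
        }
    }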
+ * @param vars + */ + public void setVariables(Map vars) { + variables.clear(); + variables.putAll(vars); + } + + /** + * Add all the variables from vars. + * @param vars a set of variables + */ + public void addVariables(Map vars) { + variables.putAll(vars); + } + + /** + * Add a variable to the command. + * @param name the variable name + * @param value the variable value + */ + public void addVariable(String name, String value) { + variables.put(name, value); + } + + /** + * Convert the current object as a command line string. + * The final string will be contains the data in the following + * order: + *
  • command + *
  • arguments + *
  • variables + *
  • target + * @return + */ + public String getCmdLine() { + String cmdLine = getCommand(); + for (String arg : getArguments()) { + cmdLine += " " + arg; + } + for (Entry e : getVariables().entrySet()) { + cmdLine += " " + e.getKey() + "=" + e.getValue(); + } + cmdLine += " " + getTarget(); + return cmdLine; + } +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/Engine.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/Engine.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,45 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.imaker.ant; + +import java.util.List; + +import com.nokia.helium.imaker.IMakerException; +import com.nokia.helium.imaker.ant.taskdefs.IMakerTask; + +/** + * Engine interface. Methods needed by the IMaker task to + * build the roms. + * + */ +public interface Engine { + + /** + * Set the current IMakerTask. + * @param task the task instance. + */ + void setTask(IMakerTask task); + + /** + * Build the Commands. + * The sublist will be build in a serialize way, + * the content of each sublist will be built in parallel. + * @param cmdSet + * @throws IMakerException + */ + void build(List> cmdSet) throws IMakerException; +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/IMakerCommandSet.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/IMakerCommandSet.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,37 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.imaker.ant; + +import java.util.List; + +import com.nokia.helium.imaker.IMaker; + +/** + * This interface describes the API a configuration object should define. + * + */ +public interface IMakerCommandSet { + + /** + * Returns a list of command List. + * The sublist will be build sequentially. Their content can be built + * in parallel. 
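+     *
+     * For illustration only (the command names below are hypothetical, not
+     * part of this changeset), an implementation could shape its result as
+     * follows; both commands of the first sublist may be built in parallel,
+     * while the single command of the second sublist is only built afterwards:
+     *
+     *     List<Command> first = new ArrayList<Command>();
+     *     first.add(coreCommand);
+     *     first.add(langpackCommand);
+     *     List<Command> second = new ArrayList<Command>();
+     *     second.add(signingCommand);
+     *     List<List<Command>> commands = new ArrayList<List<Command>>();
+     *     commands.add(first);
+     *     commands.add(second);
+     *     return commands;
+     *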
+ * @return a list of Command list + */ + List> getCommands(IMaker imaker); + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/antlib.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/antlib.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,36 @@ + + + + + + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/engines/Emake.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/engines/Emake.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,65 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.imaker.ant.engines; + +import java.io.File; + +import com.nokia.helium.core.plexus.CommandBase; +import com.nokia.helium.imaker.IMakerException; + +/** + * Simple emake wrapper based on the CommandBase class. + * + */ +public class Emake extends CommandBase { + + private File workingDir = new File("."); + + /** + * @return emake. + */ + @Override + protected String getExecutable() { + return "emake"; + } + + /** + * {@inheritDoc} + */ + @Override + protected void throwException(String message, Throwable t) throws IMakerException { + throw new IMakerException(message, t); + } + + /** + * Set the working directory where emake should be called. + * @param workingDir the working directory. + */ + public void setWorkingDir(File workingDir) { + this.workingDir = workingDir; + } + + /** + * Get the workingDir defined by the user. + * @return the working dir. + */ + @Override + public File getWorkingDir() { + return workingDir; + } + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/engines/EmakeEngine.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/engines/EmakeEngine.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,250 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. 
+* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.imaker.ant.engines; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.FileNotFoundException; +import java.io.OutputStreamWriter; +import java.io.StringWriter; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Hashtable; +import java.util.List; +import java.util.Map; + +import org.apache.log4j.Logger; +import org.apache.tools.ant.Project; +import org.apache.tools.ant.types.DataType; + +import com.nokia.helium.core.plexus.AntStreamConsumer; +import com.nokia.helium.core.plexus.FileStreamConsumer; +import com.nokia.helium.imaker.IMakerException; +import com.nokia.helium.imaker.ant.Command; +import com.nokia.helium.imaker.ant.Engine; +import com.nokia.helium.imaker.ant.taskdefs.IMakerTask; + +import freemarker.cache.ClassTemplateLoader; +import freemarker.cache.FileTemplateLoader; +import freemarker.template.Configuration; +import freemarker.template.Template; +import freemarker.template.TemplateException; + +/** + * + * Simplest possible definition of the type, e.g: + *
    + * <emakeEngine id="imaker.ec" />
    + * 
    + * + * Emake engine with some custom configuration. + *
     
    + * <emakeEngine id="imaker.ec" >
    + *     <arg value="--emake-annofile=imaker.anno.xml" />
    + * </emakeEngine>
    + * 
    + * + * @ant.type name=emakeEngine category=imaker + */ +public class EmakeEngine extends DataType implements Engine { + private Logger log = Logger.getLogger(getClass()); + private IMakerTask task; + private List customArgs = new ArrayList(); + private File template; + + /** + * Holder for emake custom args. + */ + public class Arg { + private String value; + + /** + * Get the value of the argument. + * @return the argument + */ + public String getValue() { + return value; + } + + /** + * Define the additional command line parameter you want to add to emake + * invocation. + * @param value the additional command line parameter + * @ant.required + */ + public void setValue(String value) { + this.value = value; + } + } + + /** + * {@inheritDoc} + */ + public void build(List> cmdSet) throws IMakerException { + File makefile = null; + try { + // Writing the makefile. + makefile = writeMakefile(cmdSet); + + // Running Emake + runEmake(makefile); + } finally { + if (makefile != null) { + makefile.delete(); + } + } + } + + /** + * Returns the jar file name containing this class + * @return a File object or null if not found. + * @throws IMakerException + */ + protected File getJarFile() throws IMakerException { + URL url = this.getClass().getClassLoader().getResource(this.getClass().getName().replace('.', '/') + ".class"); + if (url.getProtocol().equals("jar") && url.getPath().contains("!/")) { + String fileUrl = url.getPath().split("!/")[0]; + try { + return new File(new URL(fileUrl).getPath()); + } catch (MalformedURLException e) { + throw new IMakerException("Error determining the jar file where " + + this.getClass().getName() + " is located.", e); + } + } + return null; + } + /** + * Run emake using defined makefile. + * @param makefile the makefile to build + * @throws IMakerException + */ + private void runEmake(File makefile) throws IMakerException { + FileStreamConsumer output = null; + if (task.getOutput() != null) { + try { + output = new FileStreamConsumer(task.getOutput()); + } catch (FileNotFoundException e) { + throw new IMakerException("Error creating the stream recorder: " + e.getMessage(), e); + } + } + try { + Emake emake = new Emake(); + emake.setWorkingDir(task.getEpocroot()); + List args = new ArrayList(); + for (Arg arg : customArgs) { + if (arg.getValue() != null) { + args.add(arg.getValue()); + } + } + args.add("-f"); + args.add(makefile.getAbsolutePath()); + args.add("all"); + if (task.isVerbose()) { + emake.addOutputLineHandler(new AntStreamConsumer(task)); + } + emake.addErrorLineHandler(new AntStreamConsumer(task, Project.MSG_ERR)); + if (output != null) { + emake.addOutputLineHandler(output); + emake.addErrorLineHandler(output); + } + emake.execute(args.toArray(new String[args.size()])); + } catch (IMakerException e) { + throw new IMakerException("Error executing emake: " + e.getMessage(), e); + } finally { + if (output != null) { + output.close(); + } + } + } + + /** + * Create the Makefile based on the cmdSet build sequence. 
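+     * If no template is set explicitly, the build_imaker_roms_signing.mk.ftl template shipped next to this class is used. + * The generated makefile is a temporary file created under the epocroot and is removed once the build finishes.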
+ * @param cmdSet + * @return + * @throws IMakerException + * @throws IOException + */ + private File writeMakefile(List> cmdSet) throws IMakerException { + try { + Configuration cfg = new Configuration(); + Template template = null; + if (this.template != null) { + if (!this.template.exists()) { + throw new IMakerException("Could not find template file: " + this.template.getAbsolutePath()); + } + task.log("Loading template: " + this.template.getAbsolutePath()); + cfg.setTemplateLoader(new FileTemplateLoader(this.template.getParentFile())); + template = cfg.getTemplate(this.template.getName()); + } else { + cfg.setTemplateLoader(new ClassTemplateLoader(this.getClass(), "")); + template = cfg.getTemplate("build_imaker_roms_signing.mk.ftl"); + } + File makefile = File.createTempFile("helium-imaker", ".mk", task.getEpocroot()); + makefile.deleteOnExit(); + StringWriter out = new StringWriter(); + Map data = new Hashtable(); + data.put("cmdSets", cmdSet); + data.put("makefile", makefile.getAbsoluteFile()); + data.put("java_home", System.getProperty("java.home")); + File jar = getJarFile(); + if (jar != null) { + task.log("Using " + jar + " as the utility container, make sure the file is available under an emake root."); + data.put("java_utils_classpath", jar.getAbsolutePath()); + } + template.process(data, out); + log.debug(out.getBuffer().toString()); + + OutputStreamWriter output = new OutputStreamWriter(new FileOutputStream(makefile)); + output.append(out.getBuffer().toString()); + output.close(); + return makefile; + } catch (IOException e) { + throw new IMakerException("Error generating the makefile: " + e.getMessage(), e); + } catch (TemplateException e) { + throw new IMakerException("Error while rendering the makefile template: " + e.getMessage(), e); + } + } + + /** + * Add custom parameters for the emake invocation. + * @return a new Arg object. + */ + public Arg createArg() { + Arg arg = new Arg(); + customArgs.add(arg); + return arg; + } + + + /** + * {@inheritDoc} + */ + @Override + public void setTask(IMakerTask task) { + this.task = task; + } + + /** + * Defines an alternate template to use to generate the build sequence for emake. + * @ant.not-required + */ + public void setTemplate(File template) { + this.template = template; + } +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/engines/JavaEngine.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/engines/JavaEngine.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,194 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. 
+* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.imaker.ant.engines; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.Map.Entry; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; + +import org.apache.tools.ant.Project; +import org.apache.tools.ant.BuildException; +import org.apache.tools.ant.types.DataType; + +import com.nokia.helium.core.plexus.StreamRecorder; +import com.nokia.helium.imaker.IMaker; +import com.nokia.helium.imaker.IMakerException; +import com.nokia.helium.imaker.ant.Command; +import com.nokia.helium.imaker.ant.Engine; +import com.nokia.helium.imaker.ant.taskdefs.IMakerTask; + +/** + * Engine purely based on Java. Parallelisation is + * implemented using multithreading. + * + *
    + * <defaultEngine id="imaker.default" threads="4" />
    + * 
    + * + * @ant.type name=defaultEngine category=imaker + */ +public class JavaEngine extends DataType implements Engine { + + private IMakerTask task; + private OutputStreamWriter output; + private int threads = 1; + + /** + * {@inheritDoc} + */ + @Override + public void setTask(IMakerTask task) { + this.task = task; + } + + /** + * Defines the number of iMaker jobs running in + * parallel. + * @ant.not-required Default value is 1. + */ + public void setThreads(int threads) { + this.threads = threads; + } + + /** + * {@inheritDoc} + */ + @Override + public void build(List> cmds) throws IMakerException { + task.log("Building with Ant engine."); + if (threads <= 0) { + throw new BuildException("'threads' must be >= 0. (current value: " + threads + ")"); + } + openLog(); + // Do something with the configurations + for (List cmdlist : cmds) { + task.log("Building command list in parallel."); + if (cmdlist.size() > 0) { + ArrayBlockingQueue queue = new ArrayBlockingQueue(cmdlist.size()); + ThreadPoolExecutor threadPool = new ThreadPoolExecutor(threads, threads, 10, TimeUnit.MILLISECONDS, queue); + task.log("Adding " + cmdlist.size() + " to queue."); + for (final Command cmd : cmdlist) { + // Create a Runnable to wrap the image + // building. + threadPool.execute(new Runnable() { + public void run() { + try { + buildCommand(cmd); + } catch (IMakerException e) { + task.log(e.getMessage(), Project.MSG_ERR); + } + } + }); + } + threadPool.shutdown(); + try { + while (!threadPool.isTerminated()) { + threadPool.awaitTermination(100, TimeUnit.MILLISECONDS); + } + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + } + closeLog(); + } + + + /** + * Build a Command. + * @param cmd + * @throws IMakerException + */ + protected void buildCommand(Command cmd) throws IMakerException { + IMaker imaker = task.getIMaker(); + StreamRecorder rec = new StreamRecorder(); + imaker.addOutputLineHandler(rec); + imaker.addErrorLineHandler(rec); + + rec.consumeLine("-- " + cmd.getCmdLine()); + rec.consumeLine("++ Started at " + new Date()); + rec.consumeLine("+++ HiRes Start " + new Date().getTime()); + + List args = new ArrayList(); + args.addAll(cmd.getArguments()); + // Setting the working dir for the image creation. + File tempDir = imaker.createWorkDir(); + args.add("WORKDIR=" + tempDir.getAbsolutePath()); + // Pushing custom variables + for (Entry e : cmd.getVariables().entrySet()) { + if (e.getKey().equals("WORKDIR")) { + task.log("WORKDIR cannot be defined by the user, the value will be ignored.", Project.MSG_WARN); + } else { + args.add(e.getKey() + "=" + e.getValue()); + } + } + // Setting the target + args.add(cmd.getTarget()); + try { + imaker.execute(args.toArray(new String[args.size()])); + } catch (IMakerException e) { + // logging iMaker execution error to the + // task and the output log. 
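+                // Note: the exception is not rethrown, so a failing image + // does not stop the other commands still being built in parallel.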
+ task.log(e.getMessage(), Project.MSG_ERR); + rec.consumeLine(e.getMessage()); + } finally { + rec.consumeLine("+++ HiRes End " + new Date().getTime()); + rec.consumeLine("++ Finished at " + new Date()); + } + // writing data + writeLog(rec.getBuffer().toString()); + } + + private void openLog() throws IMakerException { + if (task.getOutput() != null) { + try { + output = new OutputStreamWriter(new FileOutputStream(task.getOutput())); + } catch (FileNotFoundException e) { + throw new IMakerException(e.getMessage(), e); + } + } + } + + private synchronized void writeLog(String str) throws IMakerException { + if (output != null) { + try { + output.write(str); + } catch (IOException e) { + throw new IMakerException(e.getMessage(), e); + } + } + } + + private void closeLog() throws IMakerException { + if (output != null) { + try { + output.close(); + } catch (IOException e) { + throw new IMakerException(e.getMessage(), e); + } + } + } +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/engines/build_imaker_roms_signing.mk.ftl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/engines/build_imaker_roms_signing.mk.ftl Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,128 @@ +<#-- +============================================================================ +Name : build_imaker_roms_signing.mk.ftl +Part of : Helium + +Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +All rights reserved. +This component and the accompanying materials are made available +under the terms of the License "Eclipse Public License v1.0" +which accompanies this distribution, and is available +at the URL "http://www.eclipse.org/legal/epl-v10.html". + +Initial Contributors: +Nokia Corporation - initial contribution. + +Contributors: + +Description: + +============================================================================ +--> +############################################################################ +# Makefile generated by the emake engine +############################################################################ + +# Are we using linux? 
+USE_UNIX := $(if $(findstring cmd.exe,$(SHELL))$(findstring mingw,$(call lcase,$(MAKE))),0,1) +EXECUTOR_CLASS = com.nokia.helium.imaker.utils.ParallelExecutor + +# Path conversion macro +true = $(if $(filter-out 0,$(subst 0,0 ,$1)),1) +iif = $(if $(call true,$1),$2,$3) +lcase = $(call tr,$([A-Z]),$([a-z]),$1) +pathconv = $(call iif,$(USE_UNIX),$(subst \,/,$1),$(subst /,\,$1)) + +# Some variables +ROMBUILD_TEMPDIR=$(call pathconv,epoc32/rombuild/temp) + + +$(ROMBUILD_TEMPDIR)/: + -mkdir $(ROMBUILD_TEMPDIR) + +<#assign cid=0> +<#assign iid=0> +<#assign deps=""> +<#list cmdSets as cmds> +<#assign configdeps=""> +<#assign configimagedeps=""> +<#assign firstiid=iid> + +#pragma runlocal +configuration_${cid}-dir: $(ROMBUILD_TEMPDIR)/config_${iid} <#if (cid>0)>configuration_${cid-1} <#if (cmds?size == 0)>; + $(call iif,$(USE_UNIX),touch,@echo off >) $(ROMBUILD_TEMPDIR)/command-dir-list-${cid}.txt + <#list cmds as cmd> + <#assign cmdline=""> + <#list cmd.getArguments() as arg> + <#assign cmdline="${cmdline} ${arg}"> + + <#assign vars=cmd.getVariables()> + <#list vars?keys as var> + <#assign cmdline="${cmdline} ${var}=${vars[var]}"> + + @echo $(call iif,$(USE_UNIX),,cmd /c) ${cmd.getCommand()} WORKDIR=$(ROMBUILD_TEMPDIR)/config_${iid} ${cmdline} ${cmd.getTarget()}-dir >> $(ROMBUILD_TEMPDIR)/command-dir-list-${cid}.txt +<#assign iid=iid+1> + + -@${java_home}/bin/java -cp "${java_utils_classpath}" $(EXECUTOR_CLASS) $(ROMBUILD_TEMPDIR)/command-dir-list-${cid}.txt + + +<#assign iid=firstiid> + <#list cmds as cmd> + <#assign cmdline=""> + <#list cmd.getArguments() as arg> + <#assign cmdline="${cmdline} ${arg}"> + + <#assign vars=cmd.getVariables()> + <#list vars?keys as var> + <#assign cmdline="${cmdline} ${var}=${vars[var]}"> + +# +# Building configuration ${iid}. 
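+# Note: the ...-dir and ...-e2flash steps are batched into command list files
+# and executed locally through the ParallelExecutor helper (#pragma runlocal),
+# while the ...-image rules are ordinary make targets left for emake to schedule.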
+# +$(ROMBUILD_TEMPDIR)/config_${iid}/: $(ROMBUILD_TEMPDIR) + -mkdir $(call pathconv,$(ROMBUILD_TEMPDIR)/config_${iid}) + + +rom_${iid}-image: configuration_${cid}-dir + @echo -- ${cmd.getCmdLine()}-image + -@perl -e "print '++ Started at '.localtime().\"\n\"" + -@perl -e "use Time::HiRes; print '+++ HiRes Start '.Time::HiRes::time().\"\n\";" + -${cmd.getCommand()} WORKDIR=$(ROMBUILD_TEMPDIR)/config_${iid} ${cmdline} ${cmd.getTarget()}-image + -@perl -e "use Time::HiRes; print '+++ HiRes End '.Time::HiRes::time().\"\n\";" + -@perl -e "print '++ Finished at '.localtime().\"\n\"" + +<#assign configimagedeps="${configimagedeps} rom_${iid}-image"> +<#assign iid=iid+1> + + +configuration_${cid}-image: ${configimagedeps} ; + +<#assign iid=firstiid> +#pragma runlocal +configuration_${cid}-e2flash: configuration_${cid}-image + $(call iif,$(USE_UNIX),touch,@echo off >) $(ROMBUILD_TEMPDIR)/command-e2flash-list-${cid}.txt + <#list cmds as cmd> + <#assign cmdline=""> + <#list cmd.getArguments() as arg> + <#assign cmdline="${cmdline} ${arg}"> + + <#assign vars=cmd.getVariables()> + <#list vars?keys as var> + <#assign cmdline="${cmdline} ${var}=${vars[var]}"> + + @echo $(call iif,$(USE_UNIX),,cmd /c) ${cmd.getCommand()} WORKDIR=$(ROMBUILD_TEMPDIR)/config_${iid} ${cmdline} ${cmd.getTarget()}-e2flash >> $(ROMBUILD_TEMPDIR)/command-e2flash-list-${cid}.txt +<#assign iid=iid+1> + + -@${java_home}/bin/java -cp "${java_utils_classpath}" $(EXECUTOR_CLASS) $(ROMBUILD_TEMPDIR)/command-e2flash-list-${cid}.txt + + +configuration_${cid}: <#if (cid>0)>configuration_${cid-1} configuration_${cid}-dir configuration_${cid}-image configuration_${cid}-e2flash ; + + +<#assign deps="configuration_${cid}"> +<#assign cid=cid+1> + + +all: ${deps} ; + +# End of config diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/taskdefs/IMakerTask.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/taskdefs/IMakerTask.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,286 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.imaker.ant.taskdefs; + +import java.io.File; +import java.util.ArrayList; +import java.util.List; + +import org.apache.tools.ant.BuildException; +import org.apache.tools.ant.Project; +import org.apache.tools.ant.Task; +import org.apache.tools.ant.types.DataType; + +import com.nokia.helium.core.plexus.AntStreamConsumer; +import com.nokia.helium.imaker.IMaker; +import com.nokia.helium.imaker.IMakerException; +import com.nokia.helium.imaker.ant.Command; +import com.nokia.helium.imaker.ant.Engine; +import com.nokia.helium.imaker.ant.IMakerCommandSet; +import com.nokia.helium.imaker.ant.engines.JavaEngine; + +/** + * The imaker task will allow you to efficiently use iMaker to + * build rom images in parallel. + * + * The task is actually base on two concepts: + *
      + *
    • content configuration: what needs to be built. + *
    • acceleration engine: how to build roms in an efficient way. + *
    + * + * In the following example the task is configured to use the emake engine + * to accelerate the rom image creation and an imakerconfiguration configuration + * element to configure the content of the building: + * + *
    + *       <hlm:emakeEngine id="imaker.ec" />
    + *       <hlm:imaker epocroot="${epocroot}" 
    + *                      output="${epocroot}/imaker.log"
    + *                      engineRefid="imaker.ec"
    + *                      verbose="true">
+     *             <hlm:imakerconfiguration>
    + *                   <makefileset>
    + *                       <include name="**/product/*ui.mk" />
    + *                   </makefileset>
    + *                   <targetset>
    + *                       <include name="core" />
    + *                       <include name="langpack_01" />
    + *                   </targetset>
+     *             </hlm:imakerconfiguration>
    + *       </hlm:imaker>
    + * 
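+     * + * If the engineRefid attribute is omitted, the task falls back to the built-in Java engine + * (defaultEngine), which runs the iMaker calls with plain Java threads (a single thread by default). + *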
    + * @ant.task name=imaker category=imaker + */ +public class IMakerTask extends Task { + + private File epocroot; + private boolean verbose; + private boolean failOnError = true; + private List commandSets = new ArrayList(); + private String engineRefId; + private AntStreamConsumer stdout = new AntStreamConsumer(this); + private AntStreamConsumer stderr = new AntStreamConsumer(this, Project.MSG_ERR); + private File output; + + /** + * Add iMaker Task compatible configuration. The task will + * accept any Ant type implementing the IMakerCommandSet + * interface like the imakerconfiguration type. + * + * @param cmdSet an iMaker configuration which will defines + * what needs to be built. + * + * @ant.required + */ + public void add(IMakerCommandSet cmdSet) { + commandSets.add(cmdSet); + } + + /** + * Defines the reference id of the engine to use. + * @ant.not-required Default Java implementation will be used. + */ + public void setEngineRefId(String engineRefId) { + this.engineRefId = engineRefId; + } + + /** + * Retrieve the engine to be used. If the engineRefId + * attribute is not defined then the JavaEngine is used + * as the default one. + * @return An instance of engine. + * @throws a BuildException if the engineRefId attribute doesn't define an Engine + * object. + */ + protected Engine getEngine() { + if (engineRefId == null) { + log("Using default engine (Java threading)"); + JavaEngine engine = new JavaEngine(); + engine.setProject(getProject()); + engine.setTask(this); + return engine; + } else { + try { + Engine engine = (Engine)this.getProject().getReference(engineRefId); + engine.setTask(this); + return engine; + } catch (ClassCastException e) { + throw new BuildException("Reference '" + engineRefId + "' is not referencing an Engine configuration."); + } + } + } + + /** + * Get current epocroot location (build environment). + * @return a File object. + */ + public File getEpocroot() { + File epocroot = this.epocroot; + if (epocroot == null) { + epocroot = new File(System.getenv("EPOCROOT")); + if (epocroot == null) { + throw new BuildException("'epocroot' attribute has not been defined."); + } else { + log("Using EPOCROOT: " + epocroot.getAbsolutePath()); + } + } + + if (!epocroot.exists() || !epocroot.isDirectory()) { + throw new BuildException("Invalid epocroot directory: " + epocroot); + } + return epocroot; + } + + /** + * Defines the EPOCROOT location. + * @param epocroot + * @ant.not-required Will use EPOCROOT environment variable if not defined. + */ + public void setEpocroot(File epocroot) { + this.epocroot = epocroot; + } + + /** + * {@inheritDoc} + */ + @Override + public void execute() { + try { + // Simple way to verify if iMaker is installed under + // then environment. + IMaker imaker = getIMaker(); + log("Using iMaker: " + imaker.getVersion()); + + // build the content. + build(); + } catch (IMakerException e) { + if (shouldFailOnError()) { + throw new BuildException(e.getMessage(), e); + } else { + log(e.getMessage(), Project.MSG_ERR); + } + } + } + + /** + * Should the task be verbose. + * @return Returns true if the task should display all iMaker outputs. + */ + public boolean isVerbose() { + return verbose; + } + + /** + * Defines if the task should log all the output through Ant. + * by default only error/warnings are printed. + * @param verbose set the verbosity status. + * @ant.not-required Default is false. + */ + public void setVerbose(boolean verbose) { + this.verbose = verbose; + } + + /** + * Get the output filename. 
+ * @return the output filename. + */ + public File getOutput() { + return output; + } + + /** + * Defined the output log filename. + * @param output + * @ant.not-required + */ + public void setOutput(File output) { + this.output = output; + } + + /** + * Concatenate all the configuration content and + * delegate the building to the engine. + * @throws IMakerException + */ + protected void build() throws IMakerException { + List> cmds = new ArrayList>(); + for (IMakerCommandSet cmdSet : commandSets) { + if (cmdSet instanceof DataType) { + DataType dataType = (DataType)cmdSet; + if (dataType.isReference()) { + cmdSet = (IMakerCommandSet)dataType.getRefid().getReferencedObject(); + } + } + cmds.addAll(cmdSet.getCommands(getIMaker())); + } + if (cmds.size() > 0) { + getEngine().build(cmds); + } else { + log("Nothing to build."); + } + } + + /** + * Get a configured IMaker instance. The created object + * is configured with output stream redirected to + * the task logging. Stderr is always redirected, + * stdout is only redirected if the task is configured + * to be verbose. + * + * @return an IMaker instance. + */ + public IMaker getIMaker() { + return getIMaker(verbose, true); + } + + /** + * Get a configured IMaker instance. The created object + * is configured with output stream redirected to + * the task logging. Stderr is always redirected, + * the stdout will be configured by the verbose parameter. + * @param verbose enable stdout redirection to the task logging. + * @return an IMaker instance. + */ + public IMaker getIMaker(boolean verbose, boolean verboseError) { + IMaker imaker = new IMaker(getEpocroot()); + if (verbose) { + imaker.addOutputLineHandler(stdout); + } + if (verboseError) { + imaker.addErrorLineHandler(stderr); + } + return imaker; + } + + /** + * Defines if the task should fail in case of error. + * @ant.not-required Default is true + */ + public void setFailOnError(boolean failOnError) { + this.failOnError = failOnError; + } + + /** + * Should the task should fail in case of error? + * @return true if the task should fail on error. + */ + public boolean shouldFailOnError() { + return this.failOnError; + } + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/types/Configuration.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/types/Configuration.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,356 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. 
+* +* Contributors: +* +* Description: +* +*/ + +package com.nokia.helium.imaker.ant.types; +import org.apache.tools.ant.BuildException; +import org.apache.tools.ant.DirectoryScanner; +import org.apache.tools.ant.Project; +import org.apache.tools.ant.types.DataType; +import org.apache.tools.ant.types.PatternSet; + +import com.nokia.helium.imaker.IMaker; +import com.nokia.helium.imaker.IMakerException; +import com.nokia.helium.imaker.ant.Command; +import com.nokia.helium.imaker.ant.IMakerCommandSet; + +import java.io.File; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.Vector; +import java.util.regex.Pattern; + +/** + * The imakerconfiguration type enables the build manager to configure iMaker + * builds based on introspection. + * + * The makefileset element configures the filtering of the "imaker help-config" + * command. Then, for each of the configurations found, the targetset elements are used + * to filter the output of the "imaker -f configuration.mk help-target-*-list" + * command. Finally a set of commands is generated. + * + * Each command is then configured with the set of variables defined by the variableset + * elements. Only the WORKDIR variable remains under the task's control, to ensure call safety during + * parallelization. + * + * The variablegroup elements allow you to duplicate the common set of commands + * and apply additional variables to each copy. + * + * Example: + *
    + *     <imakerconfiguration regionalVariation="true">
    + *         <makefileset>
    + *             <include name="**/product/*ui.mk"/>
    + *         </makefileset>
    + *         <targetset>
    + *             <include name="^core$" />
    + *             <include name="langpack_\d+" />
    + *             <include name="^custvariant_.*$" />
    + *             <include name="^udaerase$" />
    + *         </targetset>
    + *         <variableset>
    + *             <variable name="USE_FOTI" value="0"/>
    + *             <variable name="USE_FOTA" value="1"/>
    + *         </variableset>
    + *         <variablegroup>
    + *             <variable name="TYPE" value="rnd"/>
    + *         </variablegroup>
    + *         <variablegroup>
    + *             <variable name="TYPE" value="subcon"/>
    + *         </variablegroup>
    + *     </imakerconfiguration>
    + * 
    + * + * This configuration might produce the following calls : + *
    + * imaker -f /epoc32/rom/config/platform/product/image_conf_product_ui.mk TYPE=rnd USE_FOTI=0 USE_FOTA=1 core
    + * imaker -f /epoc32/rom/config/platform/product/image_conf_product_ui.mk TYPE=subcon USE_FOTI=0 USE_FOTA=1 core
    + * imaker -f /epoc32/rom/config/platform/product/image_conf_product_ui.mk TYPE=rnd USE_FOTI=0 USE_FOTA=1 langpack_01
    + * imaker -f /epoc32/rom/config/platform/product/image_conf_product_ui.mk TYPE=subcon USE_FOTI=0 USE_FOTA=1 langpack_01
    + * 
    + * + * @ant.type name="imakerconfiguration" category="imaker" + */ +public class Configuration extends DataType implements IMakerCommandSet { + + private Vector makefiles = new Vector(); + private Vector selectors = new Vector(); + private Vector targets = new Vector(); + private Vector variables = new Vector(); + private Vector variablegroups = new Vector(); + private boolean regionalVariation; + + /** + * Create a makefileset element. + * Makefileset elements are based on regular Ant PatternSet. + * @return a PatternSet object. + */ + public PatternSet createMakefileSet() { + PatternSet makefile = new PatternSet(); + makefiles.add(makefile); + return makefile; + } + + /** + * Get the list of makefileset element. + * @return a vector of PatternSet objects. + */ + public Vector getMakefileSet() { + return makefiles; + } + + /** + * Add a Makefile selector configuration (e.g: products) + * @param filter + */ + public void add(MakefileSelector filter) { + selectors.add(filter); + } + + /** + * Create a targetset element. + * Targetset elements are based on regular Ant PatternSet. + * @return a PatternSet object. + */ + public PatternSet createTargetSet() { + PatternSet target = new PatternSet(); + targets.add(target); + return target; + } + + /** + * Get the list of targetset. + * @return a vector of PatternSet objects. + */ + public Vector getTargetSet() { + return targets; + } + + /** + * Create a VariableSet element. + * @return a VariableSet object. + */ + public VariableSet createVariableSet() { + VariableSet var = new VariableSet(); + variables.add(var); + return var; + } + + /** + * Create a VariableSet element. + * @return a VariableSet object. + */ + public VariableGroup createVariableGroup() { + VariableGroup var = new VariableGroup(); + variablegroups.add(var); + return var; + } + + /** + * Get the list of variableset. + * @return a vector of VariableSet objects. + */ + public Vector getVariableSet() { + return variables; + } + + /** + * Enables the sorting of images per region. + * @deprecated The usage of this feature is now ignored. + * @param value the state of the regional variation + * @ant.not-required Default is false - The usage of this feature is now ignored. + */ + @Deprecated + public void setRegionalVariation(boolean value) { + log(this.getDataTypeName() + ": the usage of the regionalVariation attribute is now ignored.", Project.MSG_WARN); + regionalVariation = value; + } + + /** + * Get the status of the regional variation enabling. + * @deprecated The usage of this feature is now ignored. + * @return returns true is the regional variation should be enabled. + */ + public boolean getRegionalVariation() { + return regionalVariation; + } + + /** + * Check if name is matching any of the pattern under patterns list. + * @param name the string to match + * @param patterns a list of PatternSet + * @return Returns true if name matches at least one pattern. + */ + protected boolean isIncluded(String name, Vector patterns) { + for (PatternSet patternSet : patterns) { + if (patternSet.isReference()) { + patternSet = (PatternSet) patternSet.getRefid().getReferencedObject(); + } + String[] includes = patternSet.getIncludePatterns(getProject()); + if (includes != null) { + for (String pattern : includes) { + if (Pattern.matches(pattern, name)) { + return true; + } + } + } + } + return false; + } + + /** + * Check if name is matching any of the pattern under patterns list. 
+ * @param name the string to match + * @param patterns a list of PatternSet + * @return Returns true if name matches at least one pattern. + */ + protected boolean isExcluded(String name, Vector patterns) { + for (PatternSet patternSet : patterns) { + if (patternSet.isReference()) { + patternSet = (PatternSet) patternSet.getRefid().getReferencedObject(); + } + String[] excludes = patternSet.getExcludePatterns(getProject()); + if (excludes != null) { + for (String pattern : excludes) { + if (Pattern.matches(pattern, name)) { + return true; + } + } + } + } + return false; + } + + /** + * Get a configured matcher. + * @return a configured makefile matcher. + */ + protected Matcher getMakefileMatcher() { + Matcher matcher = new Matcher(); + List includes = new ArrayList(); + List excludes = new ArrayList(); + for (PatternSet patternSet : makefiles) { + if (patternSet.isReference()) { + patternSet = (PatternSet) patternSet.getRefid().getReferencedObject(); + } + String[] patterns = patternSet.getIncludePatterns(getProject()); + if (patterns != null) { + for (String pattern : patterns) { + includes.add(pattern); + } + } + patterns = patternSet.getExcludePatterns(getProject()); + if (patterns != null) { + for (String pattern : patterns) { + excludes.add(pattern); + } + } + } + matcher.setIncludes(includes.toArray(new String[includes.size()])); + matcher.setExcludes(excludes.toArray(new String[excludes.size()])); + return matcher; + } + + /** + * {@inheritDoc} + */ + @Override + public List> getCommands(IMaker imaker) { + List> cmdSet = new ArrayList>(); + List cmds = new ArrayList(); + // Let's add one fake group. + if (variablegroups.size() == 0) { + variablegroups.add(new VariableGroup()); + } + try { + for (String configuration : getConfigurations(imaker.getConfigurations())) { + log("Including configuration: " + configuration); + for (String target : imaker.getTargets(configuration)) { + if (isIncluded(target, targets) && !isExcluded(target, targets)) { + log("Including target: " + target); + for (VariableGroup group : variablegroups) { + if (group.isReference()) { + group = (VariableGroup)group.getRefid().getReferencedObject(); + } + Command cmd = new Command(); + cmd.setCommand("imaker"); + cmd.addArgument("-f"); + cmd.addArgument(configuration); + // Adding variables + for (VariableSet vs : variables) { + cmd.addVariables(vs.toMap()); + } + // Adding variables from groups + cmd.addVariables(group.toMap()); + cmd.setTarget(target); + cmds.add(cmd); + } + } + } + } + } catch (IMakerException e) { + throw new BuildException(e.getMessage()); + } + // adding all the commands. + if (cmds.size() > 0) { + cmdSet.add(cmds); + } + return cmdSet; + } + + /** + * Select which iMaker configuration should be built. + * @param configurations + * @return + */ + protected Set getConfigurations(List configurations) { + Set result = new HashSet(); + if (makefiles.size() > 0) { + Matcher matcher = getMakefileMatcher(); + for (String configuration : configurations) { + if (matcher.match(configuration)) { + result.add(configuration); + } + } + } + for (MakefileSelector selector : selectors) { + result.addAll(selector.selectMakefile(configurations)); + } + return result; + } + + /** + * Matcher object to filter discovered configurations. + * iMaker configuration. + */ + public class Matcher extends DirectoryScanner { + + /** + * Check is a particular configuration can + * is selected. + * @param path the string to match. + * @return return true is the path is selected. 
+ */ + public boolean match(String path) { + String vpath = path.replace('/', File.separatorChar). + replace('\\', File.separatorChar); + return isIncluded(vpath) && !isExcluded(vpath); + } + } + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/types/ConfigurationSet.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/types/ConfigurationSet.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,95 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ + +package com.nokia.helium.imaker.ant.types; + +import org.apache.tools.ant.types.DataType; + +import com.nokia.helium.imaker.IMaker; +import com.nokia.helium.imaker.ant.Command; +import com.nokia.helium.imaker.ant.IMakerCommandSet; + +import java.util.ArrayList; +import java.util.List; +import java.util.Vector; + +/** + * Set of iMaker configuration. + * + *
    + * <hlm:imakerconfigurationset>
    + *     <imakerconfiguration regionalVariation="true">
    + *         <makefileset>
    + *             <include name="**/product/*ui.mk"/>
    + *         </makefileset>
    + *         <targetset>
    + *             <include name="^core$" />
    + *             <include name="langpack_\d+" />
    + *             <include name="^custvariant_.*$" />
    + *             <include name="^udaerase$" />
    + *         </targetset>
    + *         <variableset>
    + *             <variable name="USE_FOTI" value="0"/>
    + *             <variable name="USE_FOTA" value="1"/>
    + *         </variableset>
    + *         <variablegroup>
    + *             <variable name="TYPE" value="rnd"/>
    + *             <variable name="USE_FOTI" value="0"/>
    + *             <variable name="USE_FOTA" value="1"/>
    + *         </variablegroup>
    + *         <variablegroup>
    + *             <variable name="TYPE" value="subcon"/>
    + *             <variable name="USE_FOTI" value="0"/>
    + *             <variable name="USE_FOTA" value="1"/>
    + *         </variablegroup>
    + *     </imakerconfiguration>
    + * </hlm:imakerconfigurationset>
    + * 
    + * @ant.type name="imakerconfigurationset" category="imaker" + */ +public class ConfigurationSet extends DataType implements IMakerCommandSet { + + private Vector configurations = new Vector(); + + /** + * This method create an iMaker Configuration element. + */ + public Configuration createImakerConfiguration() { + Configuration config = new Configuration(); + configurations.add(config); + return config; + } + + /** + * Get the list of iMaker configuration. + */ + public Vector getImakerConfiguration() { + return configurations; + } + + @Override + public List> getCommands(IMaker imaker) { + List> cmdset = new ArrayList>(); + for (Configuration config : configurations) { + if (config.isReference()) { + config = (Configuration) config.getRefid().getReferencedObject(); + } + cmdset.addAll(config.getCommands(imaker)); + } + return cmdset; + } +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/types/MakefileSelector.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/types/MakefileSelector.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,34 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.imaker.ant.types; + +import java.util.List; + +/** + * Interface used to extend the Makefile introspection of the + * iMaker configuration. + * + */ +public interface MakefileSelector { + + /** + * Select the configurations to be built based on the object settings. + * @param configuration + * @return a list of selected configuration from the input list. + */ + List selectMakefile(List configurations); +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/types/Product.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/types/Product.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,123 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.imaker.ant.types; + +import java.util.ArrayList; +import java.util.List; +import java.util.Vector; + +import org.apache.tools.ant.BuildException; +import org.apache.tools.ant.Project; +import org.apache.tools.ant.types.DataType; + +/** + * The product type will allow you to select iMaker makefile configuration based on + * the product name. The search will be done using the following template: + * image_conf_[product_name][_ui].mk + * + *
    + * <hlm:product list="product1,product2" ui="true" failonerror="false" />
    + * 
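+     * With the example above and ui set to "true", the selector keeps every configuration whose + * file name ends with image_conf_product1_ui.mk or image_conf_product2_ui.mk; since failonerror + * is "false", a product without a matching makefile is only reported as an error instead of failing the build.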
    + * @ant.type name="hlm:product" category="imaker" + */ +public class Product extends DataType implements MakefileSelector { + private String list; + private boolean ui; + private boolean failOnError = true; + + /** + * Defines a comma separated list of product names. + * @param name + */ + public void setList(String list) { + this.list = list; + } + + /** + * Get the list of products + * @return an array of products + */ + public String[] getNames() { + Vector names = new Vector(); + for (String name : this.list.split(",")) { + name = name.trim(); + if (name.length() > 0) { + names.add(name); + } + } + return names.toArray(new String[names.size()]); + } + + public void setUi(boolean ui) { + this.ui = ui; + } + + /** + * Define if we are looking for a ui configuration (will add _ui to the + * makefile name) + * @return + * @ant.not-required Default false + */ + public boolean isUi() { + return ui; + } + + /** + * Shall we fail the build in case of missing config? + * @param failOnError + * @ant.not-required Default true + */ + public void setFailOnError(boolean failOnError) { + this.failOnError = failOnError; + } + + /** + * Shall we fail the build in case of missing config. + * @return a boolean + */ + public boolean isFailOnError() { + return failOnError; + } + + /** + * {@inheritDoc} + */ + @Override + public List selectMakefile(List configurations) { + List result = new ArrayList(); + for (String product : getNames()) { + String endOfString = "image_conf_" + product + (ui ? "_ui" : "") + ".mk"; + boolean foundConfig = false; + for (String config : configurations) { + if (config.endsWith(endOfString)) { + foundConfig = true; + result.add(config); + break; + } + } + if (!foundConfig) { + if (isFailOnError()) { + throw new BuildException("Could not find a valid configuration for " + product); + } else { + log("Could not find a valid configuration for " + product, Project.MSG_ERR); + } + } + } + return result; + } + +} \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/types/Variable.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/types/Variable.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,78 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ + +package com.nokia.helium.imaker.ant.types; + +import org.apache.tools.ant.types.DataType; +import org.apache.tools.ant.BuildException; + +/** + * Configure a variable for iMaker. + * @ant.type name="variable" category="imaker" + */ +public class Variable extends DataType +{ + private String mName; + private String mValue; + + /** + * Set the name of the variable. + * @param name + */ + public void setName(String name) { + mName = name; + } + + + /** + * Get the name of the variable. + * @return name. + */ + public String getName() { + return mName; + } + + /** + * Set the value of the variable. + * @param value + */ + public void setValue(String value) { + mValue = value; + } + + + /** + * Get the value of the variable. + * @return value. 
+ */ + public String getValue() { + return mValue; + } + + /** + * Validate if the configuration is defined properly. + * Throws BuildException in case of error. + */ + public void validate() { + if (getName() == null) { + throw new BuildException("The variable element doesn't define a 'name' attribute."); + } + if (getValue() == null) { + throw new BuildException("The variable element doesn't define a 'value' attribute."); + } + } +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/types/VariableGroup.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/types/VariableGroup.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,34 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.imaker.ant.types; + +/** + * This type is a container for variable configuration. + * A set of command will be generated for each group + * present in the imakerconfiguration. + * + *
    + * <variablegroup>
    + *     <variable name="TYPE" value="rnd" />
    + * </variablegroup>
    + * 
    + * + * @ant.type name=variablegroup category="imaker" + */ +public class VariableGroup extends VariableSet { + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/types/VariableSet.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/ant/types/VariableSet.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,78 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ + +package com.nokia.helium.imaker.ant.types; +import org.apache.tools.ant.types.DataType; + +import java.util.Hashtable; +import java.util.Map; +import java.util.Vector; + +/** + * This type is a container for variable configuration. + * + *
    + * <variableset>
    + *     <variable name="TYPE" value="rnd" />
    + * </variableset>
    + * 
    + * + * @ant.type name="variableset" category="imaker" + */ +public class VariableSet extends DataType { + + private Vector variables = new Vector(); + + /** + * Creates a Variable object. + * @return a Variable object. + */ + public Variable createVariable() { + Variable var = new Variable(); + add(var); + return var; + } + + /** + * Support the addition of a Variable object. + * @param a Variable object. + */ + public void add(Variable var) { + variables.add(var); + } + + /** + * Get the list of Variable object. + * @return a vector of Variable objects + */ + public Vector getVariables() { + return variables; + } + + /** + * Convert the set of variable to a Map object. + * @return the content of that set into a Map object. + */ + public Map toMap() { + Map data = new Hashtable(); + for (Variable var : variables) { + var.validate(); + data.put(var.getName(), var.getValue()); + } + return data; + } +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/utils/ParallelExecutor.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/src/com/nokia/helium/imaker/utils/ParallelExecutor.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,134 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.imaker.utils; + +import java.io.BufferedReader; +import java.io.FileReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.util.ArrayList; +import java.util.List; +import java.util.StringTokenizer; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import java.util.Date; +import java.text.SimpleDateFormat; + +/** + * Simple application which will execute each line from a text file + * as a command. All the command will be executed in parallel. + * Default number of threads is 4. + * + * The implementation must not rely on any external dependencies except JVM and owning jar. + * + */ +public final class ParallelExecutor { + + /** + * Private constructor - not meant to be instantiated. + */ + private ParallelExecutor() { + } + + /** + * Internal class holding a command to + * execute. + * + */ + private static class RunCommand implements Runnable { + private String cmdline; + + /** + * Default constructor + * @param cmdline the command to run + */ + public RunCommand(String cmdline) { + this.cmdline = cmdline; + } + + /** + * Running command line and capturing the output. 
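+         * The output of each command is collected into a buffer and printed to System.out inside + * a single synchronized block, so the logs of commands running in parallel do not interleave.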
+ */ + @Override + public void run() { + StringTokenizer st = new StringTokenizer(cmdline); + String[] cmdArray = new String[st.countTokens()]; + for (int i = 0; st.hasMoreTokens(); i++) { + cmdArray[i] = st.nextToken(); + } + Process p; + try { + p = new ProcessBuilder(cmdArray).redirectErrorStream(true).start(); + BufferedReader in = new BufferedReader(new InputStreamReader(p.getInputStream())); + String line; + StringBuffer buffer = new StringBuffer(); + SimpleDateFormat df = new SimpleDateFormat("EEE MMM d HH:mm:ss yyyy"); + + Date start = new Date(); + buffer.append("++ Started at " + df.format(start) + "\n"); + buffer.append("+++ HiRes Start " + start.getTime() / 1000 + "\n"); + buffer.append("-- " + cmdline + "\n"); + while ((line = in.readLine()) != null) { + buffer.append(line + "\n"); + } + Date end = new Date(); + buffer.append("+++ HiRes End " + end.getTime() / 1000 + "\n"); + buffer.append("++ Finished at " + df.format(end) + "\n"); + synchronized (System.out) { + System.out.print(buffer); + } + } catch (IOException e) { + System.err.println("ERROR: " + e.getMessage()); + } + } + } + + /** + * This is the entry point of the application. + * It will only accept one file name as parameter. + * @param args a list of arguments. + */ + public static void main(String[] args) { + if (args.length == 1) { + try { + List cmds = new ArrayList(); + BufferedReader in = new BufferedReader(new FileReader(args[0])); + String line; + while ((line = in.readLine()) != null) { + if (line.trim().length() > 0) { + cmds.add(line); + } + } + + final ArrayBlockingQueue queue = new ArrayBlockingQueue(cmds.size()); + int numOfProcessor = Runtime.getRuntime().availableProcessors(); + System.out.println("Number of threads: " + numOfProcessor); + ThreadPoolExecutor executor = new ThreadPoolExecutor(numOfProcessor, numOfProcessor, 100, TimeUnit.MILLISECONDS, queue); + for (String cmd : cmds) { + executor.execute(new RunCommand(cmd)); + } + executor.shutdown(); + } catch (IOException e) { + System.err.println("ERROR: " + e.getMessage()); + } + } else { + System.out.println("ParallelExecutor: nothing to execute."); + } + } + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/antunit/test_emakeengine.ant.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/antunit/test_emakeengine.ant.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,155 @@ + + + + Helium Antlib imaker unittests. + + + + + + + + ------------------------------------------------------------------------------- + + + + + + + + + + ------------------------------------------------------------------------------- + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/antunit/test_imaker.ant.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/antunit/test_imaker.ant.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,280 @@ + + + + Helium Antlib imaker unittests. + + + + + + + + ------------------------------------------------------------------------------- + + + + + + + + + + ------------------------------------------------------------------------------- + + + + Will fail because it can't find imaker. + + + + + + + Usin custom epocroot. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/antunit/test_imaker_invalid.ant.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/antunit/test_imaker_invalid.ant.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,82 @@ + + + + Helium Antlib imaker unittests. + + + + + + + + ------------------------------------------------------------------------------- + + + + + + + + + + ------------------------------------------------------------------------------- + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/antunit/test_javaengine.ant.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/antunit/test_javaengine.ant.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,63 @@ + + + + Helium Antlib imaker unittests. + + + + + + + + ------------------------------------------------------------------------------- + + + + + + + + + + ------------------------------------------------------------------------------- + + + + + + + + + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/bld.sh --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/bld.sh Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,25 @@ +#!/bin/csh + +# +# Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +# All rights reserved. +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Nokia Corporation - initial contribution. +# +# Contributors: +# +# Description: +# + + + +module load java/1.6.0 +module load mercurial +export PATH=$PATH:. +setenv ANT_ARGS "-lib ../lib -lib ../../lib -lib ../../core/lib -lib ../../bin/helium-core.jar -lib ../../bin/helium-imaker.jar -lib ../../antlibs" +ant $* diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/build.bat --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/build.bat Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,27 @@ +@echo off + +rem +rem Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +rem All rights reserved. +rem This component and the accompanying materials are made available +rem under the terms of the License "Eclipse Public License v1.0" +rem which accompanies this distribution, and is available +rem at the URL "http://www.eclipse.org/legal/epl-v10.html". +rem +rem Initial Contributors: +rem Nokia Corporation - initial contribution. 
+rem +rem Contributors: +rem +rem Description: +rem + +setlocal +if not defined JAVA_6_HOME ( +set TESTED_JAVA=C:\Apps\j2sdk_1.6.0_02 +) ELSE set TESTED_JAVA=%JAVA_6_HOME% +if exist %TESTED_JAVA% (set JAVA_HOME=%TESTED_JAVA%) +set ANT_ARGS=-lib %CD%\..\lib -lib %CD%\..\..\lib -lib %CD%\..\..\bin\helium-core.jar -lib %CD%\..\..\bin\helium-imaker.jar -lib %CD%\..\..\antlibs +ant %* +endlocal + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/build.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/build.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,35 @@ + + + + Helium Antlib Sysdef unittests. + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/emake_data/cust.mk.ftl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/emake_data/cust.mk.ftl Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,30 @@ +<#-- +============================================================================ +Name : cust.mk.ftl +Part of : Helium + +Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +All rights reserved. +This component and the accompanying materials are made available +under the terms of the License "Eclipse Public License v1.0" +which accompanies this distribution, and is available +at the URL "http://www.eclipse.org/legal/epl-v10.html". + +Initial Contributors: +Nokia Corporation - initial contribution. + +Contributors: + +Description: + +============================================================================ +--> +java_home:${java_home} +java_utils_classpath:${java_utils_classpath} +makefile:${makefile} +<#list cmdSets as cmds> +Group: + <#list cmds as cmd> + + <#list cmd.getArguments() as arg>${arg} ${cmd.getTarget()} + + \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/emake_data/invalid.mk.ftl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/emake_data/invalid.mk.ftl Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,31 @@ +<#-- +============================================================================ +Name : invalid.mk.ftl +Part of : Helium + +Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +All rights reserved. +This component and the accompanying materials are made available +under the terms of the License "Eclipse Public License v1.0" +which accompanies this distribution, and is available +at the URL "http://www.eclipse.org/legal/epl-v10.html". + +Initial Contributors: +Nokia Corporation - initial contribution. + +Contributors: + +Description: + +============================================================================ +--> +java_home:${java_home} +java_utils_classpath:${java_utils_classpath} +makefile:${makefile} +<#list cmdSets as cmds> +Group: + <#list cmds as cmd> + + <#list cmd.getArguments() as arg>${arg} ${cmd.getTarget()} + +<#-- The next line is meant to be invalid! --> + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/epocroot/imaker --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/epocroot/imaker Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,18 @@ +#!/bin/sh +# +# Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +# All rights reserved. 
+# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Nokia Corporation - initial contribution. +# +# Contributors: +# +# Description: +# + +python imaker_mock.p $* diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/epocroot/imaker.bat --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/epocroot/imaker.bat Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,19 @@ +@echo off + +rem +rem Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +rem All rights reserved. +rem This component and the accompanying materials are made available +rem under the terms of the License "Eclipse Public License v1.0" +rem which accompanies this distribution, and is available +rem at the URL "http://www.eclipse.org/legal/epl-v10.html". +rem +rem Initial Contributors: +rem Nokia Corporation - initial contribution. +rem +rem Contributors: +rem +rem Description: +rem + +@python imaker_mock.py %* diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/epocroot/imaker_mock.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/epocroot/imaker_mock.py Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,69 @@ +#============================================================================ +#Name : imaler_mock.py +#Part of : Helium + +#Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +#All rights reserved. +#This component and the accompanying materials are made available +#under the terms of the License "Eclipse Public License v1.0" +#which accompanies this distribution, and is available +#at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +#Initial Contributors: +#Nokia Corporation - initial contribution. +# +#Contributors: +# +#Description: +#=============================================================================== + +import sys +print "iMaker 09.24.01, 10-Jun-2009." + +if sys.argv.count("version"): + print "" + sys.exit(0) + +if sys.argv.count("help-target-*-list") and sys.argv.count("/epoc32/rom/config/platform/product/image_conf_invalid.mk"): + print """mingw_make: /epoc32/rom/config/platform/product/image_conf_invalid.mk: No such file or directory +mingw_make: *** No rule to make target `/epoc32/rom/config/platform/product/image_conf_invalid.mk'. Stop. +*** Error: Command `\epoc32\tools\rom\imaker\mingw_make.exe -R --no-print-directory SHELL="C:\WINNT\system32\cmd.exe" -I + B:/epoc32/rom/config -f B:/epoc32/tools/rom/imaker/imaker.mk TIMESTAMP=2009102317302243 -f /epoc32/rom/config/platform/ +product/image_conf_invalid.mk -f B:/epoc32/tools/rom/imaker/imaker.mk help-target-*-list' failed in `/'. +""" + sys.exit(1) + +if sys.argv.count("help-config"): + print "Finding available configuration file(s):" + print "/epoc32/rom/config/platform/product/image_conf_product.mk" + print "/epoc32/rom/config/platform/product/image_conf_product_ui.mk" + print "" + sys.exit(0) + +if sys.argv.count("help-target-*-list"): + # start with some kind of warnings... 
+ print "B:/epoc32/tools/rom/imaker/imaker_help.mk:55: memory_map_settings2.hrh: No such file or directory" + print "all" + print "core" + print "core-dir" + print "help-%-list" + print "langpack_01" + print "" + sys.exit(0) + +if sys.argv.count("-f") and sys.argv.count("print-VARIABLE"): + print "VARIABLE = `PRODUCT_VALUE'" + print "" + sys.exit(0) + +if sys.argv.count("print-VARIABLE"): + print "VARIABLE = `VALUE'" + print "" + sys.exit(0) + +if sys.argv.count("print-NOTEXISTSVARIABLE"): + print "" + sys.exit(0) + +print "" +sys.exit(0) \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/epocroot_invalid/imaker --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/epocroot_invalid/imaker Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,18 @@ +#!/bin/sh +# +# Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +# All rights reserved. +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Nokia Corporation - initial contribution. +# +# Contributors: +# +# Description: +# + +python imaker_mock.p $* diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/epocroot_invalid/imaker.bat --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/epocroot_invalid/imaker.bat Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,19 @@ +@echo off + +rem +rem Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +rem All rights reserved. +rem This component and the accompanying materials are made available +rem under the terms of the License "Eclipse Public License v1.0" +rem which accompanies this distribution, and is available +rem at the URL "http://www.eclipse.org/legal/epl-v10.html". +rem +rem Initial Contributors: +rem Nokia Corporation - initial contribution. +rem +rem Contributors: +rem +rem Description: +rem + +@python imaker_mock.py %* diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/epocroot_invalid/imaker_mock.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/epocroot_invalid/imaker_mock.py Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,28 @@ +#============================================================================ +#Name : imaler_mock.py +#Part of : Helium + +#Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +#All rights reserved. +#This component and the accompanying materials are made available +#under the terms of the License "Eclipse Public License v1.0" +#which accompanies this distribution, and is available +#at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +#Initial Contributors: +#Nokia Corporation - initial contribution. +# +#Contributors: +# +#Description: +#=============================================================================== + +import sys +print "iMaker 09.24.01, 10-Jun-2009." +sys.stderr.write("""mingw_make: /epoc32/rom/config/platform/product/image_conf_invalid.mk: No such file or directory +mingw_make: *** No rule to make target `/epoc32/rom/config/platform/product/image_conf_invalid.mk'. Stop. 
+*** Error: Command `\epoc32\tools\rom\imaker\mingw_make.exe -R --no-print-directory SHELL="C:\WINNT\system32\cmd.exe" -I + B:/epoc32/rom/config -f B:/epoc32/tools/rom/imaker/imaker.mk TIMESTAMP=2009102317302243 -f /epoc32/rom/config/platform/ +product/image_conf_invalid.mk -f B:/epoc32/tools/rom/imaker/imaker.mk help-target-*-list' failed in `/'. +""") +sys.exit(1) diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/epocroot_task/emake --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/epocroot_task/emake Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,18 @@ +#!/bin/sh +# +# Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +# All rights reserved. +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Nokia Corporation - initial contribution. +# +# Contributors: +# +# Description: +# + +echo $* diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/epocroot_task/emake.cmd --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/epocroot_task/emake.cmd Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,21 @@ +@echo off + +rem +rem Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +rem All rights reserved. +rem This component and the accompanying materials are made available +rem under the terms of the License "Eclipse Public License v1.0" +rem which accompanies this distribution, and is available +rem at the URL "http://www.eclipse.org/legal/epl-v10.html". +rem +rem Initial Contributors: +rem Nokia Corporation - initial contribution. +rem +rem Contributors: +rem +rem Description: +rem + +echo %* + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/epocroot_task/imaker --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/epocroot_task/imaker Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,18 @@ +#!/bin/sh +# +# Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +# All rights reserved. +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Nokia Corporation - initial contribution. +# +# Contributors: +# +# Description: +# + +python imaker_mock.p $* diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/epocroot_task/imaker.bat --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/epocroot_task/imaker.bat Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,19 @@ +@echo off + +rem +rem Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +rem All rights reserved. +rem This component and the accompanying materials are made available +rem under the terms of the License "Eclipse Public License v1.0" +rem which accompanies this distribution, and is available +rem at the URL "http://www.eclipse.org/legal/epl-v10.html". +rem +rem Initial Contributors: +rem Nokia Corporation - initial contribution. 
+rem +rem Contributors: +rem +rem Description: +rem + +@python imaker_mock.py %* diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/epocroot_task/imaker_mock.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/epocroot_task/imaker_mock.py Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,167 @@ +#============================================================================ +#Name : imaler_mock.py +#Part of : Helium + +#Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +#All rights reserved. +#This component and the accompanying materials are made available +#under the terms of the License "Eclipse Public License v1.0" +#which accompanies this distribution, and is available +#at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +#Initial Contributors: +#Nokia Corporation - initial contribution. +# +#Contributors: +# +#Description: +#=============================================================================== + +import sys +print "iMaker 09.24.01, 10-Jun-2009." + +if sys.argv.count("version"): + print "" + sys.exit(0) + +# two product supported by the mock +if sys.argv.count("help-config"): + print "Finding available configuration file(s):" + print "/epoc32/rom/config/platform/product/image_conf_product.mk" + print "/epoc32/rom/config/platform/product/image_conf_product_ui.mk" + print "" + sys.exit(0) + +# List of targets +if sys.argv.count("help-target-*-list"): + # start with some kind of warnings... + print "all" + print "core" + print "core-dir" + print "help-%-list" + print "langpack_01" + print "" + sys.exit(0) + + + + +def print_log(log): + for line in log: + print line + + +core_log = ["iMaker 09.42.01, 13-Oct-2009.", +"Generating content with ConE", +"* Writing tmp2.oby - result of substitution phase", +"* Writing tmp3.oby - result of reorganisation phase", +"* Writing tmp4.oby - result of Plugin stage", +"* Writing tmp5.oby - result of choosing language-specific files", +"* Writing tmp7.oby - result of problem-suppression phase", +"* Writing tmp8.oby - result of bitmap conversion phase", +"* Removing previous image and logs...", +"* Writing tmp9.oby - result of cleaning phase", +"* Writing NAME_VERSION04_rnd.oby - final OBY file", +"* Writing NAME_VERSION04_rnd.rom.oby - final OBY file", +"* Writing NAME_VERSION04_rnd.dir - ROM directory listing", +"-------------------------------------------------------------------------------", +"Total duration: 01:42 Status: OK", +"===============================================================================", +] + +if sys.argv.count("core"): + print_log(core_log) + print "" + sys.exit(0) + +rof2_log = ["iMaker 09.42.01, 13-Oct-2009.", +"Generating content with ConE", +"Variant target USE_VARIANTBLD = `2'", +"Variant directory VARIANT_DIR = `/output/release_flash_images/product/rnd/langpack/langpack_01/rofs2/temp/cone'", +"Variant config makefile VARIANT_MK = `/output/release_flash_images/product/rnd/langpack/langpack_01/rofs2/temp/cone/language_variant.mk'", +"Variant include directory VARIANT_INCDIR = `/output/release_flash_images/product/rnd/langpack/langpack_01/rofs2/temp/cone/include'", +"Variant SIS directory VARIANT_SISDIR = -", +"Variant operator cache dir VARIANT_OPCDIR = -", +"Variant widget preinst dir VARIANT_WGZDIR = -", +"Variant zip content dir VARIANT_ZIPDIR = -", +"Variant copy content dir VARIANT_CPDIR = `/output/release_flash_images/product/rnd/langpack/langpack_01/rofs2/temp/cone/content'", +"Variant 
output directory VARIANT_OUTDIR = `/output/release_flash_images/product/rnd/langpack/langpack_01/rofs2/variant'", +"Generating oby(s) for Variant image creation", +"Copying copy content directory", +"Generating Feature manager file(s)", +"Generating file(s) for ROFS2 image creation", +"Generating language files for Language Package image creation", +"Creating ROFS2 SOS image", +"", +"ROM_IMAGE[0] non-xip size=0x00000000 xip=0 compress=0 extension=0 composite=none uncompress=0", +"ROM_IMAGE[1] dummy1 size=0x10000000 xip=0 compress=0 extension=0 composite=none uncompress=0 ", +"ROM_IMAGE[2] rofs2 size=0x10000000 xip=0 compress=0 extension=0 composite=none uncompress=0 ", +"ROM_IMAGE[3] dummy3 size=0x10000000 xip=0 compress=0 extension=0 composite=none uncompress=0 ", +"* Writing tmp2.oby - result of substitution phase", +"* Writing tmp3.oby - result of reorganisation phase", +"* Writing tmp4.oby - result of Plugin stage", +"* Writing tmp5.oby - result of choosing language-specific files", +"Created ecom-2-0.spi", +"Created ecom-2-1.s06", +"Created ecom-2-2.s15", +"Created ecom-2-3.s07", +"Created ecom-2-4.s08", +"Created ecom-2-5.s09", +"Created ecom-2-6.s01", +"* Writing tmp6.oby - result of SPI stage", +"override.pm: ------------------------------------------------------------------", +"Handling overrides...Replace ROM_IMAGE[2] `data=\epoc32\data\Z\Resource\bootdata\languages.txt resource\Bootdata\languages.txt' with `data=I:/output/release_flash_images/product/rnd/langpack/langpack_01/rofs2/NAME_VERSION04_rnd_rofs2_languages.txt resource\Bootdata\languages.txt'", +"Replace ROM_IMAGE[2] `data=\epoc32\data\Z\Resource\versions\lang.txt resource\versions\lang.txt' with `data=I:/output/release_flash_images/product/rnd/langpack/langpack_01/rofs2/NAME_VERSION04_rnd_rofs2_lang.txt resource\versions\lang.txt'", +"Replace ROM_IMAGE[2] `data=\epoc32\data\Z\Resource\versions\langsw.txt resource\versions\langsw.txt' with `data=I:/output/release_flash_images/product/rnd/langpack/langpack_01/rofs2/NAME_VERSION04_rnd_rofs2_langsw.txt resource\versions\langsw.txt'", +"override.pm: Duration: 1 seconds ----------------------------------------------", +"obyparse.pm: ------------------------------------------------------------------", +"Finding include hierarchy from tmp1.oby", +"Found 730 different include files", +"Finding SPI input files from tmp5.oby", +"Found 103 SPI input files", +"Reading UDEB files from /epoc32/rombuild/mytraces.txt", +"Found 0 entries", +"Finding ROM-patched components", +"Found 0 ROM-patched components", +"obyparse.pm: Duration: 2 seconds ----------------------------------------------", +"* Writing tmp7.oby - result of problem-suppression phase", +"* Writing tmp8.oby - result of bitmap conversion phase", +"* Removing previous image and logs...", +"* Writing tmp9.oby - result of cleaning phase", +"* Writing NAME_VERSION04_rnd.oby - final OBY file", +"* Writing NAME_VERSION04_rnd.rofs2.oby - final OBY file", +"* Writing NAME_VERSION04_rnd.dir - ROM directory listing", +"* Executing rofsbuild -slog -loglevel1 NAME_VERSION04_rnd.rofs2.oby", +"The number of processors (4) is used as the number of concurrent jobs.", +"", +"ROFSBUILD - Rofs/Datadrive image builder V2.6.3", +"Copyright (c) 1996-2009 Nokia Corporation.", +"", +"WARNING: Unknown keyword 'OM_IMAGE[0]'. Line 31 ignored", +"WARNING: Unknown keyword '-----------------------------------------------------------'. Line 2464 ignored", +"WARNING: Unknown keyword 'OM_IMAGE[0]'. 
Line 31 ignored", +"WARNING: Unknown keyword '-----------------------------------------------------------'. Line 2464 ignored", +"* rofsbuild failed", +"", +"*** Error: (S:ROFS2,C:1,B:1,K:0,V:1): Command `buildrom -loglevel1 -v -nosymbols -DFEATUREVARIANT=product -fm=/epoc32/include/s60regionalfeatures.xml -es60ibymacros -elocalise -oNAME_VERSION04_rnd.img I:/output/release_flash_images/product/rnd/langpack/langpack_01/rofs2/NAME_VERSION04_rnd_rofs2_master.oby' failed (1) in `/output/release_flash_images/product/rnd/langpack/langpack_01/rofs2'.", +"===============================================================================", +"Target: langpack_01 Duration: 01:40 Status: FAILED", +"ConE output dir = `/output/release_flash_images/product/rnd/langpack/langpack_01/rofs2/temp/cone'", +"ConE log file = `/output/release_flash_images/product/rnd/langpack/langpack_01/rofs2/NAME_VERSION04_rnd_cone_langpack_01.log'", +"ROFS2 dir = `/output/release_flash_images/product/rnd/langpack/langpack_01/rofs2'", +"ROFS2 symbols = `/output/release_flash_images/product/rnd/langpack/langpack_01/rofs2/NAME_VERSION04_rnd.rofs2.symbol'", +"ROFS2 flash = `/output/release_flash_images/product/rnd/langpack/langpack_01/NAME_VERSION04_rnd.rofs2.fpsx'", +"-------------------------------------------------------------------------------", +"Total duration: 01:42 Status: FAILED", +"===============================================================================", +] + +if sys.argv.count("langpack_01"): + print_log(rof2_log) + sys.stderr.write("*** Error: (S:ROFS2,C:1,B:1,K:0,V:1): Command `buildrom -loglevel1 -v -nosymbols -DFEATUREVARIANT=product -fm=/epoc32/include/s60regionalfeatures.xml -es60ibymacros -elocalise -oNAME_VERSION04_rnd.img /output/release_flash_images/product/rnd/langpack/langpack_01/rofs2/NAME_VERSION04_rnd_rofs2_master.oby' failed (1) in `/output/release_flash_images/product/rnd/langpack/langpack_01/rofs2'.\n") + print "" + sys.exit(1) + + +print "" +sys.exit(0) diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/parallelexecutor_data/linux.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/parallelexecutor_data/linux.txt Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,2 @@ +ls -l +ls -l diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/parallelexecutor_data/windows.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/parallelexecutor_data/windows.txt Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,2 @@ +cmd /c dir +cmd /c dir diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/src/com/nokia/helium/imaker/tests/TestHelpConfigStreamConsumer.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/src/com/nokia/helium/imaker/tests/TestHelpConfigStreamConsumer.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,58 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. 
+* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.imaker.tests; + +import java.io.File; + +import org.junit.*; + +import com.nokia.helium.imaker.HelpConfigStreamConsumer; + +import static org.junit.Assert.*; + +/** + * Test the HelpTargetStreamConsumer. + * + */ +public class TestHelpConfigStreamConsumer { + + /** + * Checking if the consumer is parsing correctly the output. + */ + @Test + public void introspectConfiguration() { + HelpConfigStreamConsumer consumer = new HelpConfigStreamConsumer(); + consumer.consumeLine("iMaker 09.24.01, 10-Jun-2009."); + consumer.consumeLine("Finding available configuration file(s):"); + consumer.consumeLine("/epoc32/rom/config/platform/product/image_conf_product.mk"); + consumer.consumeLine("/epoc32/rom/config/platform/product/image_conf_product_ui.mk"); + consumer.consumeLine(""); + + // Verifying string output + String[] expected = new String[2]; + expected[0] = "/epoc32/rom/config/platform/product/image_conf_product.mk"; + expected[1] = "/epoc32/rom/config/platform/product/image_conf_product_ui.mk"; + assertArrayEquals(expected, consumer.getConfigurations().toArray(new String[2])); + + // Verifying the file output + File[] expectedFile = new File[2]; + expectedFile[0] = new File(new File("."), "/epoc32/rom/config/platform/product/image_conf_product.mk"); + expectedFile[1] = new File(new File("."), "/epoc32/rom/config/platform/product/image_conf_product_ui.mk"); + assertArrayEquals(expectedFile, consumer.getConfigurations(new File(".")).toArray(new File[2])); + } +} + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/src/com/nokia/helium/imaker/tests/TestIMaker.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/src/com/nokia/helium/imaker/tests/TestIMaker.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,141 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.imaker.tests; + +import static org.junit.Assert.*; + +import java.io.File; +import java.util.List; + +import org.junit.Test; + +import com.nokia.helium.imaker.IMaker; +import com.nokia.helium.imaker.IMakerException; + +/** + * Testing IMaker class. + */ +public class TestIMaker { + + private File epocroot = new File(System.getProperty("testdir"), "tests/epocroot"); + + /** + * Test the getVersion is retrieving the output from imaker correctly. + * @throws IMakerException + */ + @Test + public void testGetVersion() throws IMakerException { + String expectedVersion = "iMaker 09.24.01, 10-Jun-2009."; + IMaker imaker = new IMaker(epocroot); + assertEquals(expectedVersion, imaker.getVersion()); + } + + /** + * Test the introspection of an existing variable. + * @throws IMakerException + */ + @Test + public void testGetVariable() throws IMakerException { + IMaker imaker = new IMaker(epocroot); + assertEquals("VALUE", imaker.getVariable("VARIABLE")); + } + + /** + * Test the introspection of an existing variable. 
+ * @throws IMakerException + */ + @Test + public void testGetVariableFromConfiguration() throws IMakerException { + IMaker imaker = new IMaker(epocroot); + assertEquals("PRODUCT_VALUE", imaker.getVariable("VARIABLE", new File("/epoc32/rom/config/platform/product/image_conf_product.mk"))); + } + + /** + * Test the introspection of a non-existing variable. + * @throws IMakerException + */ + @Test + public void testGetNotExistingVariable() throws IMakerException { + IMaker imaker = new IMaker(epocroot); + assertEquals(null, imaker.getVariable("NOTEXISTINGVARIABLE")); + } + + /** + * Test the introspection of existing configurations. + * @throws IMakerException + */ + @Test + public void testGetConfigurations() throws IMakerException { + IMaker imaker = new IMaker(epocroot); + String[] expected = new String[2]; + expected[0] = "/epoc32/rom/config/platform/product/image_conf_product.mk"; + expected[1] = "/epoc32/rom/config/platform/product/image_conf_product_ui.mk"; + assertArrayEquals(expected, imaker.getConfigurations().toArray(new String[2])); + } + + /** + * Test the introspection of existing target for a configuration. + * @throws IMakerException + */ + @Test + public void testGetTargets() throws IMakerException { + IMaker imaker = new IMaker(epocroot); + List targets = imaker.getTargets("/epoc32/rom/config/platform/product/image_conf_product.mk"); + + String[] expected = new String[5]; + expected[0] = "all"; + expected[1] = "core"; + expected[2] = "core-dir"; + expected[3] = "help-%-list"; + expected[4] = "langpack_01"; + assertArrayEquals(expected, targets.toArray(new String[5])); + } + + /** + * Test the introspection of existing target for a configuration using file + * object. + * @throws IMakerException + */ + @Test + public void testGetTargetsFromFile() throws IMakerException { + IMaker imaker = new IMaker(epocroot); + List targets = imaker.getTargets(new File("/epoc32/rom/config/platform/product/image_conf_product.mk")); + + String[] expected = new String[5]; + expected[0] = "all"; + expected[1] = "core"; + expected[2] = "core-dir"; + expected[3] = "help-%-list"; + expected[4] = "langpack_01"; + assertArrayEquals(expected, targets.toArray(new String[5])); + } + + /** + * Test the introspection of existing target for a configuration. + * @throws IMakerException + */ + @Test + public void testGetTargetsWithInvalidProductConf() throws IMakerException { + IMaker imaker = new IMaker(epocroot); + try { + imaker.getTargets("/epoc32/rom/config/platform/product/image_conf_invalid.mk"); + fail("We should catch a failure from iMaker."); + } catch(IMakerException e) { + // Exception should be raised + } + } +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/src/com/nokia/helium/imaker/tests/TestParallelExecutor.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/src/com/nokia/helium/imaker/tests/TestParallelExecutor.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,53 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. 
+* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.imaker.tests; + +import java.io.File; +import org.junit.Test; +import com.nokia.helium.imaker.utils.ParallelExecutor; + +/** + * Basic testing of the ParallelExecutor + * + */ +public class TestParallelExecutor { + private File testdir = new File(System.getProperty("testdir")); + + /** + * Nothing should happen. + */ + @Test + public void executionWithNoArgs() { + String args[] = new String[0]; + ParallelExecutor.main(args); + } + + /** + * Will list current directory content twice. + */ + @Test + public void executionWithTextFile() { + String args[] = new String[1]; + if (System.getProperty("os.name").toLowerCase().startsWith("win")) { + args[0] = new File(testdir, "tests/parallelexecutor_data/windows.txt").getAbsolutePath(); + } else { + args[0] = new File(testdir, "tests/parallelexecutor_data/linux.txt").getAbsolutePath(); + } + ParallelExecutor.main(args); + } + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/imaker/tests/src/com/nokia/helium/imaker/tests/TestPrintVarStreamConsumer.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/imaker/tests/src/com/nokia/helium/imaker/tests/TestPrintVarStreamConsumer.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,46 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.imaker.tests; + +import org.junit.*; + +import com.nokia.helium.imaker.PrintVarSteamConsumer; + +import static org.junit.Assert.*; + +public class TestPrintVarStreamConsumer { + + @Test + public void readSimpleVar() { + PrintVarSteamConsumer consumer = new PrintVarSteamConsumer("WORKDIR"); + consumer.consumeLine("iMaker 09.24.01, 10-Jun-2009."); + consumer.consumeLine("WORKDIR = `/.'"); + consumer.consumeLine(""); + assertEquals(consumer.getValue(), "/."); + } + + @Test + public void readMultilineVar() { + PrintVarSteamConsumer consumer = new PrintVarSteamConsumer("LONGVAR"); + consumer.consumeLine("iMaker 09.24.01, 10-Jun-2009."); + consumer.consumeLine("LONGVAR = `some text"); + consumer.consumeLine("second line"); + consumer.consumeLine("end of text'"); + consumer.consumeLine(""); + assertEquals(consumer.getValue(), "some text\nsecond line\nend of text"); + } +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/lib/dom4j-1.6.1.jar Binary file buildframework/helium/external/helium-antlib/lib/dom4j-1.6.1.jar has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/lib/plexus-utils-1.5.6.jar Binary file buildframework/helium/external/helium-antlib/lib/plexus-utils-1.5.6.jar has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/bld.bat --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/bld.bat Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,60 @@ +@echo off + +rem +rem Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +rem All rights reserved. 
+rem This component and the accompanying materials are made available +rem under the terms of the License "Eclipse Public License v1.0" +rem which accompanies this distribution, and is available +rem at the URL "http://www.eclipse.org/legal/epl-v10.html". +rem +rem Initial Contributors: +rem Nokia Corporation - initial contribution. +rem +rem Contributors: +rem +rem Description: +rem + +setlocal +if not defined JAVA_6_HOME ( +set TESTED_JAVA=C:\Apps\j2sdk_1.6.0_02 +) ELSE set TESTED_JAVA=%JAVA_6_HOME% +if exist %TESTED_JAVA% (set JAVA_HOME=%TESTED_JAVA%) + +REM Configure Ant +if not defined TESTED_ANT_HOME ( +set TESTED_ANT_HOME=C:\Apps\ant_1.7 +) +if exist %TESTED_ANT_HOME% (set ANT_HOME=%TESTED_ANT_HOME%) + +REM Configure the expected Ant Version details below +SET expMajorVer=1 +SET expMinorVer=7 + +rem *** Verify Ant Version *** +rem -- Run the 'ant -version' command and capture the output to a variable +for /f "tokens=*" %%a in ('ant -version') do (set antversion=%%a) +echo *** Installed Version : %antversion% + +rem -- Parse the version string obtained above and get the version number +for /f "tokens=4 delims= " %%a in ("%antversion%") do set val=%%a +rem -- Parse the version number delimiting the '.' and set the major and +rem minor versions +for /f "tokens=1-2 delims=." %%a in ("%val%") do ( +set /A majorVersion=%%a +set /A minorVersion=%%b +) +rem -- Check whether major version is greater than or equal to the expected. +if %majorVersion% geq %expMajorVer% ( +rem -- if major version is valid, check minor version. If minor version is less +rem than expected display message and abort the execution. +if %minorVersion% lss %expMinorVer% (echo *** Incorrect version of Ant found. Please check you have atleast Ant 1.7.0 & goto :errorstop ) +) + +ant %* +endlocal + +:errorstop +@echo *** Build aborted with error +exit /b 1 \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/build.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/build.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,38 @@ + + + + Helium Antlib logging. + + + + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/antlib.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/antlib.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/helium.antlib.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/helium.antlib.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,28 @@ + + + + + Ant task definition declarations. + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/listener/AntLogRecorderEntry.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/listener/AntLogRecorderEntry.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,408 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. 
+* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.logger.ant.listener; + +import java.io.PrintStream; + +import org.apache.tools.ant.BuildEvent; +import org.apache.tools.ant.BuildLogger; +import org.apache.tools.ant.DefaultLogger; +import org.apache.tools.ant.Project; +import org.apache.tools.ant.SubBuildListener; +import org.apache.tools.ant.util.StringUtils; +import org.apache.tools.ant.BuildException; + +import java.io.FileOutputStream; +import java.io.IOException; +import java.util.Vector; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * This is a class that represents a recorder. This is the listener to the + * build process. + * + * @since Ant 1.4 + */ +public class AntLogRecorderEntry implements BuildLogger, SubBuildListener { + + ////////////////////////////////////////////////////////////////////// + // ATTRIBUTES + + /** The name of the file associated with this recorder entry. */ + private String filename; + /** The state of the recorder (recorder on or off). */ + private boolean record = true; + /** The current verbosity level to record at. */ + private int loglevel = Project.MSG_INFO; + /** The output PrintStream to record to. */ + private PrintStream out; + /** The start time of the last know target. */ + private long targetStartTime; + /** Strip task banners if true. */ + private boolean emacsMode; + /** project instance the recorder is associated with */ + private Project project; + + private Pattern pattern; + + private Vector logRegExps = new Vector(); + + + ////////////////////////////////////////////////////////////////////// + // CONSTRUCTORS / INITIALIZERS + + /** + * @param name The name of this recorder (used as the filename). + */ + public AntLogRecorderEntry(String name) { + targetStartTime = System.currentTimeMillis(); + filename = name; + } + + ////////////////////////////////////////////////////////////////////// + // ACCESSOR METHODS + + /** + * @return the name of the file the output is sent to. + */ + public String getFilename() { + return filename; + } + + /** + * Turns off or on this recorder. + * + * @param state true for on, false for off, null for no change. + */ + public void setRecordState(Boolean state) { + if (state != null) { + flush(); + record = state.booleanValue(); + } + } + + /** + * To set the regexp to filter the logging. + * @param regexp + */ + public void addRegexp(String regexp) { + logRegExps.add(regexp); + } + + /** + * To clear all regexp set. + */ + public void resetRegExp() { + logRegExps.clear(); + } + + /** + * @see org.apache.tools.ant.BuildListener#buildStarted(BuildEvent) + */ + /** {@inheritDoc}. */ + public void buildStarted(BuildEvent event) { + log("> BUILD STARTED", Project.MSG_DEBUG); + } + + /** + * @see org.apache.tools.ant.BuildListener#buildFinished(BuildEvent) + */ + /** {@inheritDoc}. 
*/ + public void buildFinished(BuildEvent event) { + log("< BUILD FINISHED", Project.MSG_DEBUG); + + if (record && out != null) { + Throwable error = event.getException(); + + if (error == null) { + out.println(StringUtils.LINE_SEP + "BUILD SUCCESSFUL"); + } else { + out.println(StringUtils.LINE_SEP + "BUILD FAILED" + + StringUtils.LINE_SEP); + error.printStackTrace(out); + } + } + cleanup(); + } + + /** + * Cleans up any resources held by this recorder entry at the end + * of a subbuild if it has been created for the subbuild's project + * instance. + * + * @param event the buildFinished event + * + * @since Ant 1.6.2 + */ + public void subBuildFinished(BuildEvent event) { + if (event.getProject() == project) { + cleanup(); + } + } + + /** + * Empty implementation to satisfy the BuildListener interface. + * + * @param event the buildStarted event + * + * @since Ant 1.6.2 + */ + public void subBuildStarted(BuildEvent event) { + } + + /** + * @see org.apache.tools.ant.BuildListener#targetStarted(BuildEvent) + */ + /** {@inheritDoc}. */ + public void targetStarted(BuildEvent event) { + log(">> TARGET STARTED -- " + event.getTarget(), Project.MSG_DEBUG); + log(StringUtils.LINE_SEP + event.getTarget().getName() + ":", + Project.MSG_INFO); + targetStartTime = System.currentTimeMillis(); + } + + /** + * @see org.apache.tools.ant.BuildListener#targetFinished(BuildEvent) + */ + /** {@inheritDoc}. */ + public void targetFinished(BuildEvent event) { + log("<< TARGET FINISHED -- " + event.getTarget(), Project.MSG_DEBUG); + + String time = formatTime(System.currentTimeMillis() - targetStartTime); + + log(event.getTarget() + ": duration " + time, Project.MSG_VERBOSE); + flush(); + } + + /** + * @see org.apache.tools.ant.BuildListener#taskStarted(BuildEvent) + */ + /** {@inheritDoc}. */ + public void taskStarted(BuildEvent event) { + log(">>> TASK STARTED -- " + event.getTask(), Project.MSG_DEBUG); + } + + /** + * @see org.apache.tools.ant.BuildListener#taskFinished(BuildEvent) + */ + /** {@inheritDoc}. */ + public void taskFinished(BuildEvent event) { + log("<<< TASK FINISHED -- " + event.getTask(), Project.MSG_DEBUG); + flush(); + } + + /** + * @see org.apache.tools.ant.BuildListener#messageLogged(BuildEvent) + */ + /** {@inheritDoc}. */ + public void messageLogged(BuildEvent event) { + log("--- MESSAGE LOGGED", Project.MSG_DEBUG); + + StringBuffer buf = new StringBuffer(); + + if (event.getTask() != null) { + String name = event.getTask().getTaskName(); + + if (!emacsMode) { + String label = "[" + name + "] "; + int size = DefaultLogger.LEFT_COLUMN_SIZE - label.length(); + + for (int i = 0; i < size; i++) { + buf.append(" "); + } + buf.append(label); + } + } + String messgeToUpdate = filterMessage(event.getMessage()); + buf.append(messgeToUpdate); + log(buf.toString(), event.getPriority()); + } + + + /** + * To replace regExp matching with ****. + * @param message + * @return + */ + private String filterMessage(String message) { + for (String regExp : logRegExps) { + pattern = Pattern.compile(regExp); + if (pattern != null) { + Matcher match = pattern.matcher(message); + message = match.replaceAll("********"); + } + } + return message; + } + + + /** + * The thing that actually sends the information to the output. + * + * @param mesg The message to log. + * @param level The verbosity level of the message. 
+ */ + private void log(String mesg, int level) { + if (record && (level <= loglevel) && out != null) { + out.println(mesg); + } + } + + private void flush() { + if (record && out != null) { + out.flush(); + } + } + + /** + * @see BuildLogger#setMessageOutputLevel(int) + */ + /** {@inheritDoc}. */ + public void setMessageOutputLevel(int level) { + if (level >= Project.MSG_ERR && level <= Project.MSG_DEBUG) { + loglevel = level; + } + } + + /** + * @see BuildLogger#setOutputPrintStream(PrintStream) + */ + /** {@inheritDoc}. */ + public void setOutputPrintStream(PrintStream output) { + closeFile(); + out = output; + } + + + /** + * @see BuildLogger#setEmacsMode(boolean) + */ + /** {@inheritDoc}. */ + public void setEmacsMode(boolean emacsMode) { + this.emacsMode = emacsMode; + } + + + /** + * @see BuildLogger#setErrorPrintStream(PrintStream) + */ + /** {@inheritDoc}. */ + public void setErrorPrintStream(PrintStream err) { + setOutputPrintStream(err); + } + + + private static String formatTime(long millis) { + // CheckStyle:MagicNumber OFF + long seconds = millis / 1000; + long minutes = seconds / 60; + + + if (minutes > 0) { + return Long.toString(minutes) + " minute" + + (minutes == 1 ? " " : "s ") + + Long.toString(seconds % 60) + " second" + + (seconds % 60 == 1 ? "" : "s"); + } else { + return Long.toString(seconds) + " second" + + (seconds % 60 == 1 ? "" : "s"); + } + // CheckStyle:MagicNumber ON + } + + /** + * Set the project associated with this recorder entry. + * + * @param project the project instance + * + * @since 1.6.2 + */ + public void setProject(Project project) { + this.project = project; + if (project != null) { + project.addBuildListener(this); + } + } + + /** + * @since 1.6.2 + */ + public void cleanup() { + closeFile(); + if (project != null) { + project.removeBuildListener(this); + } + project = null; + } + + /** + * Closes the file associated with this recorder. + * Used by Recorder. + * @since 1.6.3 + */ + public void closeFile() { + if (out != null) { + out.close(); + out = null; + } + } + + /** + * Initially opens the file associated with this recorder. + * Used by Recorder. + * @param append Indicates if output must be appended to the logfile or that + * the logfile should be overwritten. + * @throws BuildException + * @since 1.6.3 + */ + public void openFile(boolean append) { + openFileImpl(append); + } + + /** + * Re-opens the file associated with this recorder. + * Used by Recorder. + * @throws BuildException + * @since 1.6.3 + */ + public void reopenFile() { + openFileImpl(true); + } + + private void openFileImpl(boolean append) { + if (out == null) { + try { + out = new PrintStream(new FileOutputStream(filename, append)); + } catch (IOException ioe) { + throw new BuildException("Problems opening file using a " + + "recorder entry", ioe); + } + } + } + + /** + * To add user message into log file. + * @param message + */ + public void addLogMessage(String message) { + out.println(StringUtils.LINE_SEP + message); + + } + + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/listener/AntLoggingHandler.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/listener/AntLoggingHandler.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,581 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. 
+* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.logger.ant.listener; + +import java.io.File; +import java.text.DateFormat; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.Enumeration; +import java.util.Hashtable; +import java.util.HashMap; +import java.util.Map; +import java.util.Vector; +import java.util.Map.Entry; + +import org.apache.tools.ant.BuildEvent; +import org.apache.tools.ant.Project; +import org.apache.tools.ant.Target; +import org.apache.tools.ant.Task; +import org.apache.tools.ant.types.LogLevel; +import org.apache.tools.ant.BuildException; +import org.apache.log4j.Logger; + +import com.nokia.helium.logger.ant.types.*; + + + /** + * Ant logging class for each Stage. + * + * + */ +public class AntLoggingHandler implements Handler { + + private static Hashtable recorderEntries = new Hashtable(); + private Boolean append ; + private int loglevel = -1; + private VerbosityLevelChoices antLogLevel; + private Map stagesMapping; + private Map stageRecordMap; + private Map defaultRecordMap; + private Map depStartTargetMap; + private boolean isStagesIntialized; + private boolean isRecordingStarted; + private AntLogRecorderEntry recorderEntry; + private boolean isRecorderEntryRegistered; + private boolean isDefaultRecorderEntryRegistered; + private Logger log = Logger.getLogger(AntLoggingHandler.class); + private boolean isInitDepStartTarget; + private String currentStageName; + + + public AntLoggingHandler() { + antLogLevel = new VerbosityLevelChoices(); + stagesMapping = new HashMap(); + stageRecordMap = new HashMap(); + defaultRecordMap = new HashMap(); + depStartTargetMap = new HashMap(); + } + + /** + * {@inheritDoc} + */ + public void handleTargetFinished(BuildEvent event) { + String stageName = getStopStageName (event.getTarget().getName()); + if (stageName != null && getIsRecordingStarted()) { + stopStageAntLog(stageName); + startDefaultAntLog(); + + } + log.debug("Finishing target [" + event.getTarget().getName() + "]"); + } + + /** + * {@inheritDoc} + */ + public void handleTargetStarted(BuildEvent event) { + + if (!isDefaultRecorderEntryRegistered ) { + log.debug("Intializing deafult recorder information and registering the recorder"); + initDefaultAntLogStage(event); + registerDefaultRecorderEntry(); + } + + if (!isStagesIntialized()) { + log.debug("Intializing stages information"); + getStagesInformation(event); + } + + if (!isInitDepStartTarget() && isStagesIntialized()) { + log.debug("Intializing dependent targets stage information."); + initDepStartTarget(event); + } + + if (!isRecorderEntryRegistered && isStagesIntialized()) { + log.debug("Registering recorder entries."); + registerRecorderEntry(event); + } + + log.debug("Starting target [" + event.getTarget().getName() + "]"); + String stageName = getStartStageName (event.getTarget().getName()); + if (stageName != null && !getIsRecordingStarted()) { + stopDefaultAntLog(stageName); + startStageAntLog(stageName); + } + + } + + /** + * {@inheritDoc} + */ + public void handleBuildStarted(BuildEvent event) { + + } + + /** + * {@inheritDoc} + */ + public void handleBuildFinished(BuildEvent event) { + String time = getDateTime(); + StageLogging stageLogging 
= defaultRecordMap.get("default"); + if (stageLogging != null) { + // this case might happen if the config has not been loaded because + // of an error happening before the start of the build. + File logFile = new File(stageLogging.getDefaultOutput()); + if (logFile.exists()) { + recorderEntry = getRecorder(stageLogging.getDefaultOutput(), StatusAndLogListener.getProject()); + recorderEntry.addLogMessage("Stopping main Ant logging at " + time + " into " + stageLogging.getDefaultOutput()); + recorderEntry.setRecordState(false); + } + } else { + log.debug("Could not find default recorder configuration."); + } + this.cleanup(); + } + + /** + * Stage ant logging for respective stage. + * @param stageName + */ + public void startStageAntLog(String stageName) { + String time = getDateTime(); + log.debug("Starting stagerecorder for stage [" + stageName + "]"); + StageLogging startStageLogging = stageRecordMap.get(stageName); + File logFile = new File(startStageLogging.getOutput()); + if (logFile.exists()) { + recorderEntry = getRecorder(startStageLogging.getOutput(), StatusAndLogListener.getProject()); + recorderEntry.setRecordState(true); + recorderEntry.addLogMessage("Starting logging for stage \"" + stageName + "\" into " + startStageLogging.getOutput() + " at " + time ); + this.isRecordingStarted = true; + this.currentStageName = stageName; + } + } + + /** + * Stop ant logging for respective stage. + * @param stageName + */ + + public void stopStageAntLog( String stageName) { + String time = getDateTime(); + log.debug("Stopping stagerecorder for stage [" + stageName + "]"); + StageLogging stopStageLogging = stageRecordMap.get(stageName); + StageLogging defaultStageLogging = defaultRecordMap.get("default"); + recorderEntry.addLogMessage("Stopping logging for stage \"" + stageName + "\" into " + stopStageLogging.getOutput() + " at " + time); + recorderEntry.addLogMessage("Starting logging into " + defaultStageLogging.getDefaultOutput()); + recorderEntry = getRecorder(stopStageLogging.getOutput(), StatusAndLogListener.getProject()); + recorderEntry.setRecordState(false); + this.isRecordingStarted = false; + this.currentStageName = null; + } + + /** + * Returns recorder entry for logging current build process. + * @param stageLogging + * @param proj + * @return + */ + @SuppressWarnings("unchecked") + protected AntLogRecorderEntry getRecorder(String name, Project proj) { + + Object o = recorderEntries.get(name); + AntLogRecorderEntry entry; + if (o == null) { + // create a recorder entry + entry = new AntLogRecorderEntry(name); + if (append == null) { + entry.openFile(false); + } else { + entry.openFile(append.booleanValue()); + } + entry.setProject(proj); + recorderEntries.put(name, entry); + } else { + entry = (AntLogRecorderEntry) o; + } + return entry; + } + + /** + * Whether or not the logger should append to a previous file. + * @param append if true, append to a previous file. + */ + public void setAppend(boolean append) { + this.append = append ; + } + + + /** + * Sets the level to which this recorder entry should log to. + * @param level the level to set. + * @see VerbosityLevelChoices + */ + public void setLoglevel(VerbosityLevelChoices level) { + loglevel = level.getLevel(); + } + + /** + * A list of possible values for the setLoglevel() method. + * Possible values include: error, warn, info, verbose, debug. + */ + public static class VerbosityLevelChoices extends LogLevel { + } + + /** + * To get the stage information. 
+ * @param event + */ + @SuppressWarnings("unchecked") + private void getStagesInformation(BuildEvent event) { + this.isStagesIntialized = true; + Hashtable references = event.getProject().getReferences(); + Enumeration keyEnum = references.keys(); + while (keyEnum.hasMoreElements()) { + String key = keyEnum.nextElement(); + if (references.get(key) instanceof Stage) { + Stage tempStage = (Stage)references.get(key); + log.debug("Found stage [" + key + "] for recording"); + if (validateStageTargets(event, tempStage.getStartTarget(), tempStage.getEndTarget() )) { + log.debug("Start and end targets are valid for stage [" + key + "]"); + stagesMapping.put(key, (Stage)references.get(key)); + getStageRecordInformation(event, key, tempStage.getStartTarget(), tempStage.getEndTarget()); + } + } + } + } + + /** + * Checks, is stages are initialized. + * @return + */ + private boolean isStagesIntialized() { + return this.isStagesIntialized; + } + + /** + * To initialize stage record mapping. + * @param event + */ + @SuppressWarnings("unchecked") + private void getStageRecordInformation(BuildEvent event, String stageKey, String startTarget, String endTarget) { + Hashtable references = event.getProject().getReferences(); + Enumeration keyEnum = references.keys(); + while (keyEnum.hasMoreElements()) { + String key = keyEnum.nextElement(); + if (references.get(key) instanceof StageLogging) { + StageLogging tempStageLogging = (StageLogging)references.get(key); + + if (tempStageLogging.getStageRefID() == null && tempStageLogging.getDefaultOutput() == null ) { + throw new BuildException("stagefefid attribute should be specified for stagerecord [" + key + "]"); + } + if (tempStageLogging.getStageRefID() != null ) { + if (tempStageLogging.getStageRefID().equalsIgnoreCase(stageKey) && tempStageLogging.getDefaultOutput() == null) { + log.debug("stagerecord reference for stage [" + stageKey + "] is [" + tempStageLogging.getStageRefID() + "]"); + if (tempStageLogging.getOutput() == null) { + throw new BuildException("output attribute should be specified for stagerecord [" + key + "]"); + } + stageRecordMap.put(stageKey, tempStageLogging); + } + } + } + } + } + + /** + * First Validate is the default output has been set. + * @param event + */ + @SuppressWarnings("unchecked") + private void initDefaultAntLogStage(BuildEvent event) { + Hashtable references = event.getProject().getReferences(); + Enumeration keyEnum = references.keys(); + while (keyEnum.hasMoreElements()) { + String key = keyEnum.nextElement(); + if (references.get(key) instanceof StageLogging) { + StageLogging tempStageLogging = (StageLogging)references.get(key); + + if (tempStageLogging.getStageRefID() == null && tempStageLogging.getDefaultOutput() != null ) { + defaultRecordMap.put("default", tempStageLogging); + } + } + } + } + + /** + * To check is the stage start and end targets present in the sequence. + * @param event + * @param startTarget + * @param endTarget + * @return + */ + @SuppressWarnings("unchecked") + private boolean validateStageTargets(BuildEvent event, String startTarget, String endTarget) { + + Hashtable antTargets = event.getProject().getTargets(); + return antTargets.containsKey(startTarget) && antTargets.containsKey(endTarget); + } + + /** + * Return mapped stage name to start record. 
+ * @param targetName + * @return + */ + + private String getStartStageName(String targetName) { + + for (Map.Entry entry : stagesMapping.entrySet() ) { + Stage stage = entry.getValue(); + if ( stage.getStartTarget().equalsIgnoreCase(targetName)) { + log.debug("stageName name for target [" + targetName + "] is [" + entry.getKey() + "]"); + return entry.getKey(); + } + for (Map.Entry depEntry : depStartTargetMap.entrySet() ) { + Stage depStage = depEntry.getKey(); + if ((depStage.getStartTarget().equalsIgnoreCase(stage.getStartTarget())) && (depEntry.getValue().getName().equalsIgnoreCase(targetName))) { + log.debug("stageName name for depending target [" + depStage.getStartTarget() + "] is [" + entry.getKey() + "]"); + return entry.getKey(); + } + } + } + return null; + } + + /** + * Return mapped stage name to stop record. + * @param targetName + * @return + */ + private String getStopStageName(String targetName) { + + for (Map.Entry entry : stagesMapping.entrySet() ) { + Stage stage = entry.getValue(); + if ( stage.getEndTarget().equalsIgnoreCase(targetName)) { + log.debug("stageName name for end target [" + targetName + "] is [" + entry.getKey() + "]"); + return entry.getKey(); + } + } + return null; + } + + /** + * To check is recording started. + * @return + */ + private boolean getIsRecordingStarted() { + return this.isRecordingStarted; + } + + /** + * To register recorder entries. + * @param event + */ + @SuppressWarnings("unchecked") + private void registerRecorderEntry(BuildEvent event) { + /* Later register stages recorder entries */ + for (Map.Entry entry : stageRecordMap.entrySet()) { + StageLogging stageLogging = entry.getValue(); + File logFile = new File(stageLogging.getOutput()); + if (!logFile.getParentFile().exists()) { + logFile.getParentFile().mkdirs(); + } + if (logFile.getParentFile().exists()) { + log.debug("Registering recorderentry for log file [" + stageLogging.getOutput() + "]"); + this.setAppend(stageLogging.getAppend().booleanValue()); + recorderEntry = getRecorder(stageLogging.getOutput(), StatusAndLogListener.getProject()); + antLogLevel.setValue(stageLogging.getLogLevel()); + this.setLoglevel(antLogLevel); + recorderEntry.setMessageOutputLevel(loglevel); + recorderEntry.setEmacsMode(false); + recorderEntry.setRecordState(false); + isRecorderEntryRegistered = true; + } + } + } + + /** + * To register default recorder entry. + */ + @SuppressWarnings("unchecked") + private void registerDefaultRecorderEntry() { + + /* First register default recorder */ + if (defaultRecordMap.size() == 0) { + throw new BuildException("There is no stagerecord type with defaultoutput attribute set. 
please set..."); + } + StageLogging stageLogging = defaultRecordMap.get("default"); + File logFile = new File(stageLogging.getDefaultOutput()); + if (!logFile.getParentFile().exists()) { + logFile.getParentFile().mkdirs(); + } + if (logFile.getParentFile().exists()) { + log.debug("Registering recorderentry for log file [" + stageLogging.getDefaultOutput() + "]"); + this.setAppend(stageLogging.getAppend().booleanValue()); + recorderEntry = getRecorder(stageLogging.getDefaultOutput(), StatusAndLogListener.getProject()); + antLogLevel.setValue(stageLogging.getLogLevel()); + this.setLoglevel(antLogLevel); + recorderEntry.setMessageOutputLevel(loglevel); + recorderEntry.setEmacsMode(false); + recorderEntry.setRecordState(true); + String time = getDateTime(); + recorderEntry.addLogMessage("Starting main Ant logging at " + time + " into " + stageLogging.getDefaultOutput()); + isDefaultRecorderEntryRegistered = true; + } + + } + + + + /** + * To clean recorder entries. + */ + private void cleanup() { + log.debug("Cleaning up recorder entries of stagerecord"); + StatusAndLogListener.getProject().removeBuildListener(recorderEntry); + recorderEntries.clear(); + + } + + /** + * To check is the dependent start target map is initialized. + * @return + */ + private boolean isInitDepStartTarget() { + return isInitDepStartTarget; + } + + /** + * Initialize the dependent start targets mapping. + * @param event + */ + @SuppressWarnings("unchecked") + private void initDepStartTarget(BuildEvent event) { + Vector arrayList = null; + isInitDepStartTarget = true; + for (Map.Entry entry : stagesMapping.entrySet() ) { + Stage stage = entry.getValue(); + arrayList = event.getProject().topoSort(stage.getStartTarget(), event.getProject().getTargets(), false); + if (arrayList != null && arrayList.size() > 1) { + depStartTargetMap.put(stage, arrayList.firstElement()); + } + } + } + + /** + * To get current date and time. + * @return + */ + private String getDateTime() { + DateFormat dateFormat = new SimpleDateFormat("EE yyyy/MM/dd HH:mm:ss:SS aaa"); + Date date = new Date(); + return dateFormat.format(date); + } + + /** + * To stop default ant logging. + */ + private void stopDefaultAntLog(String stageName) { + String time = getDateTime(); + StageLogging defaultStageLogging = defaultRecordMap.get("default"); + StageLogging stageLogging = stageRecordMap.get(stageName); + File logFile = new File(defaultStageLogging.getDefaultOutput()); + if (logFile.exists()) { + recorderEntry = getRecorder(defaultStageLogging.getDefaultOutput(), StatusAndLogListener.getProject()); + recorderEntry.addLogMessage("Stopping logging into " + defaultStageLogging.getDefaultOutput() + " and starting logging for stage \"" + stageName + "\" at " + time); + recorderEntry.addLogMessage("Starting logging into " + stageLogging.getOutput()); + recorderEntry.setRecordState(false); + } + } + + /** + * To start deafult ant logging. + */ + + private void startDefaultAntLog() { + String time = getDateTime(); + StageLogging stageLogging = defaultRecordMap.get("default"); + recorderEntry = getRecorder(stageLogging.getDefaultOutput(), StatusAndLogListener.getProject()); + recorderEntry.addLogMessage("Resuming logging into " + stageLogging.getDefaultOutput() + " at " + time ); + recorderEntry.setRecordState(true); + } + + /** + * To get the current stage running. + * @return + */ + public String getCurrentStageName() { + + return this.currentStageName; + } + + /** + * To do the logging actions depending on hlm:record actions. 
+ * @param stageName + * @param action + * @param message + */ + public void doLoggingAction(String stageName, boolean action, String message, Task task) { + String time = getDateTime(); + StageLogging stageLogging = null; + String fileName; + if (stageName.equalsIgnoreCase("default")) { + stageLogging = defaultRecordMap.get(stageName); + fileName = stageLogging.getDefaultOutput(); + } else { + stageLogging = stageRecordMap.get(stageName); + fileName = stageLogging.getOutput(); + } + File logFile = new File(fileName); + if (logFile.exists()) { + recorderEntry = getRecorder(fileName, StatusAndLogListener.getProject()); + recorderEntry.addLogMessage(message + " logging into " + fileName + " from " + task.getTaskName() + " task at " + time); + recorderEntry.setRecordState(action); + } + } + + /** + * Called by LogReplace task to find and replace any property values which are not updated. + * @param regExp + */ + @SuppressWarnings("unchecked") + public void addRegExp (String regExp) { + + if (!regExp.equals("")) { + + for (Map.Entry entry : defaultRecordMap.entrySet() ) { + StageLogging stageLogging = entry.getValue(); + File logFile = new File(stageLogging.getDefaultOutput()); + if (logFile.exists()) { + AntLogRecorderEntry recorderEntry = getRecorder(stageLogging.getDefaultOutput(), StatusAndLogListener.getProject()); + recorderEntry.addRegexp(regExp); + } + } + + for (Map.Entry entry : stageRecordMap.entrySet() ) { + StageLogging stageLogging = entry.getValue(); + File logFile = new File(stageLogging.getOutput()); + if (logFile.exists()) { + AntLogRecorderEntry recorderEntry = getRecorder(stageLogging.getOutput(), StatusAndLogListener.getProject()); + recorderEntry.addRegexp(regExp); + } + } + + } + } + +} + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/listener/BuildEventHandler.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/listener/BuildEventHandler.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,43 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.logger.ant.listener; + +import org.apache.tools.ant.BuildEvent; + +/** + * Handler is an interface which is used to handle the build events + * which are of importance for ant logging and build stage summary display. + * + * + */ +public interface BuildEventHandler { + + /** + * Method to handle build start events. + * + * @param event is the build event to be handled. + */ + void handleBuildStarted( BuildEvent event ); + + /** + * Method to handle build finish events. + * + * @param event is the build event to be handled. 
+ */ + void handleBuildFinished( BuildEvent event ); + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/listener/BuildStatusReport.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/listener/BuildStatusReport.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,142 @@ +/* + * Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). + * All rights reserved. + * This component and the accompanying materials are made available + * under the terms of the License "Eclipse Public License v1.0" + * which accompanies this distribution, and is available + * at the URL "http://www.eclipse.org/legal/epl-v10.html". + * + * Initial Contributors: + * Nokia Corporation - initial contribution. + * + * Contributors: + * + * Description: + * + */ +package com.nokia.helium.logger.ant.listener; + +import static com.nokia.helium.logger.ant.listener.StageSummaryHandler.FAILED; +import static com.nokia.helium.logger.ant.listener.StageSummaryHandler.PASSED; + +/** + * BuildStatusReport is a simple java bean used to hold information pertaining + * to various build stages. + * + */ +public class BuildStatusReport { + + private String phaseName; + private String startTime; + private String duration; + private String reason; + private String status; + + /** + * Create an instance of {@link BuildStatusReport}. + * + * @param phaseName is the name of the Phase + * @param startTime is the start time of the Phase + * @param duration is the duration of the Phase + * @param reason is the cause of build failure, if any + */ + public BuildStatusReport(String phaseName, String startTime, + String duration, String reason) { + this.phaseName = phaseName; + this.startTime = startTime; + this.duration = duration; + this.reason = (reason != null && !reason.isEmpty()) ? reason : "N/A"; + this.status = (reason != null && !reason.isEmpty()) ? FAILED : PASSED; + } + + /** + * Get the Build Phase name. + * + * @return the Build Phase name. + */ + public String getPhaseName() { + return phaseName; + } + + /** + * Set the Build Phase name. + * + * @param phaseName is the phase name to set. + */ + public void setPhaseName(String phaseName) { + this.phaseName = phaseName; + } + + /** + * Get the start time of this Phase. + * + * @return the start time of this Phase. + */ + public String getStartTime() { + return startTime; + } + + /** + * Set the start time of this Phase. + * + * @param startTime is the start time to set. + */ + public void setStartTime(String startTime) { + this.startTime = startTime; + } + + /** + * Get the duration of this Phase. + * + * @return the duration of this Phase. + */ + public String getDuration() { + return duration; + } + + /** + * Set the duration of this Phase. + * + * @param duration the duration to set. + */ + public void setDuration(String duration) { + this.duration = duration; + } + + /** + * Get the reason for build failure. + * + * @return the reason for build failure. + */ + public String getReason() { + return reason; + } + + /** + * Set the reason for build failure. + * + * @param reason is the reason for build failure. + */ + public void setReason(String reason) { + this.reason = reason; + } + + /** + * Get the build status. + * + * @return the build status. + */ + public String getStatus() { + return status; + } + + /** + * Set the build status. + * + * @param status is the build status to set. 
+ */ + public void setStatus(String status) { + this.status = status; + } + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/listener/Handler.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/listener/Handler.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,30 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.logger.ant.listener; + + +/** + * Handler is an interface which is used to handle the build events + * which are of importance for ant logging and build stage summary display. + * + * + */ +public interface Handler extends BuildEventHandler, TargetEventHandler { + + + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/listener/StageSummaryHandler.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/listener/StageSummaryHandler.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,447 @@ +/* + * Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). + * All rights reserved. + * This component and the accompanying materials are made available + * under the terms of the License "Eclipse Public License v1.0" + * which accompanies this distribution, and is available + * at the URL "http://www.eclipse.org/legal/epl-v10.html". + * + * Initial Contributors: + * Nokia Corporation - initial contribution. + * + * Contributors: + * + * Description: + * + */ +package com.nokia.helium.logger.ant.listener; + +import java.io.File; +import java.io.StringWriter; +import java.text.DateFormat; +import java.util.ArrayList; +import java.util.Date; +import java.util.Enumeration; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Hashtable; +import java.util.List; +import java.util.Map; +import java.util.StringTokenizer; +import java.util.TreeMap; +import java.util.Vector; + +import org.apache.log4j.Logger; +import org.apache.tools.ant.BuildEvent; +import org.apache.tools.ant.BuildException; +import org.apache.tools.ant.Project; +import org.apache.tools.ant.Target; +import org.apache.tools.ant.util.DateUtils; + +import com.nokia.helium.logger.ant.types.Stage; +import com.nokia.helium.logger.ant.types.StageSummary; + +import freemarker.cache.FileTemplateLoader; +import freemarker.template.Configuration; +import freemarker.template.Template; + +/** + * StageStatusHandler is the handler class responsible for + * displaying the summary of the various configured build stages at the end of + * build process. 
+ * + */ +public class StageSummaryHandler implements Handler { + + public static final String PASSED = "PASSED"; + public static final String FAILED = "FAILED"; + + private Logger log = Logger.getLogger(getClass()); + private boolean displaySummary; + private boolean lookup4Stages; + private boolean summarize; + + private List statusReports; + private HashSet completedStages; + private Hashtable currentStages; + private Hashtable currentStagesStartTime; + private Hashtable stages; + private String template; + + /** + * Create an instance of {@link StageSummaryHandler} + * + */ + public StageSummaryHandler() { + this.statusReports = new ArrayList(); + this.completedStages = new HashSet(); + this.currentStages = new Hashtable(); + this.currentStagesStartTime = new Hashtable(); + log.debug("StageStatusHandler instantiated"); + } + + /** + * {@inheritDoc} + */ + public void handleBuildStarted(BuildEvent event) { + + } + + /** + * {@inheritDoc} + */ + public void handleBuildFinished(BuildEvent event) { + if (summarize && !currentStages.isEmpty()) { + Long currTime = getCurrentTime(); + String reason = getReason(event.getException()); + Map tempStages = new Hashtable( + currentStages); + for (String stageName : tempStages.keySet()) { + endCurrentStage(stageName, tempStages.get(stageName), reason, + currTime); + } + + } + if (summarize && displaySummary) { + generateSummary(event.getProject()); + displaySummary = false; + log.debug("Stage Summary generation completed"); + } + } + + /** + * {@inheritDoc} + */ + public void handleTargetStarted(BuildEvent event) { + Project project = event.getProject(); + if (!summarize) { + StageSummary stageSummary = getStageSummary(project); + summarize = stageSummary != null + && !stageSummary.getTemplate().trim().isEmpty(); + lookup4Stages = summarize; + template = stageSummary.getTemplate(); + log.debug("Is Project configured to display Stage Summary ? " + + summarize); + } + + if (lookup4Stages) { + log.debug("Loading stages...."); + parseStages(event.getProject()); + log.debug("Total no of stages loaded = " + stages.size()); + lookup4Stages = false; + } + + log.debug("Handling target - " + event.getTarget().getName()); + if (summarize) { + Long currTime = getCurrentTime(); + TreeMap result = searchNewStage(event); + if (result != null && result.size() == 1) { + String stageName = result.firstKey(); + Stage stage = result.get(stageName); + startNewStage(stageName, stage, currTime); + } + } + } + + /** + * {@inheritDoc} + */ + public void handleTargetFinished(BuildEvent event) { + String currentTarget = event.getTarget().getName(); + Long currTime = getCurrentTime(); + String reason = getReason(event.getException()); + if (summarize && !currentStages.isEmpty()) { + TreeMap result = getCurrentStageToEnd(currentTarget); + if (!result.isEmpty()) { + String stageName = result.firstKey(); + Stage stage = result.get(stageName); + endCurrentStage(stageName, stage, reason, currTime); + } + } + } + + private TreeMap getCurrentStageToEnd(String target) { + TreeMap result = new TreeMap(); + for (String stageName : currentStages.keySet()) { + Stage stage = currentStages.get(stageName); + if (stage.isEndTarget(target)) { + result.put(stageName, stage); + break; + } + } + return result; + } + + /** + * Method returns the configured {@link StageSummary}. + * + * @param project + * is the project to lookup for stageSummary. + * @return the {@link StageSummary}. 
+ */ + @SuppressWarnings("unchecked") + private StageSummary getStageSummary(Project project) { + StageSummary stageSummary = null; + int count = 0; + Hashtable references = project.getReferences(); + for (Enumeration en = references.keys(); en.hasMoreElements();) { + Object object = references.get(en.nextElement()); + if (object instanceof StageSummary) { + count++; + if (count > 1) { + raiseException("Multiple entries of 'hlm:stagesummary' found in " + + "stages_config.ant.xml."); + } + stageSummary = (StageSummary) object; + } + } + return stageSummary; + } + + /** + * Raise a {@link BuildException} with the specified error message. + * + * @param message + * is the error message to display. + */ + private void raiseException(String message) { + throw new BuildException(message); + } + + /** + * Start the given stage as a new build stage. + * + * @param stageName + * is the name of the new stage. + * @param newStage + * is the build stage to start as new. + * @param startTime + * is the start time of the given build stage. + */ + private void startNewStage(String stageName, Stage newStage, Long startTime) { + if (!currentStages.containsKey(stageName)) { + this.currentStages.put(stageName, newStage); + this.currentStagesStartTime.put(stageName, startTime); + log.debug("New stage [" + stageName + "] started at " + + getTimestamp(startTime)); + } + } + + /** + * End the current stage. + * + * @param reason + * is the reason for build failure if any. + * @param currTime + * is the end time of the current stage. + */ + private void endCurrentStage(String currentStageName, Stage currentStage, + String reason, Long currTime) { + if (currentStage != null) { + BuildStatusReport report = constructBuildStatusReport( + currentStageName, currentStagesStartTime + .get(currentStageName), currTime, reason); + statusReports.add(report); + displaySummary = true; + log.debug("Stage [" + currentStageName + "] finished at " + + getTimestamp(currTime)); + reset(currentStageName); + } + } + + /** + * Reset the build stage variables to default. + */ + private void reset(String stageName) { + this.currentStages.remove(stageName); + this.currentStagesStartTime.remove(stageName); + this.completedStages.add(stageName); + } + + /** + * Search for the new Stage based on the given build event. + * + * @param event + * is the build event fired. + * @return a map with Stage Name and stage, if the build event marks the + * start of a configured Stage. + */ + private TreeMap searchNewStage(BuildEvent event) { + TreeMap result = new TreeMap(); + String target = event.getTarget().getName(); + for (String stageName : stages.keySet()) { + Stage stage = stages.get(stageName); + if (!completedStages.contains(stageName) + && isStartingTarget(target, event.getProject(), stage)) { + result.put(stageName, stage); + break; + } + } + return result; + } + + /** + * Return whether the given target is a starting target of the given stage. + * + * @param targetName + * is the target to check. + * @param project + * is the project to lookup for target + * @param stage + * is the stage to check. 
+ * @return + */ + @SuppressWarnings("unchecked") + private boolean isStartingTarget(String targetName, Project project, + Stage stage) { + boolean bool = false; + if (project.getTargets().containsKey(stage.getStartTarget())) { + Vector dependencies = project.topoSort(stage + .getStartTarget(), project.getTargets(), false); + if (!dependencies.isEmpty()) { + Target target = dependencies.firstElement(); + bool = target.getName().equals(targetName); + } + } + return bool; + } + + /** + * Parse and cache the stages configured. + * + * @param project + * is the project to lookup for stages. + */ + @SuppressWarnings("unchecked") + private void parseStages(Project project) { + stages = new Hashtable(); + Hashtable references = project.getReferences(); + for (Enumeration en = references.keys(); en.hasMoreElements();) { + String key = en.nextElement(); + Object value = references.get(key); + if (value instanceof Stage) { + stages.put(key, (Stage) value); + } + } + } + + /** + * Return the reason for build failure in String format. + * + * @param th + * is the cause of build failure if any. + * @return String representing the build failure. + */ + private String getReason(Throwable th) { + return (th != null) ? th.getMessage() : ""; + } + + /** + * Return the current time in milliseconds. + * + * @return the current time in milliseconds. + */ + private Long getCurrentTime() { + return System.currentTimeMillis(); + } + + /** + * Generate build summary. + * + */ + private void generateSummary(Project project) { + if (template != null) { + try { + Configuration cfg = new Configuration(); + // get base dir and template name + StringTokenizer tokenizer = new StringTokenizer(template, + File.separator); + StringBuffer baseDirBuf = new StringBuffer(); + String templateName = null; + while (tokenizer.hasMoreElements()) { + String str = (String) tokenizer.nextElement(); + if (str.endsWith(".ftl")) { + templateName = str; + } else { + baseDirBuf.append(str).append(File.separator); + } + } + File baseDir = new File(baseDirBuf.toString()); + cfg.setTemplateLoader(new FileTemplateLoader(baseDir)); + Template templ = cfg.getTemplate(templateName); + StringWriter writer = new StringWriter(); + templ.process(getTemplateData(), writer); + project.log(writer.toString()); + } catch (freemarker.core.InvalidReferenceException ivx) { + project.log("Invalid reference in config: ", ivx, + Project.MSG_WARN); + } catch (freemarker.template.TemplateException e2) { + project.log("TemplateException: ", e2, Project.MSG_WARN); + } catch (java.io.IOException e) { + project.log("I/O Error during template conversion: ", e, + Project.MSG_WARN); + } + } + } + + /** + * Return the data-model to be merged with the template. + * + * @return a Map representing template data-model. + */ + private Map getTemplateData() { + Map templateMap = new HashMap(); + templateMap.put("statusReports", new ArrayList( + statusReports)); + return templateMap; + } + + /** + * Get the given date as String format. + * + * @param date + * is the date to be formatted as String. + * @return given date formated as String + */ + private String getTimestamp(long date) { + Date dt = new Date(date); + DateFormat formatter = DateFormat.getDateTimeInstance(DateFormat.SHORT, + DateFormat.SHORT); + String finishTime = formatter.format(dt); + return finishTime; + } + + /** + * Get the time duration for the given start and end times in String format. + * + * @param startTime + * is the start time. + * @param endTime + * is the end time. 
+ * @return + */ + private String getTimeElapsed(Long startTime, Long endTime) { + long timeElapsed = endTime - startTime; + return DateUtils.formatElapsedTime(timeElapsed); + } + + /** + * Construct an instance of {@link BuildStatusReport} with the given + * details. + * + * @param phaseName + * is the name of the Phase. + * @param startTime + * is the start time of the given Phase + * @param endTime + * is the end time of given phase + * @param reason + * is the cause of failure + * @return + */ + private BuildStatusReport constructBuildStatusReport(String phaseName, + Long startTime, Long endTime, String reason) { + return new BuildStatusReport(phaseName, getTimestamp(startTime), + getTimeElapsed(startTime, endTime), reason); + } +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/listener/StatusAndLogListener.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/listener/StatusAndLogListener.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,201 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.logger.ant.listener; + +import java.util.Vector; + +import org.apache.tools.ant.BuildEvent; +import org.apache.tools.ant.BuildListener; +import org.apache.tools.ant.SubBuildListener; +import org.apache.tools.ant.Project; + +/** + * StatusAndLogListener implements {@link BuildListener} and + * listens to build events in particularly for activities such as ant logging + * and displaying build stage summary at the end of build process. + * + */ +public class StatusAndLogListener implements BuildListener, SubBuildListener { + + private static Vector handlers = new Vector(); + private static Project project; + + /** + * Default constructor + */ + public StatusAndLogListener() { + } + + /** + * Signals that the last target has finished. This event will still be fired + * if an error occurred during the build. + * + * @param event + * An event with any relevant extra information. Must not be + * null. + * + * @see BuildEvent#getException() + */ + public void buildStarted(BuildEvent event) { + project = event.getProject(); + for (Handler handler : handlers) { + handler.handleBuildStarted(event); + } + + } + /** + * Signals that a build has started. This event is fired before any targets + * have started. + * + * @param event + * An event with any relevant extra information. Must not be + * null. + */ + public void buildFinished(BuildEvent event) { + for ( Handler handler : handlers ) { + handler.handleBuildFinished( event ); + } + } + + + /** + * Signals that a target is starting. + * + * @param event + * An event with any relevant extra information. Must not be + * null. + * + * @see BuildEvent#getTarget() + */ + public void targetStarted(BuildEvent event) { + for (Handler handler : handlers) { + handler.handleTargetStarted(event); + } + } + + /** + * Signals that a target has finished. This event will still be fired if an + * error occurred during the build. 
+ * + * @param event + * An event with any relevant extra information. Must not be + * null. + * + * @see BuildEvent#getException() + */ + public void targetFinished(BuildEvent event) { + for (Handler handler : handlers) { + handler.handleTargetFinished(event); + } + } + + /** + * Signals that a task is starting. + * + * @param event + * An event with any relevant extra information. Must not be + * null. + * + * @see BuildEvent#getTask() + */ + public void taskStarted(BuildEvent event) { + // implement if needed + } + + /** + * Signals that a task has finished. This event will still be fired if an + * error occurred during the build. + * + * @param event + * An event with any relevant extra information. Must not be + * null. + * + * @see BuildEvent#getException() + */ + public void taskFinished(BuildEvent event) { + // implement if needed + } + + /** + * Signals that a subbuild has started. This event is fired before any targets have started. + * @param event + */ + public void subBuildStarted(BuildEvent event) { + + } + + /** + * Signals that the last target has finished. This event will still be fired if an error occurred during the build. + * @param event + */ + + public void subBuildFinished(BuildEvent event) { + + } + + + + /** + * Signals a message logging event. + * + * @param event + * An event with any relevant extra information. Must not be + * null. + * + * @see BuildEvent#getMessage() + * @see BuildEvent#getException() + * @see BuildEvent#getPriority() + */ + public void messageLogged(BuildEvent event) { + // implement if needed + + } + + /** + * Register the given handler. + * + * @param handler + * is the handler to register + */ + public static void register ( Handler handler ) { + handlers.add( handler ); + } + + /** + * Return root project name. + * @return + */ + public static Project getProject() { + return project; + } + + + /** + * Check and return required type handler. + * @param handlerType + * @return + */ + public static Handler getHandler(Class handlerType) { + for (Handler handler : handlers) { + if (handlerType.isInstance(handler)) { + return handler; + } + } + return null; + } + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/listener/SubBuildEventHandler.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/listener/SubBuildEventHandler.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,41 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.logger.ant.listener; + +import org.apache.tools.ant.BuildEvent; + +/** + * Handler is an interface which is used to handle the build events + * which are of importance for ant logging and build stage summary display. + * + * + */ +public interface SubBuildEventHandler { + + /** + * Method to handle SubBuild Started events. + * @param event + */ + void handleSubBuildStarted( BuildEvent event ); + + /** + * Method to handle SubBuild Finished events. 
+ * @param event + */ + void handleSubBuildFinished( BuildEvent event ); + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/listener/TargetEventHandler.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/listener/TargetEventHandler.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,44 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.logger.ant.listener; + +import org.apache.tools.ant.BuildEvent; + +/** + * Handler is an interface which is used to handle the build events + * which are of importance for ant logging and build stage summary display. + * + * + */ +public interface TargetEventHandler { + + /** + * Method to handle Target started Event + * + * @param event + */ + + void handleTargetStarted( BuildEvent event ); + + /** + * Method to handle target finish events. + * + * @param event is the build event to be handled. + */ + void handleTargetFinished( BuildEvent event ); + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/listener/TaskEventHandler.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/listener/TaskEventHandler.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,43 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.logger.ant.listener; + +import org.apache.tools.ant.BuildEvent; + +/** + * Handler is an interface which is used to handle the build events + * which are of importance for ant logging and build stage summary display. + * + * + */ +public interface TaskEventHandler { + + /** + * Method to handle Task Finished events. + * + * @param event is the build event to be handled. + */ + void handleTaskFinished( BuildEvent event ); + + /** + * Method to handle Task Started( events. + * + * @param event is the build event to be handled. + */ + void handleTaskStarted( BuildEvent event ); + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/taskdefs/LogRecorder.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/taskdefs/LogRecorder.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,333 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. 
+* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ + + + +package com.nokia.helium.logger.ant.taskdefs; + +import java.util.Hashtable; +import java.util.Vector; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.apache.tools.ant.BuildEvent; +import org.apache.tools.ant.BuildException; +import org.apache.tools.ant.Project; +import org.apache.tools.ant.Task; +import org.apache.tools.ant.types.EnumeratedAttribute; +import org.apache.tools.ant.types.LogLevel; + +import com.nokia.helium.logger.ant.listener.AntLogRecorderEntry; +import com.nokia.helium.logger.ant.listener.AntLoggingHandler; +import com.nokia.helium.logger.ant.listener.Handler; +import com.nokia.helium.logger.ant.listener.StatusAndLogListener; +import com.nokia.helium.logger.ant.types.RecordFilter; +import com.nokia.helium.logger.ant.types.RecordFilterSet; + +/** + * For recording ant logging output. + * + *
    + *      <hlm:record name="${build.log.dir}/${build.id}_test.log" action="start" append="false" loglevel="info">
    + *           <hlm:recordfilterset refid="recordfilter.config"/>
    + *           <hlm:recordfilter category="info" regexp="^INFO:" />
    + *      </hlm:record>
    + *      
    + * 
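    + *      To stop the same recording later in the build, only the name and action attributes are
    + *      needed (a sketch; the name should match the log that was started above):
    + *      <hlm:record name="${build.log.dir}/${build.id}_test.log" action="stop"/>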
    + * + * @ant.task name="Record" category="Logging". + * + */ + +public class LogRecorder extends Task implements Handler { + + private static Hashtable recorderEntries = new Hashtable(); + private String fileName; + private Boolean append ; + private Boolean start ; + private int loglevel = -1; + private boolean emacsMode ; + private Vector recordFilters = new Vector(); + private Vector recordFilterSet = new Vector(); + private Vector regExpList = new Vector(); + + + public LogRecorder() { + + } + /** + * Run by the task. + */ + + public void execute () { + + AntLoggingHandler antLoggingHandler = (AntLoggingHandler)StatusAndLogListener.getHandler(AntLoggingHandler.class); + + /* To validate attributes passed. */ + validateAttributes(); + + /* to add regular filters */ + addAllRecordFilters(); + + + /* Init password/record filter and replace any unset properties */ + initAndReplaceProperties(); + + // get the recorder entry + AntLogRecorderEntry recorder = getRecorder(fileName, getProject()); + // set the values on the recorder + recorder.setMessageOutputLevel(loglevel); + recorder.setEmacsMode(emacsMode); + if (start != null) { + if (start.booleanValue()) { + if (antLoggingHandler != null) { + if (antLoggingHandler.getCurrentStageName() != null) { + antLoggingHandler.doLoggingAction(antLoggingHandler.getCurrentStageName(), false, "Stopping", this); + } else { + antLoggingHandler.doLoggingAction("default", false, "Stopping", this); + } + } + recorder.reopenFile(); + recorder.setRecordState(start); + } else { + recorder.setRecordState(start); + recorder.closeFile(); + if (antLoggingHandler != null) { + if (antLoggingHandler.getCurrentStageName() != null) { + antLoggingHandler.doLoggingAction(antLoggingHandler.getCurrentStageName(), true, "Starting", this); + } else { + antLoggingHandler.doLoggingAction("default", true, "Starting", this); + } + } + } + } + + } + /** + * To Validate is the fileName set for recording. + */ + private void validateAttributes() { + if (fileName == null) { + throw new BuildException("filename attribute should be specified for helium recorder task."); + } + + } + + /** + * Set the file name to recod. + * @param fileName + * @ant.required + */ + public void setName(String fileName) { + this.fileName = fileName; + } + + /** + * Return the fileName. + * @return + */ + public String getName() { + return this.fileName; + } + + /** + * Set the append parameter. + * @param append + * @ant.not-required + */ + public void setAppend(boolean append) { + this.append = append ? Boolean.TRUE : Boolean.FALSE; + } + + /** + * Set logLevel to log the information. + * @param level + * @ant.not-required + */ + public void setLoglevel(VerbosityLevelChoices level) { + loglevel = level.getLevel(); + } + + /** + * Set the EmacsMode. + * @param emacsMode + * @ant.not-required + */ + public void setEmacsMode(boolean emacsMode) { + this.emacsMode = emacsMode; + } + + /** + * Return the emacsMode. + * @return + */ + public boolean getEmacsMode() { + return this.emacsMode; + } + + /** + * create the type of recorderfilter. + * @param logFilter + */ + public void addRecordFilter(RecordFilter logFilter) { + if (!recordFilters.contains(logFilter)) { + recordFilters.add(logFilter); + } + } + + /** + * Create the type of recoderfilterset + * @param logFilterSet + */ + public void addRecordFilterSet(RecordFilterSet logFilterSet) { + if (!recordFilterSet.contains(logFilterSet)) { + recordFilterSet.add(logFilterSet); + } + } + + /** + * Set the action of stop/start. 
+ * @param action + * @ant.not-required + */ + public void setAction(ActionChoices action) { + if (action.getValue().equalsIgnoreCase("start")) { + start = Boolean.TRUE; + } else { + start = Boolean.FALSE; + } + } + + /** + * To get the action state of current recorder. + * @return + */ + public boolean getAction() { + return start.booleanValue(); + } + + + /** + * A list of possible values for the setAction() method. + * Possible values include: start and stop. + */ + public static class ActionChoices extends EnumeratedAttribute { + private static final String[] VALUES = {"start", "stop"}; + + /** + * @see EnumeratedAttribute#getValues() + */ + /** {@inheritDoc}. */ + public String[] getValues() { + return VALUES; + } + } + + /** + * To set the verbosity levels + * + * + */ + public static class VerbosityLevelChoices extends LogLevel { + } + + + /** + * To register the recorder entry + */ + @SuppressWarnings("unchecked") + protected AntLogRecorderEntry getRecorder(String name, Project proj) { + Object o = recorderEntries.get(name); + AntLogRecorderEntry entry; + if (o == null) { + // create a recorder entry + entry = new AntLogRecorderEntry(name); + for (String regExp : regExpList) { + if (!regExp.equals("")) { + String pattern = Pattern.quote(regExp); + entry.addRegexp(pattern); + } + } + + if (append == null) { + entry.openFile(false); + } else { + entry.openFile(append.booleanValue()); + } + entry.setProject(proj); + recorderEntries.put(name, entry); + } else { + entry = (AntLogRecorderEntry) o; + } + return entry; + } + + /** + * Get all the recorderfilters from recorderfilterset refid. + */ + public void addAllRecordFilters() { + for (RecordFilterSet recFilterSet : recordFilterSet ) { + recordFilters.addAll(recFilterSet.getAllFilters()); + } + } + + public void handleBuildFinished(BuildEvent event) { + // TODO Auto-generated method stub + + } + + public void handleBuildStarted(BuildEvent event) { + // TODO Auto-generated method stub + + } + + public void handleTargetFinished(BuildEvent event) { + // TODO Auto-generated method stub + } + + public void handleTargetStarted(BuildEvent event) { + // TODO Auto-generated method stub + + } + + /** + * To init password and record filters. + * Replace with values if any property values are unset. + */ + @SuppressWarnings("unused") + public void initAndReplaceProperties() { + + Pattern pattern = null; + Matcher match = null; + for (RecordFilter recordFilter : recordFilters) { + if (recordFilter.getRegExp() == null) { + throw new BuildException("\"regexp\" attribute should not have null value for recordfilter"); + } + if (recordFilter.getRegExp() != null) { + pattern = Pattern.compile("\\$\\{(.*)}"); + match = pattern.matcher(recordFilter.getRegExp()); + if (match.find()) { + regExpList.add(getProject().replaceProperties(recordFilter.getRegExp())); + } else { + regExpList.add(recordFilter.getRegExp()); + } + } + } + } + + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/taskdefs/LogReplace.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/taskdefs/LogReplace.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,74 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. 
+* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.logger.ant.taskdefs; + +import java.util.regex.Pattern; + +import org.apache.tools.ant.BuildException; +import org.apache.tools.ant.Task; + +import com.nokia.helium.logger.ant.listener.AntLoggingHandler; +import com.nokia.helium.logger.ant.listener.StatusAndLogListener; + +/** + * To replace the property values with real values if the properties are not set at the begining of the build. + * + * pre> + * <hlm:logreplace regexp="${property.not.set}"/> + * + * + * @ant.task name="logreplace" category="Logging". + */ +public class LogReplace extends Task { + + private String regExp; + + /** + * Run by the task. + */ + + public void execute () { + + AntLoggingHandler antLoggingHandler = (AntLoggingHandler)StatusAndLogListener.getHandler(AntLoggingHandler.class); + + if (regExp == null ) { + throw new BuildException ("'regexp' attribute should not be null."); + } + + if (antLoggingHandler != null) { + String pattern = Pattern.quote(regExp); + antLoggingHandler.addRegExp(pattern); + } + } + + /** + * @param regExp the regExp to set + * @ant.required + */ + public void setRegExp(String regExp) { + this.regExp = regExp; + } + + /** + * @return the regExp + */ + public String getRegExp() { + return regExp; + } + + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/types/RecordFilter.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/types/RecordFilter.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,76 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.logger.ant.types; + +import org.apache.tools.ant.types.DataType; + +/** + * Recorder Filter will be used to filter the ant logging output. + * + * To get the lines which matches the regular expression. + * + *
    + *      <hlm:recordfilter category="info" regexp="ERROR"/>
    + *      <hlm:recordfilter category="warn" regexp="^WARN"/>
    + * 
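    + *      The category attribute is optional, but regexp is required; the record task rejects
    + *      filters without one. Property references are also accepted in regexp and are expanded
    + *      by the record task before recording starts (the property name below is only illustrative):
    + *      <hlm:recordfilter category="error" regexp="${log.password.pattern}"/>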
    + * + * @ant.task name="Recordfilter" category="Logging". + * + */ + +public class RecordFilter extends DataType { + + private String category; + private String regExp; + + + /** + * Set category. + * @param category + * @ant.not-required + */ + public void setCategory(String category) { + this.category = category; + } + + /** + * Return the category. + * @return + */ + public String getCategory() { + return this.category; + } + + /** + * Sets the regExp. + * @param regExp + * @ant.required + */ + public void setRegExp(String regExp) { + this.regExp = regExp; + } + + + /** + * get the regExp. + * @return + */ + public String getRegExp() { + return this.regExp; + } + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/types/RecordFilterSet.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/types/RecordFilterSet.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,87 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.logger.ant.types; + +import java.util.Vector; + +import org.apache.tools.ant.BuildException; +import org.apache.tools.ant.types.DataType; +import org.apache.tools.ant.types.Reference; + +/** + * Recorder Filter set will be used to group the recorder filters to filter ant logging output. + * + * To get the lines which matches the regular expression. + * + *
    + *      <hlm:recordfilterset id="recordfilter.config">
    + *          <hlm:recordfilter category="error" regexp="Hello" />
    + *          <hlm:recordfilter category="warning" regexp="echo" />
    + *          <hlm:recordfilter category="info" regexp="ERROR" />
    + *      </hlm:recordfilterset>
    + *      
    + * 
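    + *      A set defined this way is normally pulled into a record task by reference, as in the
    + *      record usage example earlier in this patch:
    + *      <hlm:recordfilterset refid="recordfilter.config"/>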
    + * + * @ant.task name="Recordfilterset" category="Logging". + * + */ + +public class RecordFilterSet extends DataType { + + private Vector recordFilters = new Vector(); + + public RecordFilterSet() { + } + + /** + * Add the recordefilter type into recordfilterset. + * @param logFilter + */ + public void addRecordFilter(RecordFilter logFilter) { + if (!recordFilters.contains(logFilter)) { + recordFilters.add(logFilter); + } + } + + + /** + * return all the recorderfilters associated with current recorderfilterset. + * @return + */ + public Vector getAllFilters() { + Vector allFilters = new Vector(); + if (recordFilters.size() > 0) { + allFilters.addAll(recordFilters); + return allFilters; + } + Reference refId = getRefid(); + Object filterSetObject = null; + if (refId != null) { + try { + filterSetObject = refId.getReferencedObject(); + } catch ( Exception ex) { + throw new BuildException("Reference id of the record filter is not valid. " + ex.getMessage(), ex); + } + if (filterSetObject != null && filterSetObject instanceof RecordFilterSet) { + allFilters.addAll(((RecordFilterSet)filterSetObject).getAllFilters()); + return allFilters; + } + } + return allFilters; + } + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/types/Stage.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/types/Stage.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,106 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.logger.ant.types; + +import org.apache.tools.ant.types.DataType; + + +/** + * A Stage is a Data type which stores Stage information. + * + *

    + * A Stage is defined by setting three attributes: a name, a start target and an end target; both targets must be + * valid target names in the project. + * + *

    + * Usage: + * + *

    + *      <hlm:stage id="preparation" starttarget="stagetest" endtarget="stagetest"/>              
    + * 
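    + *
    + * A sketch of a stage that spans two different targets (the target names "compile-main" and
    + * "compile-post" are assumptions, not targets defined by this patch):
    + *
    + *      <hlm:stage id="compile" starttarget="compile-main" endtarget="compile-post"/>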
    + * + * @ant.task name="stage" category="Logging" + * + */ +public class Stage extends DataType { + + private String startTarget; + private String endTarget; + + public Stage() { + + } + /** + * Get the starting point of this {@link Stage}. + * + * @return the starting point of this {@link Stage}. + */ + public String getStartTarget() { + return this.startTarget; + } + + /** + * Set the starting target. + * + * @param start + * is the starting point to set. + * @ant.required + */ + public void setStartTarget(String startTarget) { + this.startTarget = startTarget; + } + + /** + * Get the end point of this {@link Stage}. + * + * @return the end point of this {@link Stage}. + * + */ + public String getEndTarget() { + return this.endTarget; + } + + /** + * Set the end target. + * + * @param end + * is the end point to set. + * @ant.required + */ + public void setEndTarget(String endTarget) { + this.endTarget = endTarget; + } + + /** + * Check is the start target set to current target. + * @param target + * @return + */ + public boolean isStartTarget ( String target ) { + return this.startTarget.equals( target ); + } + + /** Check is the end target set to current target. + * + * @param target + * @return + */ + public boolean isEndTarget ( String target ) { + return this.endTarget.equals( target ); + } + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/types/StageLogging.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/types/StageLogging.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,158 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.logger.ant.types; + +import org.apache.tools.ant.types.DataType; +import com.nokia.helium.logger.ant.listener.AntLoggingHandler; +import com.nokia.helium.logger.ant.listener.StatusAndLogListener; +import org.apache.log4j.Logger; + +/** + * A 'StageRecord' is a Data type which stores attributes for stage recording/logging. + * + * + * Usage: + *
    + * <hlm:stagerecord id="record.default" defaultoutput="${build.log.dir}/${build.id}_main.ant.log" loglevel="info" append="false"/>
    + *      
    + *                  
    + * <hlm:stagerecord id="record.prep"  
    + *                  stagerefid="preparation" 
    + *                  output="${build.log.dir}/${build.id}_prep.ant.log" 
    + *                  loglevel="info"
    + *                  append="false"/>
    + *                                  
    + * 
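    + *
    + * A hypothetical variant that appends to an existing log instead of overwriting it; the stage id
    + * "compile" and the output log name are assumptions:
    + *
    + * <hlm:stagerecord id="record.compile"
    + *                  stagerefid="compile"
    + *                  output="${build.log.dir}/${build.id}_compile.ant.log"
    + *                  loglevel="info"
    + *                  append="true"/>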
    + * + * + * @ant.task name="stagerecord" category="Logging" + */ +public class StageLogging extends DataType { + + private static boolean isAntLoggerRegistered; + private String logLevel = "info"; + private String logFile; + private String defaultLogFile; + private Boolean append; + private String stageRefId; + private Logger log = Logger.getLogger(StageLogging.class); + /** + * Constructor which will register the logging handler + */ + public StageLogging () { + if (!isAntLoggerRegistered) { + StatusAndLogListener.register(new AntLoggingHandler()); + log.debug("Registering stage record to StatusAndLogListener listener"); + isAntLoggerRegistered = true; + } + } + + /** + * Sets output log file name. + * @param outPut + * @ant.required + */ + + public void setOutput(String outPut) { + this.logFile = outPut; + } + + /** + * Returns output log file name. + * @return + */ + + public String getOutput() { + return this.logFile; + } + + /** + * Sets log level for respective stage. + * @param logLevel + * @ant.not-required + */ + + public void setLogLevel(String logLevel) { + this.logLevel = logLevel; + } + + /** + * Returns log level of respective stage. + * @return + */ + + public String getLogLevel() { + return this.logLevel; + } + + /** + * Get the name of this StageRefID. + * + * @return name of the Phase. + */ + public String getStageRefID() { + return this.stageRefId; + } + + /** + * Set the name of the StageRefID. + * + * @param name + * is the name to set. + * @ant.required + */ + public void setStageRefId(String name) { + this.stageRefId = name; + } + + /** + * Return default ant log file name. + * @return + */ + public String getDefaultOutput() { + return this.defaultLogFile; + } + + /** + * Set the default ant log name. + * @param name + * @ant.required + */ + public void setDefaultOutput(String name) { + this.defaultLogFile = name; + } + + /** + * Set append value. + * @param append + * @ant.not-required + */ + public void setAppend(boolean append) { + this.append = append ? Boolean.TRUE : Boolean.FALSE; + } + + /** + * Return the append value. + * @param append + * @return + */ + public Boolean getAppend() { + return this.append; + } + + +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/types/StageSummary.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/src/com/nokia/helium/logger/ant/types/StageSummary.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). + * All rights reserved. + * This component and the accompanying materials are made available + * under the terms of the License "Eclipse Public License v1.0" + * which accompanies this distribution, and is available + * at the URL "http://www.eclipse.org/legal/epl-v10.html". + * + * Initial Contributors: + * Nokia Corporation - initial contribution. + * + * Contributors: + * + * Description: + * + */ +package com.nokia.helium.logger.ant.types; + +import org.apache.tools.ant.types.DataType; + +import com.nokia.helium.logger.ant.listener.StageSummaryHandler; +import com.nokia.helium.logger.ant.listener.StatusAndLogListener; + +/** + * StageSummary is a Data type when set a build summary is + * displayed at the end of build process. + * + *
    + * Usage:
    + *       <hlm:stagesummary id="stage.summary" 
    + *          template="${template.dir}\build_stages_summary.txt.ftl"/>
    + * 
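    + *
    + * The template is a FreeMarker file; a sketch pointing at the build_stages_summary.txt.ftl template
    + * shipped in this module (the ${helium.dir} property is only an assumed way of locating it):
    + *
    + *       <hlm:stagesummary id="stage.summary"
    + *          template="${helium.dir}/external/helium-antlib/logging/src/templates/build_stages_summary.txt.ftl"/>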
    + * + * @ant.task name="stagesummary" category="Logging" + * + */ +public class StageSummary extends DataType { + + private static boolean isStageSummaryHandlerRegistered; + + private String template; + + public StageSummary () { + if ( !isStageSummaryHandlerRegistered ) { + StatusAndLogListener.register( new StageSummaryHandler() ); + isStageSummaryHandlerRegistered = true; + } + } + + /** + * Get the template used for displaying build stage summary. + * + * @return the template to display build stage summary. + */ + public String getTemplate () { + return template; + } + + /** + * Set the template to be used for displaying build stage summary. + * + * @param template + * the template to set + * @ant.required + */ + public void setTemplate ( String template ) { + this.template = template; + } +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/src/templates/build_stages_summary.txt.ftl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/src/templates/build_stages_summary.txt.ftl Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,35 @@ +<#-- +============================================================================ +Name : build_stages_summary.txt.ftl +Part of : Helium + +Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +All rights reserved. +This component and the accompanying materials are made available +under the terms of the License "Eclipse Public License v1.0" +which accompanies this distribution, and is available +at the URL "http://www.eclipse.org/legal/epl-v10.html". + +Initial Contributors: +Nokia Corporation - initial contribution. + +Contributors: + +Description: + +============================================================================ +--> + +*** BUILD STAGE SUMMARY *** + +<#assign count = 0> +<#list statusReports as report> +<#assign count = count + 1> +${count}) ${report["phaseName"]} +${""?left_pad(2)} Start Time : ${report["startTime"]} +${""?left_pad(2)} Duration : ${report["duration"]} +${""?left_pad(2)} Status : ${report["status"]} +<#if report["status"] == "FAILED"> +${""?left_pad(2)} Reason : ${report["reason"]} + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/tests/antunit/test_stageslogging.ant.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/tests/antunit/test_stageslogging.ant.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,80 @@ + + + + Helium Antlib Signal unittests. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/tests/bld.sh --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/tests/bld.sh Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,30 @@ +#!/bin/bash + +# +# Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +# All rights reserved. +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Nokia Corporation - initial contribution. +# +# Contributors: +# +# Description: +# + +if [ -f ~/.bashrc ] ; then + . ~/.bashrc +fi +MODULE_VERSION="$(module --version 2>&1)" +if [ "$?" 
== "0" ] ; then + module load "java/1.6.0" + module load "tww/ant/1.7.1" +fi +export TEMP="/tmp/$USER" + +export ANT_ARGS="-lib ../lib -lib ../../lib -lib ../../bin/helium-logging.jar -lib ../../antlibs -listener com.nokia.helium.logger.ant.listener.StatusAndLogListener" +ant $* diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/tests/build.bat --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/tests/build.bat Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,27 @@ +@echo off + +rem +rem Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +rem All rights reserved. +rem This component and the accompanying materials are made available +rem under the terms of the License "Eclipse Public License v1.0" +rem which accompanies this distribution, and is available +rem at the URL "http://www.eclipse.org/legal/epl-v10.html". +rem +rem Initial Contributors: +rem Nokia Corporation - initial contribution. +rem +rem Contributors: +rem +rem Description: +rem + +setlocal +if not defined JAVA_6_HOME ( +set TESTED_JAVA=C:\Apps\j2sdk_1.6.0_02 +) ELSE set TESTED_JAVA=%JAVA_6_HOME% +if exist %TESTED_JAVA% (set JAVA_HOME=%TESTED_JAVA%) +set ANT_ARGS=-lib %CD%\..\lib -lib %CD%\..\..\lib -lib %CD%\..\..\bin\helium-logging.jar -lib %CD%\..\..\antlibs -lib ..\..\bin\helium-core.jar -listener com.nokia.helium.logger.ant.listener.StatusAndLogListener +ant -Dant.executor.class=com.nokia.helium.core.ant.HeliumExecutor %* +endlocal + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/tests/build.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/tests/build.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,114 @@ + + + + Helium Antlib Logging unittests. + + + + + + + + + + + + + + + + + + + + + + + + + Before calling target + grace + + + + + + After calling target + + + + + + + + + + + + + + + + + + + + + + + + + + unix-password = ${unix.password} + + + ats-password = ${ats.password} + + + + + + + ${unix.password} + synergy + Hello... inside stagetest target ${display} + + + + Hello... inside creatbom target + + + + Hello... inside get-env target ${display} + + + + Hello... inside hello target ${display} + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/tests/build_stages_summary.txt.ftl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/tests/build_stages_summary.txt.ftl Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,35 @@ +<#-- +============================================================================ +Name : build_stages_summary.txt.ftl +Part of : Helium + +Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +All rights reserved. +This component and the accompanying materials are made available +under the terms of the License "Eclipse Public License v1.0" +which accompanies this distribution, and is available +at the URL "http://www.eclipse.org/legal/epl-v10.html". + +Initial Contributors: +Nokia Corporation - initial contribution. 
+ +Contributors: + +Description: + +============================================================================ +--> + +*** BUILD STAGE SUMMARY *** + +<#assign count = 0> +<#list statusReports as report> +<#assign count = count + 1> +${count}) ${report["phaseName"]} +${""?left_pad(2)} Start Time : ${report["startTime"]} +${""?left_pad(2)} Duration : ${report["duration"]} +${""?left_pad(2)} Status : ${report["status"]} +<#if report["status"] == "FAILED"> +${""?left_pad(2)} Reason : ${report["reason"]} + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/tests/prep-ccm-get-input.ant.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/tests/prep-ccm-get-input.ant.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,54 @@ + + + + Helium Antlib Logging unittests. + + + + + + + + + + + + + + + + + + Hello... Inside Prep target ${display} + + + + Hello... Inside ccm-get-input target + + + + Hello... Inside init target ${display} + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/logging/tests/stages_config.ant.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/logging/tests/stages_config.ant.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,46 @@ + + + + + Definitions of helium stages. + + + + + + + + + + + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/macros.ant.xml --- a/buildframework/helium/external/helium-antlib/macros.ant.xml Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/macros.ant.xml Wed Dec 23 19:29:07 2009 +0200 @@ -24,7 +24,7 @@ Helium Antlib build macro. - + @@ -71,7 +71,8 @@ - + +
    @@ -86,18 +87,67 @@ + + - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -115,60 +165,72 @@ + + JUnit test module @{name} + + + + + + - - - - - - + + - - + + + + + - - + + + + + + + - - - - + - + + - - + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/SQLFMPPLoader.java --- a/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/SQLFMPPLoader.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/SQLFMPPLoader.java Wed Dec 23 19:29:07 2009 +0200 @@ -34,6 +34,7 @@ import com.nokia.helium.metadata.db.MetaDataDb; import org.apache.log4j.Logger; + /** * Utility class to access the data from the database and used by FMPP * templates. @@ -52,11 +53,11 @@ * @see fmpp.tdd.DataLoader#load(fmpp.Engine, java.util.List) */ public Object load(Engine engine, List args) throws Exception { - log.debug("args.size:" + args.size()); + //log.debug("args.size:" + args.size()); java.util.ListIterator iter = args.listIterator(); int argsSize = args.size(); if (argsSize < 1) { - throw new Exception("input DB path should be provided"); + throw new Exception("Input DB path should be provided to load into FMPP."); } /* arg[0] - dbpath @@ -82,7 +83,7 @@ * @return returns the template model for the query */ public TemplateModel get(String query) { - log.debug("QueryModel:" + query); + //log.debug("QueryModel:" + query); return new QueryTemplateModel(query); } @@ -111,7 +112,7 @@ * @param query for which the template model needs to be returned. */ public QueryTemplateModel(String query) { - log.debug("query in SQLTemplateModel" + query); + //log.debug("query in SQLTemplateModel" + query); this.query = query; } @@ -121,9 +122,9 @@ */ public TemplateModel get(String key) { checkAndReadData(); - log.debug("QueryModel:" + key); + //log.debug("QueryModel:" + key); List dataList = indexMap.get(key); - log.debug("datalist size" + dataList.size()); + //log.debug("datalist size" + dataList.size()); if (dataList.size() == 1 ) { return new SimpleScalar((String)dataList.get(0)); } @@ -136,11 +137,11 @@ */ private void checkAndReadData() { if (!isDataRead) { - log.debug("isDataRead:" + isDataRead); + //log.debug("isDataRead:" + isDataRead); isDataRead = true; indexMap = metadataDb.getIndexMap(query); } - log.debug("indexmap size" + indexMap.size()); + //log.debug("indexmap size" + indexMap.size()); } /* @@ -196,7 +197,7 @@ * @return the iterator model from which the data is accessed. */ public TemplateModelIterator iterator() { - log.debug("iterator constructor called"); + //log.debug("iterator constructor called"); return new SQLTemplateModelIterator(query); } } @@ -218,15 +219,17 @@ public TemplateModel next() { SimpleHash simpleHash = null; try { - log.debug("checking any more element"); + //log.debug("checking any more element"); if (rowList != null && (count >= rowList.size())) { finished = true; } - log.debug("next:count:" + count); + //log.debug("next:count:" + count); simpleHash = new SimpleHash(rowList.get(count)); count ++; return simpleHash; } catch (Exception ex) { + // We are Ignoring the errors as no need to fail the build. 
+ log.debug("Iteration exception" + ex.getMessage()); ex.printStackTrace(); } return null; @@ -235,10 +238,10 @@ public boolean hasNext() { if (rowList == null || READ_LIMIT <= count) { if (!finished) { - log.debug("getting records"); + //log.debug("Getting records"); rowList = metadataDb.getRecords(query, READ_LIMIT, currentOffsetIndex * READ_LIMIT); count = 0; - log.debug("rowList.size : " + rowList.size()); + //log.debug("rowList.size : " + rowList.size()); if (rowList.size() == 0) { finished = true; } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/antlib.xml --- a/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/antlib.xml Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/antlib.xml Wed Dec 23 19:29:07 2009 +0200 @@ -23,6 +23,7 @@ + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/conditions/MetaDataLogCondition.java --- a/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/conditions/MetaDataLogCondition.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/conditions/MetaDataLogCondition.java Wed Dec 23 19:29:07 2009 +0200 @@ -81,8 +81,7 @@ if (severity == null) throw new BuildException("'severity' attribute is not defined"); - this.log("Looking for severity '" + severity + "' under '" - + fileName.getAbsolutePath() + "'"); + //this.log("Looking for severity '" + severity + "' under '" + fileName.getAbsolutePath() + "'"); MetaDataDb.Priority prty = null; if (severity.equalsIgnoreCase("ERROR")) { diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/taskdefs/MetaDataDelete.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/taskdefs/MetaDataDelete.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,129 @@ +/* + * Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). + * All rights reserved. + * This component and the accompanying materials are made available + * under the terms of the License "Eclipse Public License v1.0" + * which accompanies this distribution, and is available + * at the URL "http://www.eclipse.org/legal/epl-v10.html". + * + * Initial Contributors: + * Nokia Corporation - initial contribution. + * + * Contributors: + * + * Description: + * + */ + +package com.nokia.helium.metadata.ant.taskdefs; + +import org.apache.tools.ant.types.FileSet; +import org.apache.tools.ant.DirectoryScanner; +import org.apache.tools.ant.BuildException; +import org.apache.tools.ant.Task; +import java.util.Vector; +import java.util.ArrayList; +import java.util.List; +import org.apache.log4j.Logger; +import com.nokia.helium.metadata.db.*; +import java.util.Date; + +/** + * This task provide a way to delete the data from db for a log file set. + * + *
    + * Example 1:
    + * <metadatadelete database="compile_log.db">
    + *     <fileset casesensitive="false" file="sbs.log.file"/>
    + * </metadatadelete>
    + * 
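    + *
    + * Example 2 (a sketch, not taken from this patch): delete the entries of several logs at once and keep
    + * the build going if the delete fails; ${build.log.dir} and the include pattern are assumptions:
    + * <metadatadelete database="compile_log.db" failonerror="false">
    + *     <fileset dir="${build.log.dir}" includes="*_compile*.log"/>
    + * </metadatadelete>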
    + * + * @ant.task name="metadatadelete" category="Metadata" + */ +public class MetaDataDelete extends Task { + + private static Logger log = Logger.getLogger(MetaDataDelete.class); + + private String database; + + private boolean failOnError = true; + + private Vector fileSetList = new Vector(); + + /** + * Helper function to set the database parameter + * + * @ant.required + */ + public void setDatabase(String dbFile) { + database = dbFile; + } + + public void setFailOnError(String failNotify) { + if (failNotify.equals("false")) { + failOnError = false; + } + } + + /** + * Updates the list of filelist from the input fileset. + * @param fileSetList input fileset list + * @return the matched files including the base dir. + */ + private List getFileListFromFileSet() { + List fileList = new ArrayList(); + for (FileSet fs : fileSetList) { + DirectoryScanner ds = fs.getDirectoryScanner(getProject()); + String[] includedFiles = ds.getIncludedFiles(); + for ( String file : includedFiles ) { + fileList.add(file); + log.debug("includedfiles: " + file); + } + } + log.debug("fileList.size" + fileList.size()); + return fileList; + } + + /** + * Adds the fileset (list of input log files to be processed). + * @param fileSet fileset to be added + * + */ + public void add(FileSet fileSet) { + fileSetList.add(fileSet); + } + + /** + * Helper function to get the database + * + */ + public String getDatabase() { + return database; + } + + + @Override + public void execute() { + MetaDataDb metadataDb = null; + try { + log.debug("Initializing DB: " + database + "to delete"); + log("time before removing entries from db" + new Date()); + metadataDb = new MetaDataDb(database); + metadataDb.removeEntries(getFileListFromFileSet()); + log("time after removing entries from db" + new Date()); + } catch (BuildException ex1) { + if (failOnError) { + throw ex1; + } + } catch (Exception ex) { + if (failOnError) { + throw new BuildException("Failed during writing data to db"); + } + } finally { + log.debug("finalizing DB: " + database); + if (metadataDb != null) { + metadataDb.finalizeDB(); + } + } + } +} \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/taskdefs/MetaDataRecord.java --- a/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/taskdefs/MetaDataRecord.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/taskdefs/MetaDataRecord.java Wed Dec 23 19:29:07 2009 +0200 @@ -22,10 +22,9 @@ import org.apache.tools.ant.Task; import java.util.Vector; import java.util.Iterator; -//import java.util.ArrayList; import org.apache.log4j.Logger; -//import org.apache.tools.ant.types.FileSet; import com.nokia.helium.metadata.db.*; +import java.util.Date; /** * This task provide a way to record the data in the Database. 
@@ -114,6 +113,7 @@ log.debug("Initializing DB: " + database); metadataDb = new MetaDataDb(database); log.debug("Parsing the input and writing to DB"); + log("time before recording to db" + new Date()); for ( MetaDataInput metadataInput : metadataList ) { boolean removed = false; Iterator inputIterator = metadataInput.iterator(); @@ -125,6 +125,7 @@ metadataDb.addLogEntry(logEntry); } } + log("time after recording to db" + new Date()); log.debug("Successfully writen to DB"); } catch (BuildException ex1) { if (failOnError) { diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/AbldLogMetaDataInput.java --- a/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/AbldLogMetaDataInput.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/AbldLogMetaDataInput.java Wed Dec 23 19:29:07 2009 +0200 @@ -60,36 +60,41 @@ public AbldLogMetaDataInput() { } - public boolean isEntryAvailable() { + /** + * Function to check from the input stream if is there any entries available. + * @return true if there are any entry available otherwise false. + */ + public boolean isEntryCreated(File currentFile) { String exceptions = ""; - int currentFileIndex = getCurrentFileIndex(); int lineNumber = getLineNumber(); BufferedReader currentReader = getCurrentReader(); - log.debug("Getting next set of log entries for Abld Input"); - //log.debug("currentFileIndex" + currentFileIndex); - List fileList = getFileList(); - int fileListSize = fileList.size(); - log.debug("fileList.size" + fileListSize); - while (currentFileIndex < fileListSize) { - try { - //log.debug("currentfileindex while getting file name: " + currentFileIndex); - File currentFile = fileList.get(currentFileIndex); - if (currentReader == null) { - lineNumber = 0; - setLineNumber(lineNumber); - log.debug("Current abld log file name:" + currentFile); - log.info("Processing file: " + currentFile); - currentReader = new BufferedReader(new FileReader(currentFile)); - setCurrentReader(currentReader); - } - String logText = null; - while ((logText = currentReader.readLine()) != null) { - lineNumber ++; - setLineNumber(lineNumber); - logText = logText.replaceFirst("'^\\s*\\[.+?\\]\\s*", ""); - if (logText.startsWith("++ Finished at")) { - //log.debug("matching finished regex"); + try { + if (currentReader == null) { + lineNumber = 0; + setLineNumber(lineNumber); + log.debug("Current abld log file name:" + currentFile); + log.debug("Processing file: " + currentFile); + currentReader = new BufferedReader(new FileReader(currentFile)); + setCurrentReader(currentReader); + } + String logText = null; + while ((logText = currentReader.readLine()) != null) { + lineNumber ++; + setLineNumber(lineNumber); + logText = logText.replaceFirst("'^\\s*\\[.+?\\]\\s*", ""); + if (logText.startsWith("++ Finished at")) { + if (currentComponent != null && !entryCreated) { + addEntry("DEFAULT", currentComponent, currentFile.toString(), + 0, "" ); + entryCreated = true; + recordText = false; + return true; + } + entryCreated = false; + } else if (logText.startsWith("=== ")) { + Matcher finishMatch = abldFinishedPattern.matcher(logText); + if (finishMatch.matches()) { if (currentComponent != null && !entryCreated) { addEntry("DEFAULT", currentComponent, currentFile.toString(), 0, "" ); @@ -98,69 +103,46 @@ return true; } entryCreated = false; - } else if (logText.startsWith("=== ")) { - 
//log.debug("trying to match with finish pattern ======="); - Matcher finishMatch = abldFinishedPattern.matcher(logText); - if (finishMatch.matches()) { - if (currentComponent != null && !entryCreated) { - addEntry("DEFAULT", currentComponent, currentFile.toString(), - 0, "" ); - entryCreated = true; - recordText = false; - return true; - } - entryCreated = false; - } else { - //log.debug("trying to match the start pattern"); - Matcher componentMatch = abldComponentPattern.matcher(logText); - if (componentMatch.matches()) { - //log.debug("matched abldComponentPattern"); - currentComponent = componentMatch.group(2); - recordText = true; - } + } else { + Matcher componentMatch = abldComponentPattern.matcher(logText); + if (componentMatch.matches()) { + currentComponent = componentMatch.group(2); + recordText = true; + } - Matcher startMatch = abldStartedPattern.matcher(logText); - if (startMatch.matches()) { - //log.debug("matched abldStartedPattern"); - currentComponent = startMatch.group(1); - recordText = true; - } + Matcher startMatch = abldStartedPattern.matcher(logText); + if (startMatch.matches()) { + currentComponent = startMatch.group(1); + recordText = true; } - } else { - if (recordText) { - String severity = getSeverity(logText); - if (severity != null) { - //log.debug("severity:" + severity); - //log.debug("currentFile:" + currentFile); - //log.debug("lineNumber:" + lineNumber); - //log.debug("logText:" + logText); - entryCreated = true; - addEntry(severity, currentComponent, currentFile.toString(), - lineNumber, logText ); - return true; - } + } + } else { + if (recordText) { + String severity = getSeverity(logText); + if (severity != null) { + entryCreated = true; + addEntry(severity, currentComponent, currentFile.toString(), + lineNumber, logText ); + return true; } } } - currentReader.close(); - currentReader = null; - setCurrentReader(currentReader); - currentFileIndex ++; - setCurrentFileIndex(currentFileIndex); - //log.debug("currentfileindex: " + currentFileIndex); - //log.debug("fileListSize: " + fileListSize); - } catch (Exception ex) { - log.debug("Exception in AbldLogMetadata", ex); - try { - currentReader.close(); - } catch ( IOException iex) { - log.debug("Exception in closing reader"); - } - currentReader = null; - setCurrentReader(null); - exceptions = exceptions + ex.getMessage() + "\n"; - return false; } + currentReader.close(); + currentReader = null; + setCurrentReader(currentReader); + } catch (Exception ex) { + log.debug("Exception in AbldLogMetadata", ex); + try { + currentReader.close(); + } catch ( IOException iex) { + // We are Ignoring the errors as no need to fail the build. 
+ log.debug("Exception in closing reader", iex); + } + currentReader = null; + setCurrentReader(null); + exceptions = exceptions + ex.getMessage() + "\n"; + return false; } if (!exceptions.equals("")) { throw new BuildException(exceptions); diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/AntLogMetaDataInput.java --- a/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/AntLogMetaDataInput.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/AntLogMetaDataInput.java Wed Dec 23 19:29:07 2009 +0200 @@ -52,82 +52,69 @@ private boolean entryCreated; + /** + * Constructor + */ public AntLogMetaDataInput() { } - public boolean isEntryAvailable() { + /** + * Function to check from the input stream if is there any entries available. + * @return true if there are any entry available otherwise false. + */ + public boolean isEntryCreated(File currentFile) { String exceptions = ""; - int currentFileIndex = getCurrentFileIndex(); int lineNumber = getLineNumber(); BufferedReader currentReader = getCurrentReader(); - log.debug("Getting next set of log entries for Ant Input"); - //log.debug("currentFileIndex" + currentFileIndex); - List fileList = getFileList(); - //log.debug("is filelist empty" + fileList.isEmpty()); - int fileListSize = fileList.size(); - log.debug("fileList.size" + fileListSize); - while (currentFileIndex < fileListSize) { - try { - lineNumber ++; - setLineNumber(lineNumber); - log.debug("currentfileindex while getting file name: " + currentFileIndex); - File currentFile = fileList.get(currentFileIndex); - if (currentReader == null) { - setLineNumber(0); - log.debug("Current Text log file name:" + currentFile); - log.info("Processing file: " + currentFile); - currentReader = new BufferedReader(new FileReader(currentFile)); - setCurrentReader(currentReader); - } - String logText = ""; - while ((logText = currentReader.readLine()) != null) { - //log.debug("logtext : " + logText + " line-number: " + lineNumber); - //log.debug("logtext : " + logText + " line-number: " + lineNumber); - Matcher match = antTargetPattern.matcher(logText); - if (match.matches()) { - if (currentComponent != null && !entryCreated) { - addEntry("DEFAULT", currentComponent, currentFile.toString(), - 0, "" ); - entryCreated = true; - return true; - } - entryCreated = false; - currentComponent = match.group(1); - //log.debug("currentComponent:" + currentComponent); - } - logText = logText.replaceFirst("^[ ]*\\[.+?\\][ ]*", ""); - String severity = getSeverity(logText); - if (severity != null) { -// log.debug("severity:" + severity); -// log.debug("currentFile:" + currentFile); -// log.debug("lineNumber:" + lineNumber); -// log.debug("logText:" + logText); + try { + if (currentReader == null) { + setLineNumber(0); + log.debug("Current Text log file name:" + currentFile); + log.debug("Processing file: " + currentFile); + currentReader = new BufferedReader(new FileReader(currentFile)); + setCurrentReader(currentReader); + } + String logText = ""; + while ((logText = currentReader.readLine()) != null) { + Matcher match = antTargetPattern.matcher(logText); + if (match.matches()) { + if (currentComponent != null && !entryCreated) { + addEntry("DEFAULT", currentComponent, currentFile.toString(), + 0, "" ); entryCreated = true; - // If there is no current component which means - // it is a redirected output, using file 
name as comp name - if (currentComponent == null ) { - currentComponent = currentFile.getName(); - } - addEntry(severity, currentComponent, currentFile.toString(), - lineNumber, logText ); - logText = ""; return true; } + entryCreated = false; + currentComponent = match.group(1); } + logText = logText.replaceFirst("^[ ]*\\[.+?\\][ ]*", ""); + String severity = getSeverity(logText); + if (severity != null) { + entryCreated = true; + // If there is no current component which means + // it is a redirected output, using file name as comp name + if (currentComponent == null ) { + currentComponent = currentFile.getName(); + } + addEntry(severity, currentComponent, currentFile.toString(), + lineNumber, logText ); + logText = ""; + return true; + } + } + currentReader.close(); + currentReader = null; + setCurrentReader(currentReader); + if (isAdditionalEntry()) { + return true; + } + } catch (Exception ex) { + log.debug("Exception in AntLogMetadata", ex); + try { currentReader.close(); - currentReader = null; - setCurrentReader(currentReader); - currentFileIndex ++; - setCurrentFileIndex(currentFileIndex); - //log.debug("currentfileindex: " + currentFileIndex); - //log.debug("fileListSize: " + fileListSize); - } catch (Exception ex) { - log.debug("Exception in AntLogMetadata", ex); - try { - currentReader.close(); - } catch ( IOException iex) { - log.debug("Exception in closing reader"); - } + } catch ( IOException iex) { + // We are Ignoring the errors as no need to fail the build. + log.debug("Exception in closing reader", iex); currentReader = null; setCurrentReader(null); exceptions = exceptions + ex.getMessage() + "\n"; @@ -137,7 +124,6 @@ if (!exceptions.equals("")) { throw new BuildException(exceptions); } - return false; } } \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/LogMetaDataInput.java --- a/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/LogMetaDataInput.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/LogMetaDataInput.java Wed Dec 23 19:29:07 2009 +0200 @@ -43,7 +43,11 @@ private static Logger log = Logger.getLogger(LogMetaDataInput.class); private Vector fileSetList = new Vector(); + + private int currentFileIndex; + private boolean entryAddedForLog; + private List fileList; private Vector metadataFilterSets = new Vector(); private Vector completeFilterList; @@ -54,7 +58,6 @@ public LogMetaDataInput() { - //initRecordInfo(); } /** @@ -83,7 +86,11 @@ add(filterSet); return filterSet; } - + + /** + * Helper function to return all the filters associated with this metadata input + * @return all the filters merged based on the order of definition. + */ private Vector getCompleteFilters()throws Exception { Vector allFilter = new Vector(); for (MetaDataFilterSet filterSet : metadataFilterSets) { @@ -111,6 +118,10 @@ return fileList; } + /** + * Internal function to get the entry + * @return the top most entry in the list. + */ private MetaDataDb.LogEntry getEntry()throws Exception { if (logEntries != null && logEntries.size() > 0) { return logEntries.remove(0); @@ -119,12 +130,65 @@ } } + /** + * Helper function to return the file list of the metadata input + * @return file list of this metadata input. 
+ */ protected List getFileList() { return fileList; } - abstract boolean isEntryAvailable() throws Exception ; + protected File getCurrentFile() { + List fileList = getFileList(); + return fileList.get(currentFileIndex); + } + + /** + * Function to check from the input stream if is there any entries available. Implemented by the sub classes. + * @return true if there are any entry available otherwise false. + */ + boolean isEntryAvailable() throws Exception { + try { + int fileListSize = getFileList().size(); + while (currentFileIndex < fileListSize) { + boolean entryCreated = false; + File currentFile = getCurrentFile(); + entryCreated = isEntryCreated(currentFile); + if (entryCreated) { + if (!entryAddedForLog) { + entryAddedForLog = true; + } + return entryCreated; + } + if (!entryAddedForLog) { + // If no entry, then logfile is added to the database. + addEntry("default", "general", getCurrentFile().toString(), -1, "", -1); + currentFileIndex ++; + return true; + } + currentFileIndex ++; + } + } catch (Exception ex1 ) { + log.info("Exception processing stream: " + ex1.getMessage()); + log.debug("exception while parsing the stream", ex1); + throw ex1; + } + return false; + } + + /** + * Function to check from the input stream if is there any entries available. + * @param file for which the contents needs to be parsed for errors + * @return true if there are any entry available otherwise false. + */ + abstract boolean isEntryCreated(File currentFile) throws Exception; + + /** + * Returns the severity matches for the log text + * @param log text for which the severity needs to be identified. + * @return the severity of the input text + */ protected String getSeverity(String logText) throws Exception { try { if (completeFilterList == null) { @@ -133,7 +197,6 @@ for ( MetaDataFilter filter : completeFilterList) { Pattern pattern = filter.getPattern(); if ((pattern.matcher(logText)).matches()) { - //log.debug("pattern matched"); return filter.getPriority(); } } @@ -144,8 +207,32 @@ return null; } + + /** + * Helper function to store the entry which will be added to the database + * @param priority for the entry + * @param component of the entry + * @param logpath of the entry + * @param lineNo of the entry + * @param log text message of the entry + */ protected void addEntry(String priority, String component, String logPath, int lineNo, String logText) throws Exception { + addEntry(priority, component, logPath, lineNo, logText, -1); + } + + + /** + * Helper function to store the entry which will be added to the database + * @param priority for the entry + * @param component of the entry + * @param logpath of the entry + * @param lineNo of the entry + * @param log text message of the entry + * @param elapsedTime of the component + */ + protected void addEntry(String priority, String component, String logPath, int lineNo, + String logText, float elapsedTime) throws Exception { //log.debug("adding entry to the list"); File logPathFile = new File(logPath.trim()); String baseDir = logPathFile.getParent(); @@ -153,9 +240,17 @@ String uniqueLogPath = baseDir + "/" + logPathFile.getName(); logEntries.add(new MetaDataDb.LogEntry( logText, priority, - component, uniqueLogPath, lineNo)); + component, uniqueLogPath, lineNo, elapsedTime)); } - + + /** + * Looks for the text which matches the filter regular expression and adds the entries to the database. 
+ * @param logTextInfo text message to be searched with filter regular expressions + * @param priority for the entry + * @param currentComponent of the logtextInfo + * @param logpath fo;e fpr wjocj tje text info has to be looked for with filter expression + * @param lineNumber of the text message + */ protected boolean findAndAddEntries(String logTextInfo, String currentComponent, String logPath, int lineNumber)throws Exception { boolean entryAdded = false; @@ -164,20 +259,28 @@ for (int i = 0; i < logText.length; i++) { severity = getSeverity(logText[i]); if ( severity != null) { - log.debug("found match: ----" + logText[i]); addEntry(severity, currentComponent, logPath, - i + lineNumber, logText[i] ); + i + lineNumber, logText[i]); if (!entryAdded) { entryAdded = true; } } } return entryAdded; - } + } + + /** + * Log text are processed based on iterator. When ever the entry is found the entry is returned + * and the function is called again for further entries. + * @return the iterator object for the metadata input. + */ public Iterator iterator() { return metadataInputIterator; } + /** + * Class to process the files as stream and add the entries todb + */ public class MetaDataInputIterator implements Iterator { public boolean hasNext() { if (fileList == null) { @@ -187,23 +290,29 @@ } } if (logEntries.size() > 0) { - log.debug("returning from existing entries"); return true; } boolean retValue = false; try { retValue = isEntryAvailable(); } catch ( Exception ex) { - throw new BuildException("Exception in metadata input."); + throw new BuildException("Exception while analysing errors from the log:", ex); } return retValue; } + /** + * Helper function to remove entries if any + */ public void remove() { } + + /** + * Gets the next entry, which has been identified + * @return log entry to be added to the database. + */ public MetaDataDb.LogEntry next() { - //log.debug("getting next element: " + logEntry); MetaDataDb.LogEntry entry = null; try { entry = getEntry(); diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/MetaDataFilter.java --- a/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/MetaDataFilter.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/MetaDataFilter.java Wed Dec 23 19:29:07 2009 +0200 @@ -104,6 +104,10 @@ pattern = Pattern.compile(regex); } + /** + * Helper function to return the pattern + * @return the pattern of this filter. 
+ */ public Pattern getPattern() { return pattern; } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/MetaDataFilterSet.java --- a/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/MetaDataFilterSet.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/MetaDataFilterSet.java Wed Dec 23 19:29:07 2009 +0200 @@ -22,6 +22,8 @@ import java.util.*; import org.apache.tools.ant.types.Reference; import org.apache.tools.ant.types.DataType; +import org.apache.tools.ant.BuildException; +import org.apache.tools.ant.Project; import org.apache.log4j.Logger; import fmpp.models.CsvSequence; import freemarker.template.TemplateSequenceModel; @@ -92,14 +94,15 @@ try { filterSetObject = refId.getReferencedObject(); } catch ( Exception ex) { - log.info("Reference id of the filter is not valid"); - throw ex; + log.debug("Reference id of the metadata filter is not valid.", ex); + throw new BuildException("Reference id of the metadata filter is not valid " + ex.getMessage(), ex); } if (filterSetObject != null && filterSetObject instanceof MetaDataFilterSet) { allFilters.addAll(((MetaDataFilterSet)filterSetObject).getAllFilters()); return allFilters; } - throw new Exception ("filterset object is not instance of MetaDataFilterSet"); + log.debug("Filterset object is not instance of MetaDataFilterSet"); + throw new Exception ("Filterset object is not instance of MetaDataFilterSet"); } // Add any nested filtersets for (MetaDataFilterSet filterSet : filterSets) { @@ -108,7 +111,11 @@ return removeInvalidFilters(allFilters); } - + + /** + * Helper function called to remove any invalid filters + * @return only the valid filters + */ private Vector removeInvalidFilters(Vector filterList) { ListIterator iter = filterList.listIterator(); while (iter.hasNext()) { @@ -116,8 +123,7 @@ String priority = filter.getPriority(); String regEx = filter.getRegex(); if (priority == null || regEx == null) { - log.info("Warning: invalid filter removed"); - log.debug("Warning: some filter is invalid removing it"); + log("Warning: some filter is invalid removing it", Project.MSG_WARN); iter.remove(); } } @@ -166,7 +172,10 @@ } } - + /** + * Helper function to add the filters from the csv files + * @param csv file path from which the filters needs to be added. + */ private void addCSVFromFile(String csvPath) throws Exception { CsvSequence csvs = new CsvSequence(); csvs.setSeparator(','); @@ -174,22 +183,22 @@ try { csvs.load(new FileReader(new File(filterFile))); } catch (java.io.FileNotFoundException fex) { - log.error("File not found:" + filterFile); + log.debug("Metadata CSV file not found:", fex); throw fex; } catch (fmpp.util.StringUtil.ParseException pex) { - log.error("parser exception"); + log.debug("FMPP not able parse the Metadata CSV file. ", pex); throw pex; } catch (java.io.IOException iex) { - log.error("I/O exception"); + log.debug("Metadata I/O Exception. " + iex.getMessage(), iex); throw iex; } int size = 0; try { - log.debug("filter CSV record size: " + csvs.size()); + log.debug("filter CSV record size: " + csvs.size()); size = csvs.size(); } catch (Exception ex) { - log.info("Warning: filter parsing error"); - log.debug("Exception in processing csv file"); + // We are Ignoring the errors as no need to fail the build. 
+ log.debug("Exception in processing csv file " + filterFile, ex); } for (int i = 0; i < size; i++) { try { @@ -197,8 +206,8 @@ .get(i); int modelSize = model.size(); if (modelSize != 3 ) { - log.debug("csv row size:" + size); - throw new Exception("filter format is invalid"); + log.debug("Metadata CSV file filter file format is invalid. It has row size " + size); + throw new Exception("Metadata CSV file filter file format is invalid. It has row size " + size); } MetaDataFilter filter = new MetaDataFilter(); filter.setPriority(model.get(0).toString()); @@ -206,8 +215,8 @@ filter.setDescription(model.get(2).toString()); filters.add(filter); } catch (Exception ex) { - log.info("Warning: filter parsing error"); - log.debug("Exception in processing csv file"); + // We are Ignoring the errors as no need to fail the build. + log.debug("Exception in processing Metadate csv file " + filterFile, ex); } } } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/MetadataSource.java --- a/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/MetadataSource.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/MetadataSource.java Wed Dec 23 19:29:07 2009 +0200 @@ -28,7 +28,7 @@ /** * This type define an input source that will be communicated to the notifiers. - * Not used, deprecated and an xml and html file is generated fof signal modules. + * Not used, deprecated and an xml and html file is generated for signal modules. * @ant.type name="metadatasource" category="Metadata" * @deprecated * diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/PolicyLogMetaDataInput.java --- a/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/PolicyLogMetaDataInput.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/PolicyLogMetaDataInput.java Wed Dec 23 19:29:07 2009 +0200 @@ -46,11 +46,18 @@ private Map currentAttributeMap; - + + /** + * Constructor + */ public PolicyLogMetaDataInput() { } + /** + * Helper function to return the attributes of the stream reader + * @returns the attributes as a map. + */ private Map getAttributes(XMLStreamReader streamReader) { int count = streamReader.getAttributeCount() ; if (count > 0 ) { @@ -65,16 +72,24 @@ } - + /** + * Function to process the start event of xml stream callback. + * @param streamReader: the input stream reader which contains the xml data to be parsed for recording data. + * @return true if there are any element to be added to the database. + */ boolean startElement (XMLStreamReader streamReader) { String tagName = streamReader.getLocalName(); - //log.debug("startElement: " + tagName); if (tagName.equalsIgnoreCase("error")) { currentAttributeMap = getAttributes(streamReader); } return false; } + /** + * Function to process the end event of xml stream callback. + * @param streamReader: the input stream reader which contains the xml data to be parsed for recording data. + * @return true if there are any element to be added to the database. + */ boolean endElement(XMLStreamReader streamReader) throws Exception { boolean retValue = false; try { @@ -111,7 +126,11 @@ } return retValue; } - + + /* Function to process the characters event of xml stream callback. 
+ * @param streamReader: the input stream reader which contains the xml data to be parsed for recording data. + * @return true if there are any element to be added to the database. + */ boolean characters (XMLStreamReader streamReader) { return false; } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/SBSLogMetaDataInput.java --- a/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/SBSLogMetaDataInput.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/SBSLogMetaDataInput.java Wed Dec 23 19:29:07 2009 +0200 @@ -19,10 +19,12 @@ import java.io.*; import java.util.*; -//import javax.xml.parsers.SAXParser; -//import javax.xml.parsers.SAXParserFactory; +import java.util.regex.Pattern; +import java.util.regex.Matcher; import org.apache.log4j.Logger; import javax.xml.stream.XMLStreamReader; +import javax.xml.stream.XMLInputFactory; +import javax.xml.stream.events.XMLEvent; /** @@ -33,7 +35,7 @@ * <metadatafilterset filterfile="common.csv" /> * </hlm:metadatafilterset> * - * <hlm:sbsmetadatainput> + * <hlm:sbsmetadatainput cleanLogFile="cleanlog.file" > * <fileset dir="${project.dir}/../data/"> * <include name="*compile.log"/> * </fileset> @@ -44,21 +46,497 @@ */ public class SBSLogMetaDataInput extends XMLLogMetaDataInput { + private static final String SPECIAL_CASE_REG_EX = "(make.exe|make): \\*\\*\\*.*(/.*)(_exe|_dll|_pdd|_ldd|_kext|_lib)/.*"; + + private static final String DRIVE_LETTER_REGEX = "(([a-z]|[A-Z]):(\\\\|/))(.*)(/bld\\.inf)"; + private Logger log = Logger.getLogger(SBSLogMetaDataInput.class); private String currentComponent; + private float currentElapsedTime; + private String logTextInfo = ""; + private HashMap> generalTextEntries = new HashMap>(); + + private CategorizationHandler categorizationHandler; + private int lineNumber; private boolean recordText; + private File cleanLogFile; + private boolean additionalEntry; + + private Pattern specialCasePattern; + + private HashMap componentTimeMap = new HashMap(); + + /** + * Constructor + */ public SBSLogMetaDataInput() { + specialCasePattern = Pattern.compile(SPECIAL_CASE_REG_EX); } - private String getComponent(XMLStreamReader streamReader) { + + /** + * Removes the bld inf and the drive letter from the text + * @param text in which the bld.inf and drive letter to be removed + * @return updated string. + */ + static String removeDriveAndBldInf(String text) { + Matcher matcher = (Pattern.compile(DRIVE_LETTER_REGEX)).matcher(text); + if (matcher.matches()) { + return matcher.group(4); + } else { + return text; + } + } + + /** + * Removes the bld inf and the drive letter from the text + * @param text in which the bld.inf and drive letter to be removed + * @return updated string. + */ + static String getComponent(XMLStreamReader streamReader) { + String currentComponent = getAttribute("bldinf", streamReader); + if ( currentComponent != null && currentComponent.equals("")) { + return null; + } + if (currentComponent != null ) { + currentComponent = removeDriveAndBldInf(currentComponent); + } + return currentComponent; + } + + /** + * Generic function to return the attribute value of an attribute from stream + * @param attribute for which the value from xml stream to be returned. + * @return the attribute value of an attribute. 
+ */ + static String getAttribute(String attribute, XMLStreamReader streamReader) { + int count = streamReader.getAttributeCount() ; + for (int i = 0 ; i < count ; i++) { + if ( streamReader.getAttributeLocalName(i).equals(attribute) ) { + return streamReader.getAttributeValue(i); + } + } + return null; + } + + /** + * Helper function to set the clean log file + * @param logFile which is the clean log file to process for additional categories + */ + public void setCleanLogFile(File logFile) { + cleanLogFile = logFile; + } + + /** + * Function to process the characters event of xml stream callback. + * @param streamReader: the input stream reader which contains the xml data to be parsed for recording data. + * @return true if there are any element to be added to the database. + */ + public boolean characters (XMLStreamReader streamReader) { + if (recordText) { + logTextInfo += streamReader.getText(); + } else { + if (!additionalEntry) { + additionalEntry = true; + } + String cdataText = streamReader.getText().trim(); + String [] textList = cdataText.split("\n"); + for (String text : textList) { + Matcher specialCaseMatcher = specialCasePattern.matcher(text); + List entryList = null; + if (specialCaseMatcher.matches()) { + String componentName = specialCaseMatcher.group(2); + String extension = specialCaseMatcher.group(3); + String componentWithTarget = (componentName.substring(1) + "." + + extension.substring(1)).toLowerCase(); + CategoryEntry newEntry = new CategoryEntry(text, componentWithTarget , + "error", streamReader.getLocation().getLineNumber(), getCurrentFile().toString()); + entryList = generalTextEntries.get(componentWithTarget); + if ( entryList == null) { + entryList = new ArrayList(); + generalTextEntries.put(componentWithTarget, entryList); + } + entryList.add(newEntry); + } else { + String componentWithTarget = null; + int indexMakeString = text.indexOf( "make: ***" ); + int indexSlash = text.lastIndexOf( "/" ); + if (indexMakeString != -1 && indexSlash != -1) { + int indexExt = ( indexSlash + 1) + text.substring(indexSlash).indexOf( "." ); + if ( indexExt != -1 ) { + componentWithTarget = (text.substring(indexSlash,indexExt + 3)).toLowerCase(); + } + } + if (componentWithTarget != null) { + CategoryEntry newEntry = new CategoryEntry(text, componentWithTarget , + "error", streamReader.getLocation().getLineNumber(), getCurrentFile().toString()); + entryList = generalTextEntries.get(componentWithTarget); + if (entryList == null) { + entryList = new ArrayList(); + generalTextEntries.put(componentWithTarget, entryList); + } + entryList.add(newEntry); + } + + } + } + } + return false; + } + + /** + * Function to process the start event of xml stream callback. + * @param streamReader: the input stream reader which contains the xml data to be parsed for recording data. + * @return true if there are any element to be added to the database. 
+ */ + public boolean startElement (XMLStreamReader streamReader) throws Exception { + try { + String tagName = streamReader.getLocalName(); + if (tagName.equalsIgnoreCase("buildlog")) { + log.debug("starting with buildlog"); + } + if (tagName.equalsIgnoreCase("recipe") ) { + lineNumber = streamReader.getLocation().getLineNumber(); + currentComponent = getComponent(streamReader); + recordText = true; + } else if (tagName.equalsIgnoreCase("error") + || tagName.equalsIgnoreCase("warning")) { + lineNumber = streamReader.getLocation().getLineNumber(); + currentComponent = getComponent(streamReader); + recordText = true; + } else if (tagName.equalsIgnoreCase("whatlog") ) { + currentComponent = getComponent(streamReader); + } else if (tagName.equalsIgnoreCase("time")) { + currentElapsedTime = Float.valueOf(getAttribute("elapsed", streamReader)).floatValue(); + if (currentComponent != null) { + TimeEntry timeObject = componentTimeMap.get(currentComponent); + if (timeObject == null) { + timeObject = new TimeEntry(currentElapsedTime, getCurrentFile().toString()); + componentTimeMap.put(currentComponent, timeObject); + } else { + timeObject.addElapsedTime(currentElapsedTime); + } + } + } + } catch (Exception ex) { + log.debug("exception in startelement",ex); + throw ex; + } + return false; + } + + /** + * Checks whether is there any additional entry. During log parsing, all the text which are not part of any tag + * and are part of CDATA are recorded in a list and checked in this function for any matching errors and processed + * for their categorization. + * @return true if there are any element to be added to the database. + */ + public boolean isAdditionalEntry() { + try { + if (!componentTimeMap.isEmpty()) { + Set componentSet = componentTimeMap.keySet(); + for (String component : componentSet) { + + TimeEntry entry = componentTimeMap.get(component); + addEntry("default", component, entry.getFilePath(), -1, + null, entry.getElapsedTime()); + componentTimeMap.remove(component); + return true; + } + } + if (cleanLogFile != null ) { + if (categorizationHandler == null ) { + log.info("initializing categorization handler"); + categorizationHandler = + new CategorizationHandler(cleanLogFile, generalTextEntries); + } + } + if (categorizationHandler != null && categorizationHandler.hasNext()) { + try { + CategoryEntry entry = categorizationHandler.getNext(); + if (entry != null) { + addEntry(entry.getSeverity(), entry.getCategory(), entry.getLogFile(), + entry.getLineNumber(), entry.getText()); + return true; + } + } catch (Exception ex) { + log.debug("Exception during categorization handler", ex); + return false; + } + } + } catch (Exception ex) { + log.debug("Exception in finding additional entry", ex); + } + return false; + } + + /** + * Function to process the end event of xml stream callback. + * @param streamReader: the input stream reader which contains the xml data to be parsed for recording data. + * @return true if there are any element to be added to the database. 
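A small sketch of the per-component elapsed-time accumulation that startElement() performs for <time elapsed="..."> elements; generics are written out explicitly, and the component name and timings are hypothetical.

import java.util.HashMap;
import java.util.Map;

public class ElapsedTimeDemo {
    public static void main(String[] args) {
        // Accumulates elapsed seconds per component, as componentTimeMap does.
        Map<String, Float> componentTime = new HashMap<String, Float>();
        String component = "sf/app/organizer/group";
        float[] recipeTimes = {1.5f, 0.25f, 3.0f}; // elapsed="..." values
        for (float elapsed : recipeTimes) {
            Float current = componentTime.get(component);
            componentTime.put(component,
                current == null ? elapsed : current + elapsed);
        }
        System.out.println(componentTime.get(component)); // 4.75
    }
}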
+ */ + public boolean endElement(XMLStreamReader streamReader) throws Exception { + try { + String tagName = streamReader.getLocalName(); + if (tagName.equalsIgnoreCase("recipe")) { + recordText = false; + if (logTextInfo != null) { + if (currentComponent == null) { + currentComponent = "general"; + } + boolean entryCreated = findAndAddEntries(logTextInfo, currentComponent, + getCurrentFile().toString(), lineNumber); + logTextInfo = ""; + if ( entryCreated) { + return true; + } + } + } else if (tagName.equalsIgnoreCase("error") + || tagName.equalsIgnoreCase("warning")) { + recordText = false; + if (currentComponent == null) { + currentComponent = "general"; + } + addEntry(tagName, currentComponent, getCurrentFile().toString(), lineNumber, + logTextInfo); + logTextInfo = ""; + return true; + } else if (tagName.equalsIgnoreCase("whatlog") ) { + addEntry("default", currentComponent, getCurrentFile().toString(), -1, + ""); + return true; + } + } catch (Exception ex) { + log.debug("Exception while processing for sbs metadata input", ex); + throw ex; + } + return false; + } +} + +/* This class stores the temporary Time entry which is being recorded for each data + * at the end of the build and during isAdditionalEntry function, the time for the component + * is updated in the database. + */ +class TimeEntry { + + private float elapsedTime; + private String filePath; + + /** + * Constructor to store the elapsedTime and the path which are to be updated to the database. + * @param elapsedTime: time duration of the component. + * @path of the component. + */ + public TimeEntry(float elapsedTime, String path) { + elapsedTime = elapsedTime; + filePath = path; + } + + + /** + * Helper function to add time to the previous elapsed time. + * @param time to be added to the elapsed timet. + */ + public void addElapsedTime(float time) { + elapsedTime += time; + } + + /** + * Helper function to return the elapsed time + * @return elapsed time of this time entry. + */ + public float getElapsedTime() { + return elapsedTime; + } + + /** + * Helper function to return the file path of this entry + * @return path of this time entry. + */ + public String getFilePath() { + return filePath; + } +} +/* This class stores the temporary category entry which is processed during + * at the end of the build and categorized and written to the database. + */ +class CategoryEntry { + + private String text; + private int lineNumber; + private String fileName; + private String severity; + private String category; + + + /** + * Constructor of the category entry + * @param txt - text message of the entry + * @param ctgry - category of the entry + * @param svrty - severity of this entry + * @param lnNo - line number of this entry + * @param flName - name of the file being processed. + * @return path of this time entry. + */ + public CategoryEntry(String txt, String ctgry, + String svrty, int lnNo, String flName) { + text = txt; + lineNumber = lnNo; + fileName = flName; + severity = svrty; + category = "general"; + if (ctgry != null) { + category = ctgry; + } + } + + /** + * Helper function to set the category + * @param set the category + */ + void setCategory(String ctgry) { + category = ctgry; + } + + /** + * Helper function to return the category + * @return the category of this entry. 
+ */ + String getCategory() { + return category; + } + + /** + * Returns the logfile of this entry + * @return logfile of this entry + */ + String getLogFile() { + return fileName; + } + + /** + * Helper function returns the severity of this entry + * @return severity of this entry + */ + String getSeverity() { + return severity; + } + + /** + * Helper function returns the line number of this entry + * @return the line number of this entry. + */ + + int getLineNumber() { + return lineNumber; + } + + /** + * Helper function returns the text message of this entry + * @return text message of this entry + */ + String getText() { + return text; + } + +} + +/* This class handles the categorization of scanlog errors based on the clean log output + * from raptor. + */ + class CategorizationHandler { + + private int count; + + private String currentComponent; + private boolean isInFileTag; + + private HashMap> categoryList; + + private List currentList; + + private XMLInputFactory xmlInputFactory; + + private XMLStreamReader xmlStreamReader; + + private Logger log = Logger.getLogger(CategorizationHandler.class); + + /** + * Constructor + * @param clean log file input using which the CDATA text are categorized + * @param list of entries to be categorized + */ + public CategorizationHandler(File cleanLogFile, HashMap> ctgMap) { + categoryList = ctgMap; + Set categorySet = categoryList.keySet(); + if (cleanLogFile != null ) { + try { + xmlInputFactory = XMLInputFactory.newInstance(); + xmlStreamReader = xmlInputFactory.createXMLStreamReader(cleanLogFile.toString(), + new BufferedInputStream(new FileInputStream(cleanLogFile))); + } catch ( Exception ex) { + log.debug("exception while initializing stax processor",ex); + } + } + } + + /** + * Checks whether is there any entry (by checking for categorization of the recorded CDATA text) + * @return true if there any entry that are being categorized. + */ + public boolean hasNext() { + boolean generalEntriesStatus = categoryList != null && !categoryList.isEmpty(); + boolean currentListStatus = currentList != null && ! currentList.isEmpty(); + return generalEntriesStatus || currentListStatus; + } + + /** + * Process the startelement event of XML Stream from clean log. + * @param streamReader clean log xml stream reader to be processed + * @return true if there are any entry to be added. + */ + public boolean startElement(XMLStreamReader streamReader) throws Exception { + String tagName = streamReader.getLocalName(); + if (tagName.equals("clean")) { + currentComponent = getCategory(streamReader); + if (currentComponent != null) { + currentComponent = SBSLogMetaDataInput.removeDriveAndBldInf(currentComponent); + } + } + if (tagName.equals("file")) { + isInFileTag = true; + } + return false; + } + + /** + * Process the endelement event of XML Stream from clean log. + * @param streamReader clean log xml stream reader to be processed + * @return true if there are any entry to be added. + */ + public boolean endElement (XMLStreamReader streamReader) throws Exception { + String tagName = streamReader.getLocalName(); + if (tagName.equals("file")) { + isInFileTag = false; + } + return false; + } + + /** + * Internal function to find bld inf from the component + * @param streamReader clean log xml stream reader to be processed + * @return the bld.inf attribute. 
+ */ + private String getCategory(XMLStreamReader streamReader) { int count = streamReader.getAttributeCount() ; for (int i = 0 ; i < count ; i++) { if ( streamReader.getAttributeLocalName(i).equals("bldinf") ) { @@ -68,70 +546,110 @@ return null; } - public boolean characters (XMLStreamReader streamReader) { - if (recordText) { - logTextInfo += streamReader.getText(); + /** + * Internal function to find the CDATA text of the file attribute. + * @param streamReader clean log xml stream reader to be processed + * @return the CDATA text of tag. + */ + private String characters(XMLStreamReader xmlStreamReader) { + if (isInFileTag) { + return xmlStreamReader.getText().toLowerCase(); } - return false; + return null; } - - public boolean startElement (XMLStreamReader streamReader) throws Exception { - try { - String tagName = streamReader.getLocalName(); - if (tagName.equalsIgnoreCase("buildlog")) { - log.debug("starting with buildlog"); + + /** + * Gets the entry which matches the input path. For each line of tag attribute, the entry list + * is compared with that and if there is any match, then it returns the entry from the list, which + * is being mtached. + * @param path for which matching entry is looked for. + * @return entry which matched the path from the clean log file. + */ + private List getEntry(String path) { + int index = 0; + Set categorySet = categoryList.keySet(); + for (String key : categorySet) { + if (path.indexOf(key) != -1) { + List entry = categoryList.get(key); + categoryList.remove(key); + return entry; } - if (tagName.equalsIgnoreCase("recipe") ) { - lineNumber = streamReader.getLocation().getLineNumber(); - //log.debug(" startElement: receipe tag"); - recordText = true; - //currentComponent = attributes.getValue("bldinf"); - currentComponent = getComponent(streamReader); - } else if (tagName.equalsIgnoreCase("error") - || tagName.equalsIgnoreCase("warning")) { - lineNumber = streamReader.getLocation().getLineNumber(); - recordText = true; - } else if (tagName.equalsIgnoreCase("whatlog") ) { - currentComponent = getComponent(streamReader); - } - } catch (Exception ex) { - log.debug("exception in startelement",ex); - throw ex; } - return false; + return null; + } + + /** + * Internal function to update the category entries of the list. + * @param categoryList for which the category. + * @param category which is to be updated to the list. + */ + private void updateCategoryEntries(List categoryList, String category) { + for (CategoryEntry entry : categoryList) { + entry.setCategory(category); + } } - - public boolean endElement(XMLStreamReader streamReader) throws Exception { + /** + * Gets the next entry from the stream based on categorization. + * @return the category entry which is identified as categorized entry. 
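A standalone sketch of the lookup that getEntry() performs: the first recorded target name found inside a clean-log path decides the category. The map contents and path here are hypothetical, but follow the lower-cased name.ext form built in characters().

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class CleanLogMatchDemo {
    public static void main(String[] args) {
        // Pending CDATA errors keyed by the target they mention.
        Map<String, List<String>> pending = new HashMap<String, List<String>>();
        pending.put("srchuiresviewplugin.dll",
            Arrays.asList("make: *** ... Error 1"));
        // One lower-cased path from a <file> element of the clean log.
        String path = "x:/epoc32/release/armv5/udeb/srchuiresviewplugin.dll";
        for (String key : pending.keySet()) {
            if (path.indexOf(key) != -1) {
                System.out.println("categorized under " + key);
                break;
            }
        }
    }
}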
+ */ + public CategoryEntry getNext() throws Exception { try { - String tagName = streamReader.getLocalName(); - if (tagName.equalsIgnoreCase("recipe")) { - recordText = false; - if (logTextInfo != null) { - //log.debug("endElement: lineNumber: " + lineNumber); - boolean entryCreated = findAndAddEntries(logTextInfo, currentComponent, - getCurrentFile().toString(), lineNumber); - logTextInfo = ""; - if ( entryCreated) { - //log.debug("Entry creating end element"); - return true; + boolean entryCreated = false; + if (currentList != null && !currentList.isEmpty()) { + CategoryEntry entry = currentList.get(0); + currentList.remove(0); + return entry; + } + if (xmlStreamReader != null ) { + while (xmlStreamReader.hasNext()) { + int eventType = xmlStreamReader.next(); + switch (eventType) { + case XMLEvent.START_ELEMENT: + entryCreated = startElement(xmlStreamReader); + break; + case XMLEvent.END_ELEMENT: + endElement(xmlStreamReader); + break; + case XMLEvent.CHARACTERS: + String path = characters(xmlStreamReader); + if (path != null ) { + currentList = getEntry(path); + if (currentList != null && !currentList.isEmpty()) { + if (currentComponent != null) { + updateCategoryEntries(currentList, currentComponent); + CategoryEntry entry = (CategoryEntry)currentList.remove(0); + return entry; + } + } + } + break; + default: + break; } } - } else if (tagName.equalsIgnoreCase("error") - || tagName.equalsIgnoreCase("warning")) { - recordText = false; - addEntry(tagName, "general", getCurrentFile().toString(), lineNumber, - logTextInfo); - logTextInfo = ""; - return true; - } else if (tagName.equalsIgnoreCase("whatlog") ) { - addEntry("default", currentComponent, getCurrentFile().toString(), -1, - ""); + if (xmlStreamReader != null) { + close(); + } + } + } catch ( Exception ex) { + log.debug("exception in categorization",ex); + throw ex; + } + return null; + } + /** + * Internal function to close the clean log file stream + */ + private void close() { + try { + if (xmlStreamReader != null) { + xmlStreamReader.close(); + xmlStreamReader = null; } } catch (Exception ex) { - log.debug("Exception while processing for sbs metadata input", ex); - throw ex; + log.debug("exception while closing xml stream",ex); } - return false; + } } \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/TextLogMetaDataInput.java --- a/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/TextLogMetaDataInput.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/TextLogMetaDataInput.java Wed Dec 23 19:29:07 2009 +0200 @@ -43,87 +43,95 @@ private Logger log = Logger.getLogger(TextLogMetaDataInput.class); - private int currentFileIndex; - private int lineNumber; private BufferedReader currentReader; + /** + * Constructor + */ public TextLogMetaDataInput() { } - - protected void setCurrentFileIndex(int fileIndex) { - currentFileIndex = fileIndex; - } + /** + * Helper function to set the line number + * @param lineNo to be set for the entry + */ protected void setLineNumber(int lineNo) { lineNumber = lineNo; } - protected int getCurrentFileIndex() { - return currentFileIndex; - } - + /** + * Helper function to return the line number of this entry. + * @return line number of the entry. 
+ */ protected int getLineNumber() { return lineNumber; } + /** + * Helper function to set the reader of this stream + * @param reader to process the stream. + */ protected void setCurrentReader(BufferedReader reader) { currentReader = reader; } - + + /** + * Function to check if is there any additionaly entry. This is being used for example during streaming + * recorded and at the end of streaming use the recorded data to add any additional entry. Used by + * @return true if there are any additional entries which are to be recorded in the database. + */ + public boolean isAdditionalEntry() { + return false; + } + + /** + * Helper function to return the lbuffer reader for the current meta data input + * @return buffer reader object for the current metadata input. + */ protected BufferedReader getCurrentReader() { return currentReader; } - public boolean isEntryAvailable() throws Exception { - //Todo: Optimize the function so the most of the statements could - //be reused with other metadata input. + public boolean isEntryCreated(File currentFile) throws Exception { String exceptions = ""; - //log.debug("Getting next set of log entries for Text Input"); - //log.debug("currentFileIndex" + currentFileIndex); - List fileList = getFileList(); - //log.debug("is filelist empty" + fileList.isEmpty()); - int fileListSize = fileList.size(); - while (currentFileIndex < fileListSize) { - try { - //log.debug("currentfileindex while getting file name: " + currentFileIndex); - File currentFile = fileList.get(currentFileIndex); - if (currentReader == null) { - lineNumber = 0; - log.debug("Current Text log file name:" + currentFile); - log.info("Processing file: " + currentFile); - currentReader = new BufferedReader(new FileReader(currentFile)); + try { + if (currentReader == null) { + lineNumber = 0; + log.debug("Current Text log file name:" + currentFile); + log.debug("Processing file: " + currentFile); + currentReader = new BufferedReader(new FileReader(currentFile)); + } + String logText = null; + while ((logText = currentReader.readLine()) != null) { + logText = logText.replaceFirst("^[ ]*\\[.+?\\][ ]*", ""); + String severity = getSeverity(logText); + if (severity != null) { + addEntry(severity, currentFile.getName(), currentFile.toString(), + lineNumber, logText ); + lineNumber ++; + return true; } - String logText = null; - while ((logText = currentReader.readLine()) != null) { - //log.debug("logtext : " + logText + " line-number: " + lineNumber); - //log.debug("logtext : " + logText + " line-number: " + lineNumber); - logText = logText.replaceFirst("^[ ]*\\[.+?\\][ ]*", ""); - String severity = getSeverity(logText); - if (severity != null) { -// log.debug("severity:" + severity); -// log.debug("currentFile:" + currentFile); -// log.debug("lineNumber:" + lineNumber); -// log.debug("logText:" + logText); - - addEntry(severity, currentFile.getName(), currentFile.toString(), - lineNumber, logText ); - lineNumber ++; - return true; - } - lineNumber ++; - } + } + currentReader.close(); + currentReader = null; + if (isAdditionalEntry()) { + return true; + } + } catch (Exception ex) { + log.debug("Exception in TextLogMetadata", ex); + try { currentReader.close(); currentReader = null; - currentFileIndex ++; - } catch (Exception ex) { - log.debug("Exception in TextLogMetadata", ex); + } catch (Exception ex1) { + // We are Ignoring the errors as no need to fail the build. 
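A standalone sketch of the line scanning done in isEntryCreated() above: the leading Ant-style [task] prefix is stripped with the same replaceFirst() call, then each line is classified. The severity check here is only a stand-in for getSeverity(), which in the real class is driven by the configured metadata filter set; the log lines are made up.

import java.io.BufferedReader;
import java.io.StringReader;

public class TextLogScanDemo {
    public static void main(String[] args) throws Exception {
        String log = "  [exec] warning: foo.h not found\n  [exec] all done\n";
        BufferedReader reader = new BufferedReader(new StringReader(log));
        String line;
        int lineNumber = 0;
        while ((line = reader.readLine()) != null) {
            // Strip the "[task]" prefix exactly as TextLogMetaDataInput does.
            String text = line.replaceFirst("^[ ]*\\[.+?\\][ ]*", "");
            // Stand-in severity check (hypothetical rule).
            if (text.startsWith("warning:")) {
                System.out.println("warning at line " + lineNumber + ": " + text);
            }
            lineNumber++;
        }
        reader.close();
    }
}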
+ log.debug("Exception in TextLogMetadata", ex1); try { currentReader.close(); } catch ( IOException iex) { - log.info("exception in closing reader"); + // We are Ignoring the errors as no need to fail the build. log.debug("Exception in closing reader", iex); } currentReader = null; diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/XMLLogMetaDataInput.java --- a/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/XMLLogMetaDataInput.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/ant/types/XMLLogMetaDataInput.java Wed Dec 23 19:29:07 2009 +0200 @@ -19,12 +19,9 @@ import java.io.*; import java.util.*; -//import javax.xml.parsers.SAXParser; -//import javax.xml.parsers.SAXParserFactory; import org.apache.log4j.Logger; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLStreamReader; -//import javax.xml.stream.XMLStreamException; import javax.xml.stream.events.XMLEvent; @@ -40,20 +37,28 @@ private XMLStreamReader xmlStreamReader; - private int currentFileIndex; + private boolean inParsing; + + /** + * Constructor + */ public XMLLogMetaDataInput() { try { + inParsing = true; xmlInputFactory = XMLInputFactory.newInstance(); xmlInputFactory.setProperty(XMLInputFactory.IS_REPLACING_ENTITY_REFERENCES,Boolean.TRUE); xmlInputFactory.setProperty(XMLInputFactory.IS_SUPPORTING_EXTERNAL_ENTITIES,Boolean.FALSE); xmlInputFactory.setProperty(XMLInputFactory.IS_COALESCING , Boolean.TRUE); } catch (Exception ex) { - ex.printStackTrace(); + // We are Ignoring the errors as no need to fail the build. + log.debug("Exception while initializing stax processing",ex); } } - + /** + * Closes the xml stream + */ private void close() { try { if (xmlStreamReader != null) { @@ -61,106 +66,83 @@ xmlStreamReader = null; } } catch (Exception ex) { - log.info("Exception whil closing xml stream" + ex.getMessage()); - log.debug("exception while closing xml stream",ex); + // We are Ignoring the errors as no need to fail the build. + log.debug("Exception while closing xml stream", ex); } } - - protected File getCurrentFile() { - List fileList = getFileList(); - return fileList.get(currentFileIndex); - } - - boolean isEntryAvailable() throws Exception { - //log.debug("Getting next set of log entries for xml Input"); - //log.debug("currentFileIndex" + currentFileIndex); - int fileListSize = getFileList().size(); - //log.debug("fileList.size" + fileListSize); - //if ( isEntryCreatedForRemainingText() ) { - // log.debug("Entry creating from remaining text"); - // return true; - //} - try { - while (currentFileIndex < fileListSize) { - boolean entryCreated = false; - File currentFile = getCurrentFile(); - if (xmlStreamReader == null) { - log.info("Processing file: " + currentFile); - xmlStreamReader = xmlInputFactory.createXMLStreamReader( - currentFile.toString(), new BufferedInputStream(new FileInputStream(currentFile))); - //First the START_DOCUMENT is the first event directly pointed to. 
- } - int eventType = xmlStreamReader.getEventType(); - while (xmlStreamReader.hasNext()) { - eventType = xmlStreamReader.next(); - switch (eventType) { - case XMLEvent.START_ELEMENT: - //log.debug("XMLEvent:START_ELEMENT"); - entryCreated = startElement(xmlStreamReader); - break; - case XMLEvent.END_ELEMENT: - //log.debug("XMLEvent:END_ELEMENT"); - entryCreated = endElement(xmlStreamReader); - //log.debug("XMLEvent:END_ELEMENT: entryCreated: " +entryCreated); - break; - case XMLEvent.PROCESSING_INSTRUCTION: - //log.debug("XMLEvent:PI_DATA"); - //printPIData(xmlr); - break; - case XMLEvent.CHARACTERS: - //log.debug("XMLEvent:chacacters"); - entryCreated = characters(xmlStreamReader); - break; - case XMLEvent.COMMENT: - log.debug("XMLEvent:COMMENT"); - break; - case XMLEvent.START_DOCUMENT: - log.debug("XMLEvent:START_DOCUMENT"); - break; - case XMLEvent.END_DOCUMENT: - log.debug("XMLEvent:END_DOCUMENT"); - break; - case XMLEvent.ENTITY_REFERENCE: - log.debug("XMLEvent:ENTITY_REFERENCE"); - break; - case XMLEvent.ATTRIBUTE: - log.debug("XMLEvent:ATTRIBUTE"); - break; - case XMLEvent.DTD: - log.debug("XMLEvent:DTD"); - break; - case XMLEvent.CDATA: - log.debug("XMLEvent:CDATA"); - break; - case XMLEvent.SPACE: - log.debug("XMLEvent:chacacters"); - break; - default: - break; - } - if ( entryCreated) { - return true; - } + /** + * Function to check from the input stream if is there any entries available. + * @param file for which the contents needs to be parsed for errors + * @return true if there are any entry available otherwise false. + */ + boolean isEntryCreated(File currentFile) throws Exception { + boolean entryCreated = false; + if (inParsing ) { + if (xmlStreamReader == null) { + log.debug("Processing file: " + currentFile); + xmlStreamReader = xmlInputFactory.createXMLStreamReader( + currentFile.toString(), new BufferedInputStream(new FileInputStream(currentFile))); + } + int eventType = xmlStreamReader.getEventType(); + while (xmlStreamReader.hasNext()) { + eventType = xmlStreamReader.next(); + switch (eventType) { + case XMLEvent.START_ELEMENT: + entryCreated = startElement(xmlStreamReader); + break; + case XMLEvent.END_ELEMENT: + entryCreated = endElement(xmlStreamReader); + break; + case XMLEvent.CHARACTERS: + entryCreated = characters(xmlStreamReader); + break; + default: + break; } - if (xmlStreamReader != null) { - close(); + if ( entryCreated) { + return true; } - currentFileIndex ++; } - } catch (Exception ex1 ) { - log.info("Exception processing xml stream: " + ex1.getMessage()); - log.debug("exception while parsing the stream", ex1); - close(); + if (xmlStreamReader != null) { + close(); + } + inParsing = false; + } + if (isAdditionalEntry()) { + return true; } return false; } - + /** + * Function to check if is there any additionaly entry. This is being used for example during streaming + * recorded and at the end of streaming use the recorded data to add any additional entry. Used by + * @return true if there are any additional entries which are to be recorded in the database. + */ + public boolean isAdditionalEntry() { + return false; + } + + /** + * Function implemented by the subclasses to process the start event of xml stream callback. + * @param streamReader: the input stream reader which contains the xml data to be parsed for recording data. + * @return true if there are any element to be added to the database. 
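A minimal, self-contained StAX loop of the kind isEntryCreated() runs over each log file, dispatching on start, character and end events; the inline document stands in for a real sbs log.

import java.io.ByteArrayInputStream;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamReader;
import javax.xml.stream.events.XMLEvent;

public class StaxLoopDemo {
    public static void main(String[] args) throws Exception {
        String xml = "<buildlog><warning>missing export</warning></buildlog>";
        XMLInputFactory factory = XMLInputFactory.newInstance();
        factory.setProperty(XMLInputFactory.IS_COALESCING, Boolean.TRUE);
        XMLStreamReader reader = factory.createXMLStreamReader(
            new ByteArrayInputStream(xml.getBytes("UTF-8")));
        while (reader.hasNext()) {
            int eventType = reader.next();
            switch (eventType) {
            case XMLEvent.START_ELEMENT:
                System.out.println("start: " + reader.getLocalName());
                break;
            case XMLEvent.CHARACTERS:
                System.out.println("text : " + reader.getText());
                break;
            case XMLEvent.END_ELEMENT:
                System.out.println("end  : " + reader.getLocalName());
                break;
            default:
                break;
            }
        }
        reader.close();
    }
}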
+ */ abstract boolean startElement (XMLStreamReader streamReader) throws Exception ; + /** + * Function implemented by the subclasses to process the end event of xml stream callback. + * @param streamReader: the input stream reader which contains the xml data to be parsed for recording data. + * @return true if there are any element to be added to the database. + */ abstract boolean endElement(XMLStreamReader streamReader) throws Exception; - + + /** + * Function implemented by the subclasses to process the characters event of xml stream callback. + * @param streamReader: the input stream reader which contains the xml data to be parsed for recording data. + * @return true if there are any element to be added to the database. + */ abstract boolean characters (XMLStreamReader streamReader); } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/db/MetadataDb.java --- a/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/db/MetadataDb.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/metadata/src/com/nokia/helium/metadata/db/MetadataDb.java Wed Dec 23 19:29:07 2009 +0200 @@ -24,10 +24,10 @@ import java.sql.Statement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; -//import java.sql.DatabaseMetaData; + import org.apache.log4j.Logger; import org.apache.tools.ant.BuildException; -import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.ArrayList; import java.util.Map; @@ -45,19 +45,23 @@ private static final int LOG_ENTRY_CACHE_LIMIT = 500; - //private static final int RECORD_LIMIT recordLimit = 5000; - + private static final int DB_SCHEMA_VERSION = 1; + + private static final String[] INIT_TABLES = { + "CREATE TABLE schema (version INTEGER default " + DB_SCHEMA_VERSION + ")", "CREATE TABLE metadata (priority_id INTEGER, component_id INTEGER, line_number INTEGER, data TEXT, logpath_id INTEGER)", "CREATE TABLE component (id INTEGER PRIMARY KEY,component TEXT, logPath_id INTEGER, UNIQUE (logPath_id,component))", "CREATE TABLE priority (id INTEGER PRIMARY KEY,priority TEXT)", - "CREATE TABLE logfiles (id INTEGER PRIMARY KEY, path TEXT)" + "CREATE TABLE logfiles (id INTEGER PRIMARY KEY, path TEXT)", + "CREATE TABLE componenttime (cid INTEGER PRIMARY KEY, time DOUBLE default 0, UNIQUE (cid))" }; private static final String INSERT_METADATA_ENTRY = "INSERT INTO metadata VALUES(?, ?, ?, ?, ?)"; private static final String INSERT_LOGENTRY = "INSERT or IGNORE INTO logfiles VALUES(?, ?)"; private static final String INSERT_PRIORITYENTRY = "INSERT INTO priority VALUES(?, ?)"; - private static final String INSERT_COMPONENTENTRY = "INSERT or IGNORE INTO component VALUES((SELECT max(id) FROM component)+1, ?, ?) "; + private static final String INSERT_COMPONENTENTRY = "INSERT or IGNORE INTO component VALUES(?, ?, ?) 
";; + private static final String INSERT_COMPONENT_TIME = "INSERT or IGNORE INTO componenttime VALUES(?, ?)"; private String dbPath; @@ -72,6 +76,7 @@ private PreparedStatement insertMetaDataEntryStmt; private PreparedStatement insertLogEntryStmt; private PreparedStatement insertComponentStmt; + private PreparedStatement insertComponentTimeStmt; private int entryCacheSize; @@ -90,18 +95,47 @@ } catch (java.lang.ClassNotFoundException e) { - log.debug("No JDBC Driver found"); throw new BuildException("JDBC Driver could not be found"); } synchronized (MetaDataDb.class) { // See if the database needs to be initialized boolean initializeDatabase = false; - if (!new File(dbPath).exists()) + File dbFile = new File(dbPath); + if (!dbFile.exists()) { initializeDatabase = true; + } else { + try { + log.debug("checking for schema version of db"); + initializeConnection(); + Statement stmt = connection.createStatement(); + ResultSet rs = stmt.executeQuery("select version from schema"); + int version = -1; + if ( rs.next()) { + version = rs.getInt(1); + } + rs.close(); + stmt.close(); + log.debug("schema version of db:" + version); + if (version != DB_SCHEMA_VERSION) { + log.debug("Schema Not matched deleting db file"); + dbFile.delete(); + initializeDatabase = true; + } + finalizeConnection(); + } catch (SQLException ex) { + try { + finalizeConnection(); + } catch (SQLException ex1) { + throw new BuildException("Exception while finalizing Metadata database. ", ex1); + } + // We are Ignoring the errors as no need to fail the build. + log.debug("Exception checking schema for db", ex); + dbFile.delete(); + initializeDatabase = true; + } } - try { initializeConnection(); @@ -123,8 +157,8 @@ statement.addBatch("INSERT INTO priority (priority) VALUES (\"" + priorityValues[i] + "\")"); } + statement.addBatch("INSERT INTO schema (version) VALUES (\"" + DB_SCHEMA_VERSION + " \")"); statement.addBatch("create unique index logfile_unique_1 on logfiles (path)"); - //statement.addBatch("create unique index component_unique_1 on component (component)"); int[] returnCodes = statement.executeBatch(); connection.commit(); @@ -135,8 +169,7 @@ } catch (SQLException e) { - log.debug("problem initializing database",e); - throw new BuildException("Problem initializing database"); + throw new BuildException("Problem while initializing Metadata database. 
", e); } } } @@ -146,7 +179,7 @@ { // The values assigned to these enums should match the // automatically assigned values created in the database table - FATAL(1), ERROR(2), WARNING(3), INFO(4), REMARK(5), DEFAULT(6); + FATAL(1), ERROR(2), WARNING(3), INFO(4), REMARK(5), DEFAULT(6), CRITICAL(7); private final int value; Priority(int value) { @@ -163,6 +196,11 @@ }; + /** + * Helper class to store the log entry , used to write to the database + * + * @param databasePath The path to the database + */ public static class LogEntry { private String text; @@ -175,18 +213,36 @@ private String logPath; + private float elapsedTime; + + /** + * Constructor for the helper class + */ public LogEntry(String text, Priority priority, String component, - String logPath, int lineNumber) + String logPath, int lineNumber, float time) { this.text = text; this.priority = priority; this.component = component; this.lineNumber = lineNumber; this.logPath = logPath; + this.elapsedTime = time; } + /** + * Constructor for the helper class + */ + public LogEntry(String text, Priority priority, String component, + String logPath, int lineNumber) + { + this(text, priority, component, logPath, lineNumber, -1); + } + + /** + * Constructor for the helper class + */ public LogEntry(String text, String priorityTxt, String component, String logPath, - int lineNumber) throws Exception + int lineNumber, float time) throws Exception { Priority prty = null; String prtyText = priorityTxt.trim().toLowerCase(); @@ -202,6 +258,8 @@ prty = Priority.REMARK; } else if (prtyText.equals("default")) { prty = Priority.DEFAULT; + } else if (prtyText.equals("critical")) { + prty = Priority.CRITICAL; } else { log.debug("Error: priority " + prtyText + " is not acceptable by metadata and set to Error"); prty = Priority.ERROR; @@ -214,8 +272,23 @@ this.component = component; this.lineNumber = lineNumber; + this.elapsedTime = time; } - + + /** + * Constructor for the helper class + */ + public LogEntry(String text, String priorityTxt, String component, String logPath, + int lineNumber) throws Exception + { + this(text, priorityTxt, component, logPath, lineNumber, -1); + } + + /** + * Helper function to return to getLogPath + * @ + */ + public String getLogPath() { return logPath; @@ -241,6 +314,10 @@ { return priority; } + + public double getElapsedTime() { + return elapsedTime; + } public void setPriority(Priority priority) { @@ -268,7 +345,6 @@ public void finalizeStatements() throws SQLException { if (statementsInitialized) { if ( entryCacheSize > 0) { - //log.debug("writing to database"); entryCacheSize = 0; writeLogDataToDB(); } @@ -291,8 +367,8 @@ finalizeConnection(); } } catch (SQLException ex) { - log.debug("exception while finalizing the db", ex); - //throw ex; + // We are Ignoring the errors as no need to fail the build. + log.debug("Exception while finalizing the Metadata database. 
", ex); } } @@ -302,8 +378,7 @@ */ //Note: Always the query should be in "/" format only public Map> getIndexMap(String query) { - Map> indexMap = new HashMap>(); - log.debug("sql query" + query); + Map> indexMap = new LinkedHashMap>(); try { initializeConnection(); Statement stmt = connection.createStatement(); @@ -319,12 +394,13 @@ int type = rsmd.getColumnType(i); if (type == java.sql.Types.INTEGER ) { data = "" + rs.getInt(i); + } else if (type == java.sql.Types.DOUBLE ) { + data = "" + rs.getDouble(i); } else { data = rs.getString(i); } - log.debug("data:" + data); if ( i == 1) { - key = data; + key = data; } else { dataList.add(data); } @@ -335,8 +411,8 @@ stmt.close(); finalizeConnection(); } catch (Exception ex) { + // We are Ignoring the errors as no need to fail the build. log.debug("Warning: Exception while getting the index map", ex); - //throw ex; } return indexMap; } @@ -344,7 +420,6 @@ //Note: Always the query should be in "/" format only public List> getRecords(String query) { List> rowList = new ArrayList>(); - log.debug("sql query" + query); try { initializeConnection(); Statement stmt = connection.createStatement(); @@ -355,26 +430,19 @@ List columnNames = new ArrayList(); for (int i = 1; i <= numberOfColumns; i++) { columnNames.add(rsmd.getColumnName(i)); - log.debug("columnName:" + rsmd.getColumnName(i)); } - - log.debug("resultSet MetaData column Count=" + numberOfColumns); if (rs.isBeforeFirst()) { while (rs.next()) { - Map recordMap = new HashMap(); - log.debug("adding records"); + Map recordMap = new LinkedHashMap(); for (int i = 1; i <= numberOfColumns; i++) { int type = rsmd.getColumnType(i); String columnName = columnNames.get(i - 1); - //log.debug("columnName:" + columnName); if (type == java.sql.Types.INTEGER ) { Integer data = new Integer(rs.getInt(i)); recordMap.put(columnName, data); - log.debug("data:" + data); } else { String data = rs.getString(i); recordMap.put(columnName, data ); - log.debug("data:" + data); } } rowList.add(recordMap); @@ -383,8 +451,8 @@ stmt.close(); finalizeConnection(); } catch (Exception ex) { - log.debug("Warning: Exception while getting the record details", ex); - //throw ex; + // We are Ignoring the errors as no need to fail the build. 
+ log.warn("Warning: Exception while getting the record details", ex); } return rowList; } @@ -409,7 +477,6 @@ private void updateIndexTables(LogEntry entry) throws SQLException { connection.setAutoCommit(false); - //log.debug("updating logpath: " + entry.getLogPath()); insertLogEntryStmt.setNull(1, 4); insertLogEntryStmt.setString(2, entry.getLogPath()); insertLogEntryStmt.addBatch(); @@ -418,7 +485,6 @@ readConnection = DriverManager.getConnection(url); readConnection.setAutoCommit(false); Statement stmt = readConnection.createStatement(); - log.debug("exiting synchronization---2"); ResultSet rs = stmt.executeQuery("select id from logfiles where path='" + entry.getLogPath().trim() + "'"); int logPathId = 0; @@ -427,7 +493,6 @@ } stmt.close(); readConnection.close(); - log.debug("exiting synchronization---5"); insertComponentStmt.setNull(1, 4); insertComponentStmt.setString(2, entry.getComponent()); insertComponentStmt.setInt(3, logPathId); @@ -452,27 +517,49 @@ stmt.close(); } + public void removeEntries(List logPathList) throws Exception { + initializeConnection(); + Statement stmt = connection.createStatement(); + for (String logPath : logPathList) { + log.debug("logpath for delete: " + logPath); + log.debug("logpath delete query1 " + "DELETE FROM metadata WHERE logpath_id IN (SELECT id from logfiles WHERE path like '" + logPath + "')"); + stmt.executeUpdate("DELETE FROM metadata WHERE logpath_id IN (SELECT id from logfiles WHERE path like '%" + logPath + "%')"); + log.debug("logpath for delete2: " + "DELETE FROM component_time WHERE cid IN (select id from component where logpath_id in (select id from logfiles where path like '%" + logPath + "%'))"); + stmt.executeUpdate("DELETE FROM componenttime WHERE cid IN (select id from component where logpath_id in (select id from logfiles where path like '%" + logPath + "%'))"); + log.debug("logpath for delete3: " + "DELETE FROM component WHERE logpath_id IN (select id from logfiles where path like '%" + logPath + "%')"); + stmt.executeUpdate("DELETE FROM component WHERE logpath_id IN (select id from logfiles where path like '%" + logPath + "%')"); + log.debug("logpath for delete: " + "DELETE FROM logfiles WHERE path like ('%" + logPath + "%')"); + stmt.executeUpdate("DELETE FROM logfiles WHERE path like ('%" + logPath + "%')"); + } + stmt.close(); + finalizeConnection(); + } + public void addLogEntry(LogEntry entry) throws Exception { synchronized (MetaDataDb.class) { try { if (!statementsInitialized) { - log.debug("initializing statements for JDBC"); + log.debug("Initializing statements for JDBC"); initializeConnection(); - insertMetaDataEntryStmt = connection.prepareStatement("INSERT INTO metadata VALUES(?, ?, ?, ?, ?)"); - insertLogEntryStmt = connection.prepareStatement("INSERT or IGNORE INTO logfiles VALUES(?, ?)"); - insertComponentStmt = connection.prepareStatement("INSERT or IGNORE INTO component VALUES(?, ?, ?) 
"); + insertMetaDataEntryStmt = connection.prepareStatement(INSERT_METADATA_ENTRY); + insertLogEntryStmt = connection.prepareStatement(INSERT_LOGENTRY); + insertComponentStmt = connection.prepareStatement(INSERT_COMPONENTENTRY); + insertComponentTimeStmt = connection.prepareStatement(INSERT_COMPONENT_TIME); statementsInitialized = true; } - log.debug("MetaDataDB:entry:priority: " + entry.getPriority()); connection.setAutoCommit(false); updateIndexTables(entry); - if ( entry.getPriority() != Priority.DEFAULT) { + double time = entry.getElapsedTime(); + int logPathId = 0; + int componentId = 0; + Statement stmt = null; + ResultSet rs = null; + if ((time != -1) || entry.getPriority() != Priority.DEFAULT) { readConnection = DriverManager.getConnection(url); - Statement stmt = readConnection.createStatement(); - ResultSet rs = stmt.executeQuery("select id from logfiles where path='" + + stmt = readConnection.createStatement(); + rs = stmt.executeQuery("select id from logfiles where path='" + entry.getLogPath().trim() + "'"); - int logPathId = 0; if ( rs.next()) { logPathId = rs.getInt(1); } @@ -482,13 +569,27 @@ stmt = readConnection.createStatement(); rs = stmt.executeQuery("select id from component where component='" + entry.getComponent() + "' and logpath_id='" + logPathId + "'"); - int componentId = 0; if ( rs.next()) { componentId = rs.getInt(1); } rs.close(); stmt.close(); + } + if (time != -1) { + connection.setAutoCommit(false); + insertComponentTimeStmt.setInt(1, componentId); + insertComponentTimeStmt.setDouble(2, 0); + insertComponentTimeStmt.addBatch(); + insertComponentTimeStmt.executeBatch(); + connection.commit(); + insertComponentTimeStmt.clearBatch(); + stmt = readConnection.createStatement(); + stmt.executeUpdate("UPDATE componenttime SET time= (time + " + time + + ") WHERE cid = " + componentId ); + stmt.close(); readConnection.close(); + } + if ( entry.getPriority() != Priority.DEFAULT) { insertMetaDataEntryStmt.setInt(1, entry.getPriority().getValue()); insertMetaDataEntryStmt.setInt(2, componentId); insertMetaDataEntryStmt.setInt(3, entry.getLineNumber()); @@ -496,14 +597,14 @@ insertMetaDataEntryStmt.addBatch(); entryCacheSize ++; if (entryCacheSize >= LOG_ENTRY_CACHE_LIMIT) { - log.debug("writing data to database"); writeLogDataToDB(); entryCacheSize = 0; } } } catch (SQLException ex) { - log.debug(" Exception while writing the record"); - throw ex; + throw new BuildException("Exception while writing the records into Metadata DB", ex); + } catch (Exception ex1) { + throw new BuildException("Exception while writing the records into Metadata DB", ex1); } } } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/metadata/src/templates/general_category.txt.ftl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/metadata/src/templates/general_category.txt.ftl Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,36 @@ +<#-- +============================================================================ +Name : summary.html.ftl +Part of : Helium + +Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +All rights reserved. +This component and the accompanying materials are made available +under the terms of the License "Eclipse Public License v1.0" +which accompanies this distribution, and is available +at the URL "http://www.eclipse.org/legal/epl-v10.html". + +Initial Contributors: +Nokia Corporation - initial contribution. 
+ +Contributors: + +Description: + +============================================================================ +--> + + <#assign table_info = pp.loadData('com.nokia.helium.metadata.SQLFMPPLoader', + "${dbPath}") > + + + +<#assign logpath_table = table_info['select * from logfiles'] > +<#assign logpath_id = logpath_table?keys> +<#list logpath_id as logpath> + <#assign component_table = table_info['select id, component from component where logPath_id=${logpath}'] > + <#assign component_ids = component_table?keys?sort > + <#list component_ids as component_id > + component : ${component_table['${component_id}']} : logfile : ${logpath_table['${logpath}']} + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/metadata/src/templates/summary.html.ftl --- a/buildframework/helium/external/helium-antlib/metadata/src/templates/summary.html.ftl Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/metadata/src/templates/summary.html.ftl Wed Dec 23 19:29:07 2009 +0200 @@ -97,27 +97,29 @@ <@helium_logger_node_content nodeid="${helium_node_id}"> <#list component_ids as component_id> - <#assign helium_node_id = helium_node_id + 1> - <@helium_logger_node_head nodeid="${helium_node_id}" title="${component_table['${component_id}']}"> + <#if "${component_table['${component_id}']}" != "general" > + <#assign helium_node_id = helium_node_id + 1> + <@helium_logger_node_head nodeid="${helium_node_id}" title="${component_table['${component_id}']}"> + <#list priority_ids as priority> + <#assign priority_text = "${priority_table['${priority}']}"?lower_case> + <#assign priority_count = "${table_info['select count(data) as COUNT from metadata where logpath_id=${logpath} and priority_id = ${priority} and component_id = ${component_id}'][0]['COUNT']}" > + <@logfile_severity "${component_table['${component_id}']}", "${priority_text}", + "${priority_count}", + "${helium_node_id}" /> + + + <@helium_logger_node_content nodeid="${helium_node_id}"> <#list priority_ids as priority> - <#assign priority_text = "${priority_table['${priority}']}"?lower_case> - <#assign priority_count = "${table_info['select count(data) as COUNT from metadata where logpath_id=${logpath} and priority_id = ${priority} and component_id = ${component_id}'][0]['COUNT']}" > - <@logfile_severity "${component_table['${component_id}']}", "${priority_text}", - "${priority_count}", - "${helium_node_id}" /> + <#list table_info['select * from metadata where logpath_id = ${logpath} and priority_id = ${priority} and component_id = ${component_id}'] as recordentry > + <#-- <#if sublog?node_name == "logfile"> --> + <@logfile_entry_detail recordentry, "${helium_node_id}" /> + <#-- <#elseif sublog?node_name == "log"> + <@antlognode sublog/> + --> + - - <@helium_logger_node_content nodeid="${helium_node_id}"> - <#list priority_ids as priority> - <#list table_info['select * from metadata where logpath_id = ${logpath} and priority_id = ${priority} and component_id = ${component_id}'] as recordentry > - <#-- <#if sublog?node_name == "logfile"> --> - <@logfile_entry_detail recordentry, "${helium_node_id}" /> - <#-- <#elseif sublog?node_name == "log"> - <@antlognode sublog/> - --> - - - + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/metadata/tests/build.xml --- a/buildframework/helium/external/helium-antlib/metadata/tests/build.xml Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/metadata/tests/build.xml Wed Dec 23 19:29:07 2009 +0200 @@ -22,7 
+22,8 @@ --> Helium Antlib metadata tests. - + + @@ -33,9 +34,6 @@ - - - - + \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/metadata/tests/data/categorization.log --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/metadata/tests/data/categorization.log Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,45 @@ + + + + +make: *** [X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/urel/srchuiresultview.o] Error 1 + +X:/epoc32/release/armv5/udeb/srchuiresviewplugin.dll +"X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresviewplugin{000a0000}.def" +"X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresviewplugin{000a0000}.dso" +X:/epoc32/release/armv5/udeb/srchuiresviewplugin.dll.sym +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresviewplugin_udeb_objects.via +X:/epoc32/release/armv5/udeb/srchuiresviewplugin.dll.map +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultview.o.d +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultcontainer.o.d +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultmodel.o.d +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/proxy.o.d +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultview.o +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultcontainer.o +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultmodel.o +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/proxy.o +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultview.o +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultcontainer.o +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultmodel.o +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/proxy.o +X:/epoc32/release/armv5.product/udeb/srchuiresviewplugin.dll +X:/epoc32/release/armv5.product/udeb/srchuiresviewplugin.dll.sym +X:/epoc32/release/armv5.product/udeb/srchuiresviewplugin.dll.map +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5 +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5 +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb +X:/epoc32/release/armv5/udeb +X:/epoc32/release/armv5/lib +X:/epoc32/release/armv5/lib +X:/epoc32/release/armv5.product/udeb + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/metadata/tests/data/categorization_clean.log --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/metadata/tests/data/categorization_clean.log Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,33 @@ + + + +X:/epoc32/release/armv5/udeb/srchuiresviewplugin.dll +"X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresviewplugin{000a0000}.def" +"X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresviewplugin{000a0000}.dso" +X:/epoc32/release/armv5/udeb/srchuiresviewplugin.dll.sym 
+X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresviewplugin_udeb_objects.via +X:/epoc32/release/armv5/udeb/srchuiresviewplugin.dll.map +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultview.o.d +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultcontainer.o.d +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultmodel.o.d +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/proxy.o.d +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultview.o +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultcontainer.o +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultmodel.o +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/proxy.o +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultview.o +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultcontainer.o +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultmodel.o +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/proxy.o +X:/epoc32/release/armv5.product/udeb/srchuiresviewplugin.dll +X:/epoc32/release/armv5.product/udeb/srchuiresviewplugin.dll.sym +X:/epoc32/release/armv5.product/udeb/srchuiresviewplugin.dll.map +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5 +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5 +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb +X:/epoc32/release/armv5/udeb +X:/epoc32/release/armv5/lib +X:/epoc32/release/armv5/lib +X:/epoc32/release/armv5.product/udeb + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/metadata/tests/data/noerror.log --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/metadata/tests/data/noerror.log Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,3 @@ + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/metadata/tests/data/parser_error.log --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/metadata/tests/data/parser_error.log Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,46 @@ + + + + +make: *** [X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/urel/srchuiresultview.o] Error 1 + +X:/epoc32/release/armv5/udeb/srchuiresviewplugin.dll +"X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresviewplugin{000a0000}.def" +"X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresviewplugin{000a0000}.dso" +X:/epoc32/release/armv5/udeb/srchuiresviewplugin.dll.sym +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresviewplugin_udeb_objects.via +X:/epoc32/release/armv5/udeb/srchuiresviewplugin.dll.map +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultview.o.d +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultcontainer.o.d +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultmodel.o.d 
+X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/proxy.o.d +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultview.o +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultcontainer.o +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultmodel.o +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/proxy.o +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultview.o +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultcontainer.o +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/srchuiresultmodel.o +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb/proxy.o +X:/epoc32/release/armv5.product/udeb/srchuiresviewplugin.dll +X:/epoc32/release/armv5.product/udeb/srchuiresviewplugin.dll.sym +X:/epoc32/release/armv5.product/udeb/srchuiresviewplugin.dll.map +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5 +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5 +X:/epoc32/build/organizer/c_aff6bae36b1df6c4/srchuiresviewplugin_dll/armv5/udeb +X:/epoc32/release/armv5/udeb +X:/epoc32/release/armv5/lib +X:/epoc32/release/armv5/lib +X:/epoc32/release/armv5.product/udeb + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/metadata/tests/functionality/func_add_test.ant.xml --- a/buildframework/helium/external/helium-antlib/metadata/tests/functionality/func_add_test.ant.xml Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/metadata/tests/functionality/func_add_test.ant.xml Wed Dec 23 19:29:07 2009 +0200 @@ -160,6 +160,26 @@ + + + + + + + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/metadata/tests/functionality/test_regex.ant.xml --- a/buildframework/helium/external/helium-antlib/metadata/tests/functionality/test_regex.ant.xml Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/metadata/tests/functionality/test_regex.ant.xml Wed Dec 23 19:29:07 2009 +0200 @@ -41,6 +41,102 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + macro: ${project.dir}/../../src/templates/macro + + + dbPath: ${project.dir}/../output/log-entry-for-no-error.sqlite + ant: antProperties() + + + + + + Helium antlib quality tests. + + + - - - - + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sbs/bld.bat --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sbs/bld.bat Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,60 @@ +@echo off + +rem +rem Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +rem All rights reserved. +rem This component and the accompanying materials are made available +rem under the terms of the License "Eclipse Public License v1.0" +rem which accompanies this distribution, and is available +rem at the URL "http://www.eclipse.org/legal/epl-v10.html". +rem +rem Initial Contributors: +rem Nokia Corporation - initial contribution. 
+rem +rem Contributors: +rem +rem Description: +rem + +setlocal +if not defined JAVA_6_HOME ( +set TESTED_JAVA=C:\Apps\j2sdk_1.6.0_02 +) ELSE set TESTED_JAVA=%JAVA_6_HOME% +if exist %TESTED_JAVA% (set JAVA_HOME=%TESTED_JAVA%) + +REM Configure Ant +if not defined TESTED_ANT_HOME ( +set TESTED_ANT_HOME=C:\Apps\ant_1.7 +) +if exist %TESTED_ANT_HOME% (set ANT_HOME=%TESTED_ANT_HOME%) + +REM Configure the expected Ant Version details below +SET expMajorVer=1 +SET expMinorVer=7 + +rem *** Verify Ant Version *** +rem -- Run the 'ant -version' command and capture the output to a variable +for /f "tokens=*" %%a in ('ant -version') do (set antversion=%%a) +echo *** Installed Version : %antversion% + +rem -- Parse the version string obtained above and get the version number +for /f "tokens=4 delims= " %%a in ("%antversion%") do set val=%%a +rem -- Parse the version number delimiting the '.' and set the major and +rem minor versions +for /f "tokens=1-2 delims=." %%a in ("%val%") do ( +set /A majorVersion=%%a +set /A minorVersion=%%b +) +rem -- Check whether major version is greater than or equal to the expected. +if %majorVersion% geq %expMajorVer% ( +rem -- if major version is valid, check minor version. If minor version is less +rem than expected display message and abort the execution. +if %minorVersion% lss %expMinorVer% (echo *** Incorrect version of Ant found. Please check you have atleast Ant 1.7.0 & goto :errorstop ) +) + +ant %* +endlocal + +:errorstop +@echo *** Build aborted with error +exit /b 1 \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sbs/build.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sbs/build.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,37 @@ + + + + Helium Antlib sbs build file. + + + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sbs/src/com/nokia/helium/sbs/SAXSysdefParser.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sbs/src/com/nokia/helium/sbs/SAXSysdefParser.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,94 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. 
+* +* Contributors: +* +* Description: +* +*/ + +package com.nokia.helium.sbs; + +import org.dom4j.Document; +import org.apache.tools.ant.BuildException; +import org.dom4j.Element; +import org.dom4j.Attribute; +import org.dom4j.ElementPath; +import org.dom4j.ElementHandler; +import org.dom4j.io.SAXReader; +import org.apache.log4j.Logger; + +import java.io.*; +import java.util.*; + +/** + * Parses the sysdef config file and extracts the available configurations + */ +public class SAXSysdefParser { + private File sysdefFile; + private String configs = ""; + private List layers; + private boolean initialized; + private Logger log = Logger.getLogger(SAXSysdefParser.class); + + /** + * Constructor + * @param fileName - name of the sysdef file to parse + */ + public SAXSysdefParser(File fileName) { + + sysdefFile = fileName; + } + + public List getLayers() { + if (!initialized ) { + initialized = true; + parseConfig("layer"); + if (layers == null) { + throw new BuildException("No layers found from sysdef"); + } + } + return layers; + } + + /** + * Constructor + * @return list of available configurations that can be built. + */ + public void parseConfig(String nodeToGet) { + layers = new ArrayList(); + SAXReader reader = new SAXReader(); + reader.addHandler( "/SystemDefinition/systemModel/" + nodeToGet, + new ElementHandler() { + public void onStart(ElementPath path) { + } + public void onEnd(ElementPath path) { + Element row = path.getCurrent(); + Iterator itr = row.attributeIterator(); + while (itr.hasNext()) + { + Attribute child = (Attribute) itr.next(); + String attrName = child.getQualifiedName(); + if (attrName.equals("name")) { + layers.add((String)child.getValue()); + } + } + row.detach(); + } + } + ); + try { + Document doc = reader.read(sysdefFile); + } catch (Exception e) { + e.printStackTrace(); + } + } +} \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sbs/src/com/nokia/helium/sbs/SBSCommandBase.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sbs/src/com/nokia/helium/sbs/SBSCommandBase.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,83 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.sbs; + +import java.io.File; +import java.util.HashMap; + +import com.nokia.helium.core.plexus.CommandBase; + +/** + * Simple SBS wrapper based on the CommandBase class. + * + */ +public class SBSCommandBase extends CommandBase { + + private File workingDir; + + private String cleanLogPath; + + /** + * @return sbs. + */ + @Override + protected String getExecutable() { + return "sbs"; + } + + /** + * {@inheritDoc} + */ + @Override + protected void throwException(String message, Throwable t) throws SBSException { + throw new SBSException(message, t); + } + + /** + * Set the working directory where emake should be called. + * @param workingDir the working directory. + */ + public void setWorkingDir(File workingDir) { + this.workingDir = workingDir; + } + + /** + * Get the workingDir defined by the user. + * @return the working dir. 
+ */ + @Override + public File getWorkingDir() { + return workingDir; + } + + public void setCleanLogFilePath(String path) { + cleanLogPath = path; + } + + /** + * Executes the command using as argline, instead of argument. + * @param argLine, argline to execute. + */ + public void execute(String argLine) throws SBSException { + HashMap envMap = new HashMap(); + envMap.put("PYTHONPATH", ""); + if ( cleanLogPath != null) { + envMap.put("SBS_CLEAN_LOG_FILE", cleanLogPath); + } + executeCmdLine(argLine, envMap, null); + } +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sbs/src/com/nokia/helium/sbs/SBSException.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sbs/src/com/nokia/helium/sbs/SBSException.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,41 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.sbs; + +/** + * Exception raise by the SBS Modules. + * + */ +public class SBSException extends Exception { + + /** + * An exception with message. + * @param message + */ + public SBSException(String message) { + super(message); + } + + /** + * An exception with message and cause. + * @param message + */ + public SBSException(String message, Throwable t) { + super(message, t); + } + +} \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sbs/src/com/nokia/helium/sbs/ant/SBSBuildList.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sbs/src/com/nokia/helium/sbs/ant/SBSBuildList.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,65 @@ +/* + * Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). + * All rights reserved. + * This component and the accompanying materials are made available + * under the terms of the License "Eclipse Public License v1.0" + * which accompanies this distribution, and is available + * at the URL "http://www.eclipse.org/legal/epl-v10.html". + * + * Initial Contributors: + * Nokia Corporation - initial contribution. 
+ * + * Contributors: + * + * Description: + * + */ + +package com.nokia.helium.sbs.ant; + +import org.apache.tools.ant.Project; +import org.apache.tools.ant.BuildException; +import java.util.Hashtable; +import java.util.List; +import java.util.HashMap; +import com.nokia.helium.sbs.ant.types.*; +import com.nokia.helium.sbs.ant.taskdefs.*; +import org.apache.log4j.Logger; + +public final class SBSBuildList { + + + + private static HashMap sbsBuildMap; + + private static Logger log = Logger.getLogger(SBSBuildList.class); + + private SBSBuildList() { + } + + public static List getSBSInputList(Project project, String buildName) { + if (sbsBuildMap == null) { + initialize(project); + } + SBSBuild sbsBuild = sbsBuildMap.get(buildName); + if (sbsBuild == null) { + throw new BuildException("Config name : " + buildName + " is not valid"); + } + List retList = null; + if (sbsBuild != null) { + retList = sbsBuild.getSBSInputList(); + } + return retList; + } + + private static void initialize(Project project) { + Hashtable references = project.getReferences(); + sbsBuildMap = new HashMap(); + for (Object key : references.keySet()) { + Object sbsBuildObject = references.get(key); + if ( sbsBuildObject != null && sbsBuildObject instanceof SBSBuild) { + sbsBuildMap.put(key, (SBSBuild)sbsBuildObject); + } + } + } +} \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sbs/src/com/nokia/helium/sbs/ant/antlib.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sbs/src/com/nokia/helium/sbs/ant/antlib.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,32 @@ + + + + + + + + + + + + \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sbs/src/com/nokia/helium/sbs/ant/taskdefs/GetSBSInputs.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sbs/src/com/nokia/helium/sbs/ant/taskdefs/GetSBSInputs.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,60 @@ +/* + * Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). + * All rights reserved. + * This component and the accompanying materials are made available + * under the terms of the License "Eclipse Public License v1.0" + * which accompanies this distribution, and is available + * at the URL "http://www.eclipse.org/legal/epl-v10.html". + * + * Initial Contributors: + * Nokia Corporation - initial contribution. + * + * Contributors: + * + * Description: + * + */ + +package com.nokia.helium.sbs.ant.taskdefs; + +import java.util.List; +import org.apache.tools.ant.BuildException; +import org.apache.tools.ant.Task; +import com.nokia.helium.sbs.ant.types.*; +import com.nokia.helium.sbs.ant.*; +import org.apache.log4j.Logger; + +public class GetSBSInputs extends Task { + + private Logger log = Logger.getLogger(GetSBSInputs.class); + private String configName; + private String outputProperty; + + + public void setConfig(String name) { + configName = name; + } + + public void setOutputProperty(String property) { + outputProperty = property; + } + + /** + * Execute the task. Set the property with number of severities. 
+ * @throws BuildException + */ + public void execute() { + if (configName == null) { + throw new BuildException("configInput is not defined"); + } + List sbsInputList = SBSBuildList.getSBSInputList(getProject(), configName); + StringBuffer inputs = new StringBuffer(); + for (SBSInput input : sbsInputList) { + if (inputs.length() > 0) { + inputs.append(","); + } + inputs.append(input.getRefid().getRefId()); + getProject().setProperty(outputProperty,inputs.toString()); + } + } +} \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sbs/src/com/nokia/helium/sbs/ant/taskdefs/SBSTask.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sbs/src/com/nokia/helium/sbs/ant/taskdefs/SBSTask.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,311 @@ +/* + * Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). + * All rights reserved. + * This component and the accompanying materials are made available + * under the terms of the License "Eclipse Public License v1.0" + * which accompanies this distribution, and is available + * at the URL "http://www.eclipse.org/legal/epl-v10.html". + * + * Initial Contributors: + * Nokia Corporation - initial contribution. + * + * Contributors: + * + * Description: + * + */ + +package com.nokia.helium.sbs.ant.taskdefs; + +import java.util.List; +import org.w3c.dom.*; +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.transform.*; +import javax.xml.transform.dom.*; +import javax.xml.transform.stream.*; +import java.io.FileWriter; +import java.text.DecimalFormat; +import java.util.Date; +import com.nokia.helium.core.plexus.AntStreamConsumer; +import java.io.File; +import org.apache.tools.ant.BuildException; +import org.apache.tools.ant.Task; +import com.nokia.helium.core.ant.types.Variable; +import com.nokia.helium.core.ant.types.VariableSet; +import com.nokia.helium.sbs.ant.types.*; +import com.nokia.helium.sbs.ant.*; +import org.apache.log4j.Logger; +import org.apache.tools.ant.types.PatternSet; +import com.nokia.helium.sbs.SAXSysdefParser; +import com.nokia.helium.sbs.SBSCommandBase; +import com.nokia.helium.sbs.SBSException; +import java.util.ArrayList; +import java.util.Hashtable; +import com.nokia.helium.core.plexus.FileStreamConsumer; +import java.util.Collection; + +public class SBSTask extends Task { + + private Logger log = Logger.getLogger(SBSTask.class); + private String sbsInputName; + private String layerPatternSetRef; + private File sysDefFile; + private File workingDir; + private File errorFile; + private String logSuffix; + private String cleanLog; + private String outputLogName; + private File statsLog; + private boolean executeCmd = true; + private boolean failOnError = true; + private Date startTime; + private Date endTime; + + + public void setCleanLog(String logPath) { + cleanLog = logPath; + } + + public void setOutputLog(String logName) { + outputLogName = logName; + } + + public void setStatsLog(File log) { + statsLog = log; + } + + public void setSBSInput(String inputName) { + sbsInputName = inputName; + } + + public void setErrorOutput(File file) { + errorFile = file; + } + + public void setSysDefFile(File file) { + sysDefFile = file; + } + + public void setLogSuffix(String suffix) { + logSuffix = suffix; + } + + public void setLayerPatternSetRef(String id) { + layerPatternSetRef = id; + } + + public void setWorkingDir(File dir) { + workingDir = dir; + } + + public 
void setExecute(boolean execute) { + executeCmd = execute; + } + + public void setFailOnError(boolean failBuild) { + failOnError = failBuild; + } + + /** + * Execute the task. Set the property with number of severities. + * @throws BuildException + */ + public void execute() { + if (sbsInputName == null) { + throw new BuildException("sbsInputName is not defined"); + } + if (sysDefFile == null) { + throw new BuildException("System Definition file is missing"); + } + if (workingDir == null) { + throw new BuildException("workingDir must be set"); + } + + List filteredLayers = getFilteredLayers(); + SBSCommandBase sbsCmd = new SBSCommandBase(); + sbsCmd.setWorkingDir(workingDir); + if (cleanLog != null) { + sbsCmd.setCleanLogFilePath(cleanLog); + } + try { + log.debug("error stream file : " + errorFile); + sbsCmd.addOutputLineHandler(new AntStreamConsumer(this)); + if (errorFile == null) { + log.debug("redirecting error to Antstream"); + sbsCmd.addErrorLineHandler(new AntStreamConsumer(this)); + } else { + log.debug("redirecting error to file stream"); + sbsCmd.addErrorLineHandler(new FileStreamConsumer(errorFile)); + } + } catch (java.io.FileNotFoundException ex) { + log.info("file path: " + errorFile + "Not valid" ); + } + Object refObject = getProject().getReferences().get(sbsInputName); + if (refObject == null) { + throw new BuildException("invalid sbs input reference: " + sbsInputName); + } + if ( refObject != null && ! (refObject instanceof SBSInput)) { + throw new BuildException("sbs input name " + sbsInputName + "is not valid"); + } + SBSInput sbsInput = (SBSInput)refObject; + StringBuffer cmdOptions = new StringBuffer(); + VariableSet sbsOptions = sbsInput.getFullSBSOptions(); + cmdOptions.append(" -s " + sysDefFile); + Collection variableList = sbsOptions.getVariables(); + if (sbsOptions != null ) { + if (variableList.isEmpty()) { + throw new BuildException("sbsoptions cannot be empty for input: " + sbsInputName); + } + } + for (Variable variable : variableList) { + cmdOptions.append(" " + variable.getParameter()); + } + SBSMakeOptions sbsMakeOptions = sbsInput.getFullSBSMakeOptions(); + variableList = null; + if (sbsMakeOptions != null) { + cmdOptions.append(" -e " + sbsMakeOptions.getEngine()); + String ppThreads = sbsMakeOptions.getPPThreads(); + if (ppThreads != null) { + cmdOptions.append(" -j " + ppThreads); + } + variableList = sbsMakeOptions.getVariables(); + //if (variableList.isEmpty()) { + // throw new BuildException("sbs make options cannot be empty for input: " + sbsInputName); + //} + for (Variable variable : variableList) { + cmdOptions.append(" --mo="); + cmdOptions.append(variable.getParameter()); + } + } + if (filteredLayers != null) { + if (filteredLayers.isEmpty()) { + log.info("Warning: No matching layers to build from system definition file, skipped"); + return; + } else { + for (String layer : filteredLayers) { + cmdOptions.append(" -l " + layer); + } + } + } + startTime = new Date(); + try { + log("sbs commands: " + cmdOptions.toString()); + if (executeCmd) { + sbsCmd.execute(cmdOptions.toString()); + } + } catch (SBSException sex) { + log.info("SBS exception occured during sbs execution"); + if (failOnError) { + throw new BuildException("exception during SBS execution", sex); + } + } catch (Exception ex) { + log.info("Exception occured during sbs execution"); + if (failOnError) { + throw new BuildException("exception during SBS execution", ex); + } + } + endTime = new Date(); + updateSBSLogStatistics(statsLog, outputLogName); + } + + private List 
getFilteredLayers() { + List filteredLayers = null; + if (layerPatternSetRef != null) { + Hashtable references = getProject().getReferences(); + Object layerPatternSetObject = references.get(layerPatternSetRef); + if ( layerPatternSetObject != null && ! (layerPatternSetObject instanceof PatternSet)) { + throw new BuildException("Layer Pattern set is not of type PatternSet"); + } + if (layerPatternSetObject != null) { + PatternSet layerPatternSet = (PatternSet) layerPatternSetObject; + SAXSysdefParser sysDefParser = new SAXSysdefParser(sysDefFile); + List fullLayerList = sysDefParser.getLayers(); + filteredLayers = new ArrayList(); + String[] includes = layerPatternSet.getIncludePatterns(getProject()); + String[] excludes = layerPatternSet.getExcludePatterns(getProject()); + if (includes == null && excludes == null) { + throw new BuildException("No patterns specified"); + } + for (String layer : fullLayerList) { + if (includes == null) { + if (!isExcluded(layer, excludes)) { + filteredLayers.add(layer); + } + continue; + } + if (isIncluded(layer, includes) ) { + if (!isExcluded(layer, excludes)) { + filteredLayers.add(layer); + } + } + } + } + } + return filteredLayers; + } + + + private boolean isIncluded(String text, String[] includes) { + if (includes != null) { + for (String pattern : includes) { + if (text.matches(pattern)) { + return true; + } + } + } + return false; + } + + private void updateSBSLogStatistics(File infoFileName, + String logFileName) { + + try { + DocumentBuilderFactory sbsInfo = DocumentBuilderFactory.newInstance(); + DocumentBuilder docBuilder = sbsInfo.newDocumentBuilder(); + Document doc = docBuilder.newDocument(); + Element root = doc.createElement("sbsinfo"); + doc.appendChild(root); + Element child = doc.createElement("logfile"); + child.setAttribute("name", logFileName); + root.appendChild(child); + + long timeDiff = (endTime.getTime() - startTime.getTime()) / 1000; + child = doc.createElement("durationlong"); + child.setAttribute("time", "" + timeDiff); + root.appendChild(child); + child = doc.createElement("duration"); + int hours = (int) (timeDiff / 3600); + int minutesSeconds = (int)(timeDiff % 3600); + int minutes = minutesSeconds / 60; + int seconds = minutesSeconds % 60; + DecimalFormat decimalFormat = new DecimalFormat(); + decimalFormat.setMinimumIntegerDigits(2); + String duration = decimalFormat.format(hours) + "H:" + + decimalFormat.format(minutes) + "M:" + decimalFormat.format(seconds) + "S"; + //Todo: handle if the time difference is greater than 24 hours + child.setAttribute("time", duration); + root.appendChild(child); + Transformer transformer = TransformerFactory.newInstance().newTransformer(); + transformer.setOutputProperty(OutputKeys.INDENT, "yes"); + FileWriter sbsWriter = new FileWriter(infoFileName); + StreamResult result = new StreamResult(sbsWriter); + DOMSource sbsSource = new DOMSource(doc); + transformer.transform(sbsSource, result); + } catch (Exception ex) { + log.debug("exception while xml writing sbs log info", ex); + } + + } + + private boolean isExcluded(String text, String[] excludes) { + if (excludes != null) { + for (String pattern : excludes) { + if (text.matches(pattern)) { + return true; + } + } + } + return false; + } +} \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sbs/src/com/nokia/helium/sbs/ant/types/SBSBuild.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ 
b/buildframework/helium/external/helium-antlib/sbs/src/com/nokia/helium/sbs/ant/types/SBSBuild.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,92 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ + +package com.nokia.helium.sbs.ant.types; + +import org.apache.tools.ant.types.DataType; +import org.apache.tools.ant.BuildException; +import java.util.Vector; +import java.util.List; +import java.util.ArrayList; +import org.apache.tools.ant.types.Reference; +import org.apache.log4j.Logger; + + +/** + * Helper class to store the command line variables + * with name / value pair. + * @ant.type name="arg" + * @ant.type name="makeOption" + */ +public class SBSBuild extends DataType +{ + private static Logger log = Logger.getLogger(SBSBuild.class); + + private String name; + + private Vector sbsInputList = new Vector(); + + + public SBSBuild() { + } + + /** + * Set the name of the variable. + * @param name + */ + public void setName(String nm) { + name = nm; + } + + /** + * Gets the name of the build input. + * @param name + */ + public String getName() { + return name; + } + + /** + * Creates an empty variable element and adds + * it to the variables list + * @return empty Variable pair + */ + public SBSInput createSBSInput() { + SBSInput input = new SBSInput(); + sbsInputList.add(input); + return input; + } + + public List getSBSInputList() { + List inputList = new ArrayList(); + Reference refId = getRefid(); + Object sbsInputObject = null; + if (refId != null) { + try { + sbsInputObject = refId.getReferencedObject(); + } catch ( Exception ex) { + //log.info("Reference id of sbsinput list is not valid"); + throw new BuildException("Reference id (" + refId.getRefId() + ") of sbsinput list is not valid"); + } + if (sbsInputObject != null && sbsInputObject instanceof SBSInput) { + inputList.add((SBSInput)sbsInputObject); + } + } + inputList.addAll(sbsInputList); + return inputList; + } +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sbs/src/com/nokia/helium/sbs/ant/types/SBSInput.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sbs/src/com/nokia/helium/sbs/ant/types/SBSInput.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,251 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. 
+* +* Contributors: +* +* Description: +* +*/ + +package com.nokia.helium.sbs.ant.types; + +import com.nokia.helium.core.ant.types.VariableSet; +import com.nokia.helium.core.ant.VariableIFImpl; +import java.util.Collection; +import com.nokia.helium.core.ant.types.Variable; +import org.apache.tools.ant.BuildException; +import java.util.Vector; +import org.apache.tools.ant.types.Reference; +import org.apache.log4j.Logger; + +/** + * Helper class to store the variable set (list of variables + * with name / value pair) + * @ant.type name="argSet" + */ +public class SBSInput extends VariableIFImpl { + + private static Logger log = Logger.getLogger(SBSInput.class); + + private Vector sbsOptions = new Vector(); + private Vector sbsMakeOptions = new Vector(); + private Vector sbsInputList = new Vector(); + + /** + * Constructor + */ + public SBSInput() { + } + + /** + * Creates an empty variable element and adds + * it to the variables list + * @return empty Variable pair + */ + public VariableSet createSBSOptions() { + SBSInput sbsInput = new SBSInput(); + VariableSet varSet = new VariableSet(); + sbsInput.addSBSOptions(varSet); + sbsInputList.add(sbsInput); + return varSet; + } + + public void addSBSOptions(VariableSet varSet) { + sbsOptions.add(varSet); + } + + /** + * Creates an empty variable element and adds + * it to the variables list + * @return empty Variable pair + */ + public VariableSet createSBSMakeOptions() { + SBSInput sbsInput = new SBSInput(); + SBSMakeOptions varSet = new SBSMakeOptions(); + sbsInput.addSBSMakeOptions(varSet); + sbsInputList.add(sbsInput); + return varSet; + } + + public void addSBSMakeOptions(SBSMakeOptions varSet) { + sbsMakeOptions.add(varSet); + } + + public SBSInput createSBSInput() { + SBSInput sbsInput = new SBSInput(); + sbsInputList.add(sbsInput); + return sbsInput; + } + + public Vector getSBSOptions() { + return sbsOptions; + } + + public Vector getSBSMakeOptions() { + return sbsMakeOptions; + } + + private void validateInput() { + if (getRefid() != null && (!sbsMakeOptions.isEmpty() || !sbsOptions.isEmpty())) { + throw new BuildException("SBSInput with refid should not have sbsoptions / sbsmakeoptions"); + } + } + + private Vector getSBSOptions(SBSInput sbsInput) { + Vector fullList = null; + sbsInput.validateInput(); + Reference refId = sbsInput.getRefid(); + Object sbsInputObject = null; + if (refId != null) { + try { + sbsInputObject = refId.getReferencedObject(); + } catch ( Exception ex) { + //log.info("Reference id of sbsinput list is not valid"); + throw new BuildException("Reference id (" + refId.getRefId() + ") of sbsinput list is not valid"); + } + if (sbsInputObject != null && sbsInputObject instanceof SBSInput) { + VariableSet options = ((SBSInput)sbsInputObject).getFullSBSOptions(); + if (options != null ) { + if (fullList == null) { + fullList = new Vector(); + } + fullList.add(options); + } + } + } + Vector optionsList = sbsInput.getSBSOptions(); + if (optionsList != null ) { + if (fullList == null) { + fullList = new Vector(); + } + fullList.addAll(optionsList); + } + return fullList; + } + + private Vector getSBSMakeOptions(SBSInput sbsInput) { + Vector sbsMakeOptionsList = null; + Reference refId = sbsInput.getRefid(); + Object sbsInputObject = null; + if (refId != null) { + try { + sbsInputObject = refId.getReferencedObject(); + } catch ( Exception ex) { + throw new BuildException("Reference id (" + refId.getRefId() + ") of sbsinput list is not valid"); + } + if (sbsInputObject != null && sbsInputObject instanceof SBSInput) { + 
SBSMakeOptions options = ((SBSInput)sbsInputObject).getFullSBSMakeOptions(); + if (options != null ) { + if (sbsMakeOptionsList == null) { + sbsMakeOptionsList = new Vector(); + } + sbsMakeOptionsList.add(options); + } + } + } + Vector options = sbsInput.getSBSMakeOptions(); + if (options != null) { + if (sbsMakeOptionsList == null) { + sbsMakeOptionsList = new Vector(); + } + sbsMakeOptionsList.addAll(options); + } + return sbsMakeOptionsList; + } + + public VariableSet getFullSBSOptions() { + Vector fullList = null; + VariableSet resultSet = null; + Vector currentOptions = getSBSOptions(this); + if (currentOptions != null && !currentOptions.isEmpty()) { + if (fullList == null ) { + fullList = new Vector(); + } + fullList.addAll(currentOptions); + } + for (SBSInput sbsInput : sbsInputList) { + Vector options = getSBSOptions(sbsInput); + if (options != null && !options.isEmpty()) { + if (fullList == null ) { + fullList = new Vector(); + } + fullList.addAll(options); + } + } + if (fullList != null) { + for (VariableSet varSet : fullList) { + for (Variable var : varSet.getVariables()) { + if (resultSet == null) { + resultSet = new VariableSet(); + } + resultSet.add(var); + } + } + } + return resultSet; + } + + public SBSMakeOptions getFullSBSMakeOptions() { + Vector sbsMakeOptionsList = null; + SBSMakeOptions resultSet = null; + Vector currentOptions = getSBSMakeOptions(this); + if (currentOptions != null && !currentOptions.isEmpty()) { + if (sbsMakeOptionsList == null ) { + sbsMakeOptionsList = new Vector(); + } + sbsMakeOptionsList.addAll(currentOptions); + } + for (SBSInput sbsInput : sbsInputList) { + Vector options = getSBSMakeOptions(sbsInput); + if (options != null && !options.isEmpty()) { + if (sbsMakeOptionsList == null ) { + sbsMakeOptionsList = new Vector(); + } + sbsMakeOptionsList.addAll(options); + } + } + if (sbsMakeOptionsList != null ) { + String engine = null; + for (SBSMakeOptions varSet : sbsMakeOptionsList) { + String currentEngine = varSet.getEngine(); + if (currentEngine != null) { + if (engine == null) { + engine = currentEngine; + if (resultSet == null ) { + resultSet = new SBSMakeOptions(); + } + resultSet.setEngine(currentEngine); + } else { + if (!engine.equals(currentEngine) ) { + throw new BuildException("inheriting engine types mismatch: " + engine + " != " + currentEngine); + } + } + } + if (resultSet == null ) { + resultSet = new SBSMakeOptions(); + } + for (Variable var : varSet.getVariables()) { + resultSet.add(var); + } + } + } + return resultSet; + } + + public Collection getVariables() { + Collection varList = null; + VariableSet options = getFullSBSOptions(); + if (options != null) { + varList = options.getVariables(); + } + return varList; + } +} \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sbs/src/com/nokia/helium/sbs/ant/types/SBSMakeOptions.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sbs/src/com/nokia/helium/sbs/ant/types/SBSMakeOptions.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,136 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. 
+* +* Contributors: +* +* Description: +* +*/ + +package com.nokia.helium.sbs.ant.types; + +import com.nokia.helium.core.ant.types.VariableSet; +import org.apache.log4j.Logger; +import org.apache.tools.ant.types.Reference; +import org.apache.tools.ant.BuildException; +import java.util.List; + +/** + * Helper class to store the variable set (list of variables + * with name / value pair) + * @ant.type name="argSet" + */ +public class SBSMakeOptions extends VariableSet { + + private static Logger log = Logger.getLogger(SBSMakeOptions.class); + + private String engine; + + private String ppThreads; + + + private boolean initialized; + /** + * Constructor + */ + public SBSMakeOptions() { + } + + /** + * Helper function called by ant to create the new varset + */ + public SBSMakeOptions createSBSMakeOptions() { + SBSMakeOptions options = new SBSMakeOptions(); + add(options); + return options; + } + + /** + * Helper function to add the created varset + * @param filter to be added to the varset + */ + public void add(SBSMakeOptions option) { + super.add(option); + } + + /** + * Sets the engine type for this options + * @param engine for which the make options are used + */ + public void setEngine(String engineName) { + engine = engineName; + } + + /** + * Sets the ppthreads (no. bldinfs to process) + * @param ppBlock no. bldinfs to process per block + */ + public void setPPThreads(String ppBlock) { + ppThreads = ppBlock; + } + + public String getPPThreads() { + if (!initialized) { + initializeAll(); + initialized = true; + } + return ppThreads; + } + + /** + * Returns the engine name + * @return type of make engine + */ + public String getEngine() { + if (!initialized) { + initializeAll(); + initialized = true; + } + if (engine == null) { + throw new BuildException("engine should not be null"); + } + return engine; + } + private void initializeAll() { + Object sbsInputObject = null; + List varSets = getVariableSets(); + initialize(this); + for (VariableSet varSet : varSets) { + initialize(varSet); + } + } + + private void initialize(VariableSet varSet) { + SBSMakeOptions makeOptions = null; + Reference refId = varSet.getRefid(); + if (refId != null) { + try { + makeOptions = (SBSMakeOptions)refId.getReferencedObject(); + if (makeOptions != null) { + String refEngine = makeOptions.getEngine(); + String threads = makeOptions.getPPThreads(); + if (engine != null && !(engine.equals(refEngine))) { + throw new BuildException(" Config's engine type " + engine + " not matching with reference : " + + refId.getRefId() + ": engine: " + refEngine); + } + if (ppThreads == null) { + ppThreads = threads; + } + if (engine == null) { + engine = refEngine; + } + } + } catch ( Exception ex) { + throw new BuildException(ex.getMessage()); + } + } + } +} \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sbs/tests/bld.sh --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sbs/tests/bld.sh Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,24 @@ +#!/bin/csh + +# +# Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +# All rights reserved. +# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Nokia Corporation - initial contribution. 
+# +# Contributors: +# +# Description: +# + + + +module load java/1.6.0 +module load mercurial +setenv ANT_ARGS "-lib ../lib -lib ../../lib -lib ../../core/lib -lib ../../bin/helium-core.jar -lib ../../bin/helium-sbs.jar -lib ../../antlibs" +ant $* diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sbs/tests/build.bat --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sbs/tests/build.bat Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,27 @@ +@echo off + +rem +rem Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +rem All rights reserved. +rem This component and the accompanying materials are made available +rem under the terms of the License "Eclipse Public License v1.0" +rem which accompanies this distribution, and is available +rem at the URL "http://www.eclipse.org/legal/epl-v10.html". +rem +rem Initial Contributors: +rem Nokia Corporation - initial contribution. +rem +rem Contributors: +rem +rem Description: +rem + +setlocal +if not defined JAVA_6_HOME ( +set TESTED_JAVA=C:\Apps\j2sdk_1.6.0_02 +) ELSE set TESTED_JAVA=%JAVA_6_HOME% +if exist %TESTED_JAVA% (set JAVA_HOME=%TESTED_JAVA%) +set ANT_ARGS=-lib ..\lib -lib ..\..\lib -lib ..\..\core\lib -lib ..\..\bin\helium-core.jar -lib ..\..\bin\helium-sbs.jar -lib ..\..\antlibs +ant %* +endlocal + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sbs/tests/build.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sbs/tests/build.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,37 @@ + + + + Helium Antlib sbs tests. + + + + + + + + + + + + + \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sbs/tests/sbs_default_config.ant.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sbs/tests/sbs_default_config.ant.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,164 @@ + + + + Helium Antlib default sbs configuration. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sbs/tests/test.sysdef.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sbs/tests/test.sysdef.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,142 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +]> + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sbs/tests/test_sbs_input.ant.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sbs/tests/test_sbs_input.ant.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,179 @@ + + + + + Helium Antlib sbs unittests. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/scm/demo/build.xml --- a/buildframework/helium/external/helium-antlib/scm/demo/build.xml Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/scm/demo/build.xml Wed Dec 23 19:29:07 2009 +0200 @@ -398,4 +398,12 @@ + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/taskdefs/ChangelogAction.java --- a/buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/taskdefs/ChangelogAction.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/taskdefs/ChangelogAction.java Wed Dec 23 19:29:07 2009 +0200 @@ -20,6 +20,11 @@ import java.util.Date; import java.text.SimpleDateFormat; import java.util.Iterator; +import java.io.*; + +import org.dom4j.Document; +import org.dom4j.Element; +import org.dom4j.io.SAXReader; import org.apache.maven.scm.ScmException; import org.apache.maven.scm.ScmRevision; @@ -63,7 +68,7 @@ private String startVersion; private String endVersion; - private String xmlproperty; + private File xmlbom; /** @@ -131,13 +136,13 @@ } /** - * Output property for xml output of changeset list + * File for xml output of changeset list * * @ant.not-required */ - public void setXMLProperty(String xmlproperty) + public void setXmlbom(File xmlbom) { - this.xmlproperty = xmlproperty; + this.xmlbom = xmlbom; } /** @@ -164,7 +169,7 @@ } catch (Exception e) { - throw new BuildException("Date Format not supported jash:" + e.getMessage()); + throw new ScmException("Date Format not supported:" + e.getMessage()); } } else @@ -191,7 +196,7 @@ { getTask().log(changelogSet.toXML()); } - else if (xmlproperty != null) + else if (xmlbom != null) { String output = ""; for (Object o : changelogSet.getChangeSets()) @@ -206,8 +211,36 @@ output = output + "" + revision + "" + c.getComment() + "" + new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss").format(c.getDate()) + ""; } - - getProject().setProperty(xmlproperty, output); + String[] path = getTask().getScmUrl().split("/"); + String xml = "untitled\n"; + String thisproject = "" + "" + path[path.length - 1] + "" + "" + getTask().getScmUrl() + "" + "mercurial" + output + "\n"; + xml = xml + thisproject; + try { + if (xmlbom.exists()) + { + SAXReader xmlReader = new SAXReader(); + Document antDoc = xmlReader.read(xmlbom); + for (Iterator iterator = antDoc.selectNodes("//project").iterator(); iterator.hasNext();) + { + boolean equal = false; + Element e = (Element) iterator.next(); + for (Iterator iterator2 = antDoc.selectNodes("//baseline").iterator(); iterator2.hasNext();) + { + Element e2 = (Element) iterator2.next(); + if (e2.getText().equals(getTask().getScmUrl())) + equal = true; + } + if (!equal) + xml = xml + e.asXML() + "\n"; + } + } + xml = xml + ""; + + FileWriter fstream = new FileWriter(xmlbom); + BufferedWriter out = new BufferedWriter(fstream); + out.write(xml); + out.close(); + } catch (Exception e) { e.printStackTrace(); } } else { diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/taskdefs/InitAction.java --- 
a/buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/taskdefs/InitAction.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/taskdefs/InitAction.java Wed Dec 23 19:29:07 2009 +0200 @@ -24,7 +24,23 @@ import com.nokia.maven.scm.provider.ScmProviderExt; +/** + * Create a new repository. In the following example the new_repo mercurial repository + * will be created under /some/path/. E.g: + * + *
    + * <hlm:scm verbose="true" scmUrl="scm:hg:/some/path/new_repo">
    + *     <hlm:checkout baseDir="scm:hg:/some/path/new_repo" />
    + * </hlm:scm>
    + * 
    + * + * @ant.type name="init" category="SCM" + */ public class InitAction extends BaseDirectoryScmAction { + + /** + * {@inheritDoc} + */ @Override public void execute(ScmRepository repository) throws ScmException { ScmManager scmManager = getTask().getScmManager(); diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/taskdefs/ScmAction.java --- a/buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/taskdefs/ScmAction.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/taskdefs/ScmAction.java Wed Dec 23 19:29:07 2009 +0200 @@ -21,6 +21,14 @@ import org.apache.maven.scm.repository.ScmRepository; import org.apache.tools.ant.ProjectComponent; +/** + * Abstract class which implements common setting between + * ScmAction implementations. + * + * Any implementing action must implement the execute method as + * a execution of the action. The owning task should be used + * to log message to the user. + */ public abstract class ScmAction extends ProjectComponent { private ScmTask scmtask; @@ -39,6 +47,10 @@ this.scmtask = task; } + /** + * Get the action name based on the classname. + * @return the lowercase class name. + */ public String getName() { String className = getClass().getName(); String commandName = className diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/taskdefs/StatusAction.java --- a/buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/taskdefs/StatusAction.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/taskdefs/StatusAction.java Wed Dec 23 19:29:07 2009 +0200 @@ -36,7 +36,7 @@ * </fileset> * </hlm:status> * </hlm:scm > - * + * * * @ant.type name="status" category="SCM" */ diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/taskdefs/TagsAction.java --- a/buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/taskdefs/TagsAction.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/taskdefs/TagsAction.java Wed Dec 23 19:29:07 2009 +0200 @@ -29,14 +29,33 @@ import com.nokia.maven.scm.command.tags.TagsScmResult; import com.nokia.maven.scm.manager.ExtendedScmManager; +/** + * This action will tag current repository revision using a specific name. + * + *
    + * <hlm:scm verbose="true" scmUrl="scm:${repo.type}:${repo.dir}/test1">
    + *     <hlm:tags baseDir="${repo.dir}/test1" reference="my.tags" />
    + * </hlm:scm>
    + * 
    + * + * @ant.type name="tags" category="SCM" + */ public class TagsAction extends BaseDirectoryScmAction { private String reference; + /** + * Defines the id of the result type generated by the query. + * @param reference the name of the reference to use. + * @ant.not-required Only log result by default. + */ public void setReference(String reference) { this.reference = reference; } + /** + * {@inheritDoc} + */ @Override public void execute(ScmRepository repository) throws ScmException { TagsScmResult result; diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/types/LatestTag.java --- a/buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/types/LatestTag.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/types/LatestTag.java Wed Dec 23 19:29:07 2009 +0200 @@ -28,19 +28,43 @@ import org.apache.tools.ant.BuildException; import org.apache.tools.ant.Project; +/** + * The latestTag element will help you to find the latest revision based + * on a pattern. + * + *
    + *  <latestTag pattern="1.0.*">
    + *     <tagSet id="set.of.tags"/>
    + *  </latestTag>
    + *  
    + * + * @ant.type name="latestTag" category="SCM" + */ public class LatestTag extends Tag { private String pattern; private List tagSets = new ArrayList(); + /** + * Defines the pattern. + * @ant.required + */ public void setPattern(String pattern) { this.pattern = pattern; } + /** + * Add a set of release to search the latest release in. + * @param tagSet + */ public void add(TagSet tagSet) { tagSets.add(tagSet); } + + /** + * {@inheritDoc} + */ @Override public String getName() { if (pattern == null) @@ -56,6 +80,11 @@ return tags.get(0).getName(); } + /** + * Get a reduce set of tags, only tag matching the pattern will be + * included. + * @return the reduce list of tags. + */ protected List getCleanedList() { Pattern pVer = getPattern(); List tags = new ArrayList(); @@ -67,6 +96,15 @@ return tags; } + /** + * Get the pattern as a regular expression. + * '*' will be transformed into (\d+) (which means only numbers + * will be considered). Other character will be quoted. + * For example: + * 1.0.* => 1\.0\.(\d+) + * + * @return the transformed pattern. + */ protected Pattern getPattern() { // Quoting the current pattern getProject().log("pattern: " + pattern, Project.MSG_DEBUG); @@ -79,6 +117,10 @@ return Pattern.compile(qVer); } + /** + * Get the list of tags, from the tagSets. + * @return a list of Tags objects. + */ protected List getTags() { List tags = new ArrayList(); for (TagSet ts : tagSets) { @@ -92,15 +134,30 @@ return tags; } + /** + * This class implements the comparator interface, which will help to + * order the tag compare to the pattern. For example: + * 1.0.2 > 1.0.1 (for pattern 1.0.*) + * 1.1.1 > 1.0.2 (for pattern 1.*.*) + * + * @param A Tag kind of class. + */ public class TagComparator implements Comparator { // Pattern to match for the comparison private Pattern pVer; + /** + * Configure the Comparator with the pattern. + * @param pattern + */ public TagComparator(Pattern pattern) { pVer = pattern; } + /** + * {@inheritDoc} + */ @Override public int compare(T o1, T o2) { getProject().log("Comparing: " + o1.getName() + ">" + o2.getName(), Project.MSG_DEBUG); diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/types/Revision.java --- a/buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/types/Revision.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/types/Revision.java Wed Dec 23 19:29:07 2009 +0200 @@ -20,6 +20,14 @@ import org.apache.tools.ant.types.DataType; +/** + * The revision element store a particular revision id. + + *
    + *  <revision name="release_1.0" />
    + *  
    + * + */ public class Revision extends DataType { private String name; @@ -35,6 +43,10 @@ this.name = name; } + /** + * Get the revision. + * @return the revision. + */ public String getName() { return name; } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/types/Tag.java --- a/buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/types/Tag.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/types/Tag.java Wed Dec 23 19:29:07 2009 +0200 @@ -20,6 +20,14 @@ import org.apache.tools.ant.types.DataType; +/** + * The tag type store the value of a desired SCM tag. + * + *
    + *  <tag name="release_1.0" />
    + *  
    + * + */ public class Tag extends DataType { private String name; @@ -34,6 +42,10 @@ this.name = name; } + /** + * Get the tag value. + * @return the tag value + */ public String getName() { return name; } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/types/TagSet.java --- a/buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/types/TagSet.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/scm/src/com/nokia/helium/scm/ant/types/TagSet.java Wed Dec 23 19:29:07 2009 +0200 @@ -23,20 +23,45 @@ import org.apache.tools.ant.types.DataType; +/** + * This tagSet type is a container of tag elements. + * + * Example: + *
    + *  <tagSet>
    + *      <tag name="release_1.0" />
    + *      <tag name="release_1.0.1" />
    + *  </tagSet>
    + *  
    + * + * @ant.type name="tagSet" category="SCM" + */ public class TagSet extends DataType { private List tags = new ArrayList(); + /** + * Add a Tag element. + * @param tag + */ public void add(Tag tag) { tags.add(tag); } + /** + * Create and add a Taf element. + * @return the newly created Tag. + */ public Tag createTag() { Tag tag = new Tag(); tags.add(tag); return tag; } - + + /** + * Get the list of tags. + * @return the list of stored tag elements. + */ public List getTags() { return new ArrayList(tags); } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/command/init/InitScmResult.java --- a/buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/command/init/InitScmResult.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/command/init/InitScmResult.java Wed Dec 23 19:29:07 2009 +0200 @@ -20,7 +20,15 @@ import org.apache.maven.scm.ScmResult; +/** + * Class to store the result of the init command. + * + */ public class InitScmResult extends ScmResult { + + /** + * {@inheritDoc} + */ public InitScmResult(String commandLine, String providerMessage, String commandOutput, boolean success) { super(commandLine, providerMessage, commandOutput, success); diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/command/pull/AbstractPullCommand.java --- a/buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/command/pull/AbstractPullCommand.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/command/pull/AbstractPullCommand.java Wed Dec 23 19:29:07 2009 +0200 @@ -28,16 +28,32 @@ import org.apache.maven.scm.command.AbstractCommand; import org.apache.maven.scm.provider.ScmProviderRepository; +/** + * Abstract class to representing a pull command. + * This functionality is mainly targeted for distributed + * repository like Git or Mercurial. + */ public abstract class AbstractPullCommand extends AbstractCommand { + /** + * {@inheritDoc} + */ + @Override public ScmResult executeCommand(ScmProviderRepository repository, ScmFileSet fileSet, CommandParameters parameters) throws ScmException { return executePullCommand(repository, fileSet, parameters.getScmVersion(CommandParameter.SCM_VERSION, new ScmTag("tip"))); } - + /** + * Implements the pull functionality. + * @param repository the reporsitory. + * @param fileSet + * @param scmVersion what revision to pull. 
+ * @return + * @throws ScmException + */ protected abstract PullScmResult executePullCommand(ScmProviderRepository repository, ScmFileSet fileSet, ScmVersion scmVersion) throws ScmException; } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/command/pull/PullScmResult.java --- a/buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/command/pull/PullScmResult.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/command/pull/PullScmResult.java Wed Dec 23 19:29:07 2009 +0200 @@ -20,6 +20,10 @@ import org.apache.maven.scm.ScmResult; +/** + * + * + */ public class PullScmResult extends ScmResult { public PullScmResult( String commandLine, String providerMessage, String commandOutput, boolean success ) diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/command/tags/AbstractTagsCommand.java --- a/buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/command/tags/AbstractTagsCommand.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/command/tags/AbstractTagsCommand.java Wed Dec 23 19:29:07 2009 +0200 @@ -24,9 +24,16 @@ import org.apache.maven.scm.command.AbstractCommand; import org.apache.maven.scm.provider.ScmProviderRepository; +/** + * Abstract class representing a tags command. + * Tags consist in retrieving existing tags for a particular repository. + * + */ public abstract class AbstractTagsCommand extends AbstractCommand { - + /** + * {@inheritDoc} + */ public TagsScmResult executeCommand(ScmProviderRepository repository, ScmFileSet fileSet, CommandParameters parameters) throws ScmException { @@ -34,6 +41,14 @@ } + /** + * Execute the tags operation on the repository. + * @param repository the repository to use for the action + * @param fileSetCommand + * @param parameters + * @return a TagsScmResult representing the output of the command. + * @throws ScmException + */ protected abstract TagsScmResult executeTagsCommand(ScmProviderRepository repository, ScmFileSet fileSetCommand, CommandParameters parameters) throws ScmException; } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/command/tags/TagsScmResult.java --- a/buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/command/tags/TagsScmResult.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/command/tags/TagsScmResult.java Wed Dec 23 19:29:07 2009 +0200 @@ -25,14 +25,33 @@ import org.apache.maven.scm.ScmResult; import org.apache.maven.scm.ScmTag; +/** + * Class to store the result from the tags operation. + * + */ public class TagsScmResult extends ScmResult { private List scmVersions = new ArrayList(); + /** + * Default constructor. + * @param commandLine + * @param providerMessage + * @param commandOutput + * @param success + */ public TagsScmResult(String commandLine, String providerMessage, String commandOutput, boolean success) { super(commandLine, providerMessage, commandOutput, success); } + /** + * This constructor stores also the ScmTag retrieved. 
+ * @param commandLine + * @param providerMessage + * @param commandOutput + * @param success + * @param scmVersions + */ public TagsScmResult(String commandLine, String providerMessage, String commandOutput, boolean success, Enumeration scmVersions) { @@ -43,6 +62,10 @@ } } + /** + * Get the list of tags from the command. + * @return + */ public List getTags() { return scmVersions; } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/manager/BasicExtendedScmManager.java --- a/buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/manager/BasicExtendedScmManager.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/manager/BasicExtendedScmManager.java Wed Dec 23 19:29:07 2009 +0200 @@ -31,9 +31,17 @@ import com.nokia.maven.scm.command.info.InfoScmResult; import com.nokia.maven.scm.provider.ScmProviderExt; +/** + * Extended SCM manager which implements the additional functionalities + * defined by the ExtendedScmManager. + * + */ public class BasicExtendedScmManager extends BasicScmManager implements ExtendedScmManager { + /** + * {@inheritDoc} + */ @Override public PullScmResult pull(ScmRepository repository, File path) throws ScmException { @@ -47,6 +55,9 @@ } } + /** + * {@inheritDoc} + */ @Override public TagsScmResult tags(ScmRepository repository, File path) throws ScmException { try { @@ -59,6 +70,9 @@ } } + /** + * {@inheritDoc} + */ @Override public InfoScmResult info(ScmRepository repository, File path) throws ScmException { try { diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/provider/ScmProviderExt.java --- a/buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/provider/ScmProviderExt.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/provider/ScmProviderExt.java Wed Dec 23 19:29:07 2009 +0200 @@ -30,16 +30,50 @@ import com.nokia.maven.scm.command.tags.TagsScmResult; import com.nokia.maven.scm.command.info.InfoScmResult; +/** + * This interface describe additional functionalities provider could implement + * compare to the default ScmProvider interface + * + */ public interface ScmProviderExt { + /** + * Create a new repository. + * @param repository the repository to create. + * @return ScmResult result. + * @throws ScmException + */ ScmResult init(ScmRepository repository) throws ScmException; + /** + * Pull changes, this function is target for distributed SCM file Hg or Git. + * @param repository + * @param path the location of the checkout. + * @return a PullScmResult result object. + * @throws ScmException + */ PullScmResult pull(ScmRepository repository, File path) throws ScmException; + /** + * Get a list of tags from a repository. + * @param repository the repository + * @param fileSet + * @param parameters + * @return a TagsScmResult with the list of tags if successful. + * @throws ScmException + */ TagsScmResult tags(ScmRepository repository, ScmFileSet fileSet, CommandParameters parameters) throws ScmException; + /** + * Get information about current revision. + * @param repository + * @param fileSet + * @param parameters + * @return a InfoScmResult with current revision if successful. 
+ * @throws ScmException + */ InfoScmResult info(ScmRepository repository, ScmFileSet fileSet, CommandParameters parameters) throws ScmException; } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/provider/hg/command/init/HgInitCommand.java --- a/buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/provider/hg/command/init/HgInitCommand.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/provider/hg/command/init/HgInitCommand.java Wed Dec 23 19:29:07 2009 +0200 @@ -30,7 +30,7 @@ import org.apache.maven.scm.provider.ScmProviderRepository; import org.apache.maven.scm.provider.hg.HgUtils; import org.apache.maven.scm.provider.hg.command.HgCommandConstants; -import org.apache.maven.scm.provider.hg.repository.HgScmProviderRepository; +import com.nokia.maven.scm.provider.hg.repository.HgScmProviderRepository; import com.nokia.maven.scm.command.init.InitScmResult; @@ -39,32 +39,36 @@ private static Logger log = Logger.getLogger(HgInitCommand.class); @Override - protected ScmResult executeCommand(ScmProviderRepository arg0, - ScmFileSet arg1, CommandParameters arg2) throws ScmException { - // TODO Auto-generated method stub - return null; + protected ScmResult executeCommand(ScmProviderRepository repository, + ScmFileSet basedir, CommandParameters args) throws ScmException { + return executeInitCommand(repository); } public InitScmResult executeInitCommand(ScmProviderRepository repository) throws ScmException { // Get the directory in which to create a new repository. Only local // filesystems supported. - log.info("executeInitCommand" + repository); + log.info("executeInitCommand: " + repository); HgScmProviderRepository hgRepo = (HgScmProviderRepository) repository; String uri = hgRepo.getURI(); - String fileUri = uri.substring("scm:hg:file:/".length()); - log.info(fileUri); - File hgRepoDir = new File(fileUri); + log.info(uri); + File hgRepoDir = new File(uri); + File hgRepoRootDir = hgRepoDir.getParentFile(); - boolean workingDirReady = hgRepoDir.mkdirs(); - if (!workingDirReady) { - throw new ScmException("Could not initiate test branch at: " - + hgRepoDir); + if (!hgRepoRootDir.exists()) { + boolean workingDirReady = hgRepoRootDir.mkdirs(); + if (!workingDirReady) { + throw new ScmException("Could not initiate test branch at: " + + hgRepoRootDir); + } } // Create and run the command - String[] initCmd = new String[] { HgCommandConstants.INIT_CMD }; - HgUtils.execute(new File("f:/hg"), initCmd); - return null; + String[] initCmd = new String[] { HgCommandConstants.INIT_CMD, hgRepoDir.getName()}; + ScmResult result = HgUtils.execute(hgRepoRootDir, initCmd); + return new InitScmResult(result.getCommandLine(), + result.getProviderMessage(), + result.getCommandOutput(), + result.isSuccess()); } } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/provider/hg/command/update/HgUpdateCommand.java --- a/buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/provider/hg/command/update/HgUpdateCommand.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/provider/hg/command/update/HgUpdateCommand.java Wed Dec 23 19:29:07 2009 +0200 @@ -49,6 +49,9 @@ ScmFileSet fileSet, ScmVersion tag) throws ScmException { File workingDir = fileSet.getBasedir(); + // Find changes from last revision + int previousRevision = 
HgUtils.getCurrentRevisionNumber(getLogger(), + workingDir); // Update branch String[] updateCmd = new String[] { "update", @@ -65,30 +68,35 @@ // Find changes from last revision int currentRevision = HgUtils.getCurrentRevisionNumber(getLogger(), workingDir); - int previousRevision = currentRevision - 1; - String[] diffCmd = new String[] { HgCommandConstants.DIFF_CMD, - HgCommandConstants.REVISION_OPTION, "" + previousRevision }; + List updatedFiles = new ArrayList(); + List changes = new ArrayList(); + String[] diffCmd = null; + if (currentRevision == 0) { + diffCmd = new String[] { HgCommandConstants.DIFF_CMD, + "-c", "" + currentRevision}; + } else { + diffCmd = new String[] { HgCommandConstants.DIFF_CMD, + HgCommandConstants.REVISION_OPTION, "" + previousRevision, + HgCommandConstants.REVISION_OPTION, "" + currentRevision}; + } HgDiffConsumer diffConsumer = new HgDiffConsumer(getLogger(), - workingDir); - ScmResult diffResult = HgUtils.execute(diffConsumer, getLogger(), + workingDir); + updateResult = HgUtils.execute(diffConsumer, getLogger(), workingDir, diffCmd); // Now translate between diff and update file status - List updatedFiles = new ArrayList(); - List changes = new ArrayList(); List diffFiles = diffConsumer.getChangedFiles(); Map diffChanges = diffConsumer.getDifferences(); for (ScmFile diffFile : diffFiles) { changes.add(diffChanges.get(diffFile.getPath())); if (diffFile.getStatus() == ScmFileStatus.MODIFIED) { - updatedFiles.add(new ScmFile(diffFile.getPath(),ScmFileStatus.PATCHED)); + updatedFiles.add(new ScmFile(diffFile.getPath(), ScmFileStatus.PATCHED)); } else { updatedFiles.add(diffFile); } } - return new UpdateScmResultWithRevision(updatedFiles, changes, String - .valueOf(currentRevision), diffResult); + .valueOf(currentRevision), updateResult); } } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/provider/hg/repository/HgScmProviderRepository.java --- a/buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/provider/hg/repository/HgScmProviderRepository.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/scm/src/com/nokia/maven/scm/provider/hg/repository/HgScmProviderRepository.java Wed Dec 23 19:29:07 2009 +0200 @@ -70,9 +70,12 @@ /** {@inheritDoc} */ public String toString() { - return "Hg Repository Interpreted from: " + orgUrl + ":\nProtocol: " + if (orgUrl != null) { + return "Hg Repository Interpreted from: " + orgUrl + ":\nProtocol: " + orgUrl.getProtocol() + "\nHost: " + getHost() + "\nPort: " + getPort() + "\nUsername: " + getUser() + "\nPassword: " + getPassword() + "\nPath: " + orgUrl.getPath(); + } + return "Hg Repository Interpreted from: " + url; } } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/scm/tests/abstract_unittest_scmtask.ant.xml --- a/buildframework/helium/external/helium-antlib/scm/tests/abstract_unittest_scmtask.ant.xml Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/scm/tests/abstract_unittest_scmtask.ant.xml Wed Dec 23 19:29:07 2009 +0200 @@ -279,6 +279,15 @@ + + + + + + + + + Not in repo2 @@ -511,6 +520,11 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + content... 
+ + + + + + + + + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/scm/tests/build.xml --- a/buildframework/helium/external/helium-antlib/scm/tests/build.xml Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/scm/tests/build.xml Wed Dec 23 19:29:07 2009 +0200 @@ -23,15 +23,13 @@ Helium Antlib SCM unittests. - + + - - - - + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/Notifier.java --- a/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/Notifier.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/Notifier.java Wed Dec 23 19:29:07 2009 +0200 @@ -38,11 +38,13 @@ /** * Sends the data to the requested sender list with specified notifier * - * @param senderList - * sends the data to the list of requested user. + * @param signalName is the name of the signal that has been raised. + * @param failStatus indicates whether to fail the build or not + * @param notifierInput contains signal notifier info + * @param message is the message from the signal that has been raised. */ void sendData(String signalName, boolean failStatus, - NotifierInput notifierInput); + NotifierInput notifierInput, String message ); /** * Sends the data to the requested sender list with specified notifier diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/SignalStatusList.java --- a/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/SignalStatusList.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/SignalStatusList.java Wed Dec 23 19:29:07 2009 +0200 @@ -69,7 +69,7 @@ statusBuffer.append(signalStatus); statusBuffer.append("\n"); } - log.debug("SignalStatusList:getErrorMsg:msg:" + statusBuffer.toString()); + log.debug("getErrorMsg:msg:" + statusBuffer.toString()); return statusBuffer.toString(); } @@ -79,7 +79,7 @@ * @return true if any signal are pending. */ public boolean hasSignalInList() { - log.debug("SignalStatusList:hasDeferMsgInList:size:" + log.debug("asDeferMsgInList:size:" + signals.size()); return signals.size() > 0; } @@ -88,10 +88,10 @@ * Clear all deferred signals. 
*/ public void clearStatusList() { - log.debug("SignalStatusList:clearStatusList:size1:" + log.debug("clearStatusList:size1:" + signals.size()); signals.clear(); - log.debug("SignalStatusList:clearStatusList:size2:" + log.debug("clearStatusList:size2:" + signals.size()); } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/SignalList.java --- a/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/SignalList.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/SignalList.java Wed Dec 23 19:29:07 2009 +0200 @@ -147,8 +147,8 @@ return targetsMap.get(targetName); } - protected void sendNotifications(Vector notifierList, String signalName) { - sendNotifications(notifierList, signalName, false, null); + protected void sendNotifications(Vector notifierList, String signalName, String errorMessage ) { + sendNotifications( notifierList, signalName, false, null, errorMessage ); } public void processForSignal(Project prj, SignalNotifierInput signalNotifierInput, String signalName, String targetName, @@ -165,7 +165,7 @@ } NotifierInput notifierInput = signalNotifierInput.getNotifierInput(); sendNotifications(notifierList, signalName, failBuild, - notifierInput); + notifierInput, errorMessage ); if (failBuild) { String failStatus = "now"; if (signalInput != null) { @@ -174,17 +174,17 @@ log.debug("Could not find config for signal: " + signalName); } if (failStatus == null || failStatus.equals("now")) { + log.debug("Adding now signal. Signal name is " + signalName); SignalStatusList.getNowSignalList().addSignalStatus(new SignalStatus(signalName, errorMessage, targetName, new Date())); throw new BuildException(new SignalStatus(signalName, errorMessage, targetName, new Date()).toString()); } else if (failStatus.equals("defer")) { - log.debug("SignalList:adding defer signal:"); - log.info("Signal " + signalName + " will be deferred."); + log.debug("Adding deffer signal. Signal " + signalName + " will be deferred."); SignalStatusList.getDeferredSignalList().addSignalStatus(new SignalStatus( signalName, errorMessage, targetName, new Date())); } else if (failStatus.equals("never")) { - log.debug("SignalList:adding never signal:"); + log.debug("Adding never signal. 
Signal name is " + signalName); SignalStatusList.getNeverSignalList().addSignalStatus(new SignalStatus(signalName, errorMessage, targetName, new Date())); } else if (!failStatus.equals("never")) { @@ -204,13 +204,13 @@ * @param notifierList */ protected void sendNotifications(Vector notifierList, String signalName, - boolean failStatus, NotifierInput notifierInput) { + boolean failStatus, NotifierInput notifierInput, String errorMessage ) { if (notifierList == null) { return; } for (Notifier notifier : notifierList) { if (notifier != null) { - notifier.sendData(signalName, failStatus, notifierInput); + notifier.sendData(signalName, failStatus, notifierInput, errorMessage ); } } } @@ -241,7 +241,7 @@ } processForSignal(prj, config.getSignalNotifierInput(), signalName, targetName, errorMessage, condition != null); - log.debug("SignalList:fail:signalName: " + signalName); + log.debug("checkAndNotifyFailure: SignalName: " + signalName); } } return retValue; @@ -251,7 +251,7 @@ Condition retCondition = null; Vector conditionList = targetCondition.getConditions(); for (Condition condition : conditionList) { - log.debug("SignalList:getErrorMessage:" + condition.eval()); + log.debug("getFailureCondition:" + condition.eval()); if (condition.eval()) { retCondition = condition; break; @@ -346,17 +346,17 @@ } log.debug("failStatus: " + failStatus); if (failStatus == null || failStatus.equals("now")) { + log.debug("Adding now signal. Signal name is " + signalName); SignalStatusList.getNowSignalList().addSignalStatus(new SignalStatus(signalName, errorMessage, targetName, new Date())); throw new BuildException(new SignalStatus(signalName, errorMessage, targetName, new Date()).toString()); } else if (failStatus.equals("defer")) { - log.debug("SignalList1:adding defer signal:"); - log.info("Signal " + signalName + " will be deferred."); + log.debug("Adding deffer signal. Signal name is " + signalName); SignalStatusList.getDeferredSignalList().addSignalStatus(new SignalStatus( signalName, errorMessage, targetName, new Date())); } else if (failStatus.equals("never")) { - log.debug("SignalList1:adding never signal:"); + log.debug("Adding never signal. Signal name is " + signalName); SignalStatusList.getNeverSignalList().addSignalStatus(new SignalStatus(signalName, errorMessage, targetName, new Date())); } else if (!failStatus.equals("never")) { @@ -365,7 +365,7 @@ throw new BuildException(new SignalStatus(signalName, errorMessage, targetName, new Date()).toString()); } else { - log.info("Signal " + signalName + log.debug("Signal " + signalName + " set to be ignored by the configuration."); } } @@ -396,7 +396,7 @@ if (configCurrent != null && configCurrent instanceof SignalConfig) { signalName = refid; } - log.debug("SignalList:fail:signalName: " + signalName); + log.debug("checkAndNotify:signalName: " + signalName); notifierList = config.getSignalInput().getSignalNotifierList(); if (notifierList == null) { Object obj = (Object) prj @@ -407,24 +407,24 @@ } } failStatus = config.getSignalInput().getFailBuild(); - log.debug("SignalList:failStatus:" + failStatus); + log.debug("checkAndNotify:failStatus:" + failStatus); buildFailed = condition != null; sendNotify(notifierList, signalName, buildFailed, fileList); } if (buildFailed) { if (failStatus == null || failStatus.equals("now")) { + log.debug("Adding now signal. 
Signal name is " + signalName); SignalStatusList.getNowSignalList().addSignalStatus(new SignalStatus(signalName, errorMessage, targetName, new Date())); throw new BuildException(new SignalStatus(signalName, errorMessage, targetName, new Date()).toString()); } else if (failStatus.equals("defer")) { - log.debug("SignalList1:adding defer signal:"); - log.info("Signal " + signalName + " will be deferred."); + log.debug("Signal " + signalName + " will be deferred."); SignalStatusList.getDeferredSignalList().addSignalStatus(new SignalStatus( signalName, errorMessage, targetName, new Date())); } else if (failStatus.equals("never")) { - log.debug("SignalList1:adding never signal:"); + log.debug("Adding never signal. Signal name is " + signalName); SignalStatusList.getNeverSignalList().addSignalStatus(new SignalStatus(signalName, errorMessage, targetName, new Date())); } else if (!failStatus.equals("never")) { @@ -433,7 +433,7 @@ throw new BuildException(new SignalStatus(signalName, errorMessage, targetName, new Date()).toString()); } else { - log.info("Signal " + signalName + log.debug("Signal " + signalName + " set to be ignored by the configuration."); } } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/SignalListener.java --- a/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/SignalListener.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/SignalListener.java Wed Dec 23 19:29:07 2009 +0200 @@ -88,8 +88,7 @@ signalList.checkAndNotifyFailure(event.getTarget(),event.getProject()); //} } catch (Exception e) { - log.debug("SignalListenerException: ", e); - throw new BuildException(e.getMessage()); + throw new BuildException(e.getMessage(), e); } } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/EMAILNotifier.java --- a/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/EMAILNotifier.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/EMAILNotifier.java Wed Dec 23 19:29:07 2009 +0200 @@ -151,13 +151,15 @@ /** * Rendering the template, and sending the result through email. * - * @param signalName - * - Name of the signal that has been raised. + * @param signalName - is the name of the signal that has been raised. + * @param failStatus - indicates whether to fail the build or not + * @param notifierInput - contains signal notifier info + * @param message - is the message from the signal that has been raised. 
*/ @SuppressWarnings("unchecked") public void sendData(String signalName, boolean failStatus, - NotifierInput notifierInput) { + NotifierInput notifierInput, String message ) { if (notifyWhen != null && (notifyWhen.equals("always") || (notifyWhen.equals("fail") && failStatus) || (notifyWhen.equals("pass") && !failStatus))) { @@ -175,36 +177,43 @@ throw new HlmAntLibException(SignalListener.MODULE_NAME, "ldap attribute has not been defined."); } + + String smtpUpdated = getProject().replaceProperties(smtp); + String ldapUpdated = getProject().replaceProperties(ldap); + String rootdnUpdated = getProject().replaceProperties(rootdn); + String additionalRecipientsUpdated = getProject().replaceProperties(additionalRecipients); + log.debug("Sending data by e-mail."); EmailDataSender emailSender; - if (rootdn != null) + if (rootdnUpdated != null) { String[] to = null; - if (additionalRecipients != null) + if (additionalRecipientsUpdated != null) { - to = additionalRecipients.split(","); + to = additionalRecipientsUpdated.split(","); } - emailSender = new EmailDataSender(to, smtp, ldap, rootdn); + emailSender = new EmailDataSender(to, smtpUpdated, ldapUpdated, rootdnUpdated); } else { emailSender = new EmailDataSender( - additionalRecipients, smtp, ldap); + additionalRecipientsUpdated, smtpUpdated, ldapUpdated); } if (from != null) { emailSender.setFrom(from); } - log.debug("EmailNotifier:arlist: " + additionalRecipients); + log.debug("EmailNotifier:arlist: " + additionalRecipientsUpdated); Project subProject = getProject().createSubProject(); subProject.setProperty("signal.name", signalName); subProject.setProperty("signal.status", "" + failStatus); + subProject.setProperty("signal.message", "" + message); emailSender.addCurrentUserToAddressList(); String filePath = ""; File fileToSend = null; if (notifierInput != null) { - fileToSend = notifierInput.getFile(".html"); + fileToSend = notifierInput.getFile(".*.html"); if (fileToSend != null) { filePath = fileToSend.toString(); } @@ -224,6 +233,7 @@ Hashtable signalProperties = new Hashtable(); signalProperties.put("signal.name", signalName); signalProperties.put("signal.status", "" + failStatus); + signalProperties.put("signal.message", "" + message); sourceList.add(new PropertiesSource("signaling", signalProperties)); @@ -319,4 +329,4 @@ this.rootdn = rootdn; } -} \ No newline at end of file +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/ExecuteTaskNotifier.java --- a/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/ExecuteTaskNotifier.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/ExecuteTaskNotifier.java Wed Dec 23 19:29:07 2009 +0200 @@ -19,15 +19,17 @@ package com.nokia.helium.signal.ant.types; import java.util.ArrayList; +import java.util.List; import java.util.Vector; -import java.util.List; + import org.apache.log4j.Logger; import org.apache.tools.ant.BuildException; +import org.apache.tools.ant.BuildListener; import org.apache.tools.ant.Project; import org.apache.tools.ant.Task; -import org.apache.tools.ant.BuildListener; import org.apache.tools.ant.TaskContainer; import org.apache.tools.ant.types.DataType; + import com.nokia.helium.signal.Notifier; /** @@ -57,9 +59,17 @@ private Logger log = Logger.getLogger(ExecuteTaskNotifier.class); private List tasks = new ArrayList(); + /** + * Method executes a series of given tasks on 
raising of the specified signal. + * + * @param signalName is the name of the signal that has been raised. + * @param failStatus indicates whether to fail the build or not + * @param notifierInput contains signal notifier info + * @param message is the message from the signal that has been raised. + */ @SuppressWarnings("unchecked") public void sendData(String signalName, boolean failStatus, - NotifierInput notifierInput) { + NotifierInput notifierInput, String message ) { try { // Configure the project Project prj = getProject().createSubProject(); @@ -73,6 +83,7 @@ prj.setProperty("signal.name", signalName); prj.setProperty("signal.status", "" + failStatus); + prj.setProperty("signal.message", message ); // Converting the list of inputs into a string. String inputs = ""; if (notifierInput != null) { @@ -85,7 +96,8 @@ task.perform(); } } catch (BuildException e) { - log.debug(e); + // We are Ignoring the errors as no need to fail the build. + log.debug(e.toString(), e); } } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/LogSourceList.java --- a/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/LogSourceList.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/LogSourceList.java Wed Dec 23 19:29:07 2009 +0200 @@ -20,9 +20,7 @@ import org.apache.tools.ant.types.DataType; import com.nokia.helium.core.LogSource; - -import com.nokia.helium.core.HlmAntLibException; -import com.nokia.helium.signal.ant.SignalListener; +import org.apache.tools.ant.BuildException; import java.util.Vector; @@ -67,7 +65,7 @@ */ public Vector getLogSourceList() { if (sourceList.isEmpty()) { - throw new HlmAntLibException(SignalListener.MODULE_NAME, "notifierlist is empty."); + throw new BuildException("Signal notifierlist is empty."); } return sourceList; } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/NotifierInput.java --- a/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/NotifierInput.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/NotifierInput.java Wed Dec 23 19:29:07 2009 +0200 @@ -20,6 +20,8 @@ import java.io.File; +import java.util.List; +import java.util.ArrayList; import java.util.Vector; import org.apache.tools.ant.types.DataType; import org.apache.tools.ant.types.FileSet; @@ -55,29 +57,50 @@ } /** - * Updates the list of filelist from the input fileset. - * @param fileSetList input fileset list + * Return a file from the input fileset. + * @param pattern pattern to match from the input fileset * @return the matched files including the base dir. 
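As the javadoc above suggests, the pattern argument is treated as a Java regular expression matched against each included file path, which is why EMAILNotifier elsewhere in this patch switches from ".html" to ".*.html". A small usage sketch under that assumption; the helper class and variable names are invented for illustration and are not part of the changeset.

    import java.io.File;
    import java.util.List;
    import com.nokia.helium.signal.ant.types.NotifierInput; // assumed public type from this patch

    // Hedged usage sketch; 'input' stands for a NotifierInput that Ant has
    // populated from a <fileset>.
    public final class NotifierInputUsage {
        public static void describe(NotifierInput input) {
            File firstMatch = input.getFile(".*\\.html");     // first matching file, or null
            List allMatches = input.getFileList(".*\\.html"); // every matching file
            System.out.println("first match: " + firstMatch
                + ", total matches: " + allMatches.size());
        }
    }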
*/ public File getFile(String pattern) { + File fileToReturn = null; if (file != null) { - return file; + if (file.toString().matches(pattern)) { + fileToReturn = file; + } + return fileToReturn; } - File fileFromList = null; for (FileSet fs : fileSetList) { DirectoryScanner ds = fs.getDirectoryScanner(getProject()); String[] includedFiles = ds.getIncludedFiles(); for ( String filePath : includedFiles ) { if (filePath.matches(pattern)) { - fileFromList = new File(ds.getBasedir(), filePath); - log.debug("matched file for pattern: " + pattern + ":" + fileFromList); + fileToReturn = new File(ds.getBasedir(), filePath); + log.debug("matched file for pattern: " + pattern + ":" + fileToReturn); break; } } } - return fileFromList; + return fileToReturn; } + /** + * Returns the list of filelist from the input fileset. + * @param pattern pattern to match from the input fileset + * @return the matched files including the base dir. + */ + public List getFileList(String pattern) { + List fileList = new ArrayList(); + for (FileSet fs : fileSetList) { + DirectoryScanner ds = fs.getDirectoryScanner(getProject()); + String[] includedFiles = ds.getIncludedFiles(); + for ( String filePath : includedFiles ) { + if (filePath.matches(pattern)) { + fileList.add(new File(ds.getBasedir(), filePath)); + } + } + } + return fileList; + } /** * Helper function called by ant to set the input file. diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/SMSNotifier.java --- a/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/SMSNotifier.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/SMSNotifier.java Wed Dec 23 19:29:07 2009 +0200 @@ -29,10 +29,14 @@ } /** * Sends the data to the requested sender list with specified notifier - * @param senderList sends the data to the list of requested user. + * + * @param signalName is the name of the signal that has been raised. + * @param failStatus indicates whether to fail the build or not + * @param notifierInput contains signal notifier info + * @param message is the message from the signal that has been raised. */ public void sendData(String signalName, boolean failStatus, - NotifierInput notifierInput) { + NotifierInput notifierInput, String message ) { } /** diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/SignalConfig.java --- a/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/SignalConfig.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/SignalConfig.java Wed Dec 23 19:29:07 2009 +0200 @@ -48,6 +48,8 @@ public class SignalConfig extends DataType { + private static boolean warningPrinted; + private Vector signalInputListRef = new Vector(); private HashMap targetConditionsMap = new HashMap(); private Vector targetConditions = new Vector(); @@ -55,15 +57,19 @@ private Vector sourceList = new Vector(); private String configID; - + + /** * {@inheritDoc} */ public void setProject(Project project) { super.setProject(project); - getProject().log("signalConfig element is now deprecated. Please consider moving to signalListenerConfig element or" + + if (!warningPrinted) { + getProject().log("signalConfig element is now deprecated. 
Please consider moving to signalListenerConfig element or" + " signal task nested element.", Project.MSG_WARN); + warningPrinted = true; + } } /** diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/SignalExceptionHandler.java --- a/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/SignalExceptionHandler.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/SignalExceptionHandler.java Wed Dec 23 19:29:07 2009 +0200 @@ -17,9 +17,8 @@ package com.nokia.helium.signal.ant.types; - +import net.sf.antcontrib.logic.RunTargetTask; import org.apache.tools.ant.Project; -import org.apache.tools.ant.types.Reference; import com.nokia.helium.core.ant.HlmExceptionHandler; import com.nokia.helium.signal.ant.taskdefs.*; import com.nokia.helium.signal.ant.SignalListener; @@ -38,19 +37,12 @@ * @throws BuildException */ public void handleException(Project project, String module, Exception e) { - log.debug("SignalExceptionHandler:handleException: start"); - String refId = project.getProperty("signals.buildexception.signalinput"); - if (refId != null) { - Signal signal = new Signal(); - SignalNotifierInput signalNotifierInput = new SignalNotifierInput(); - signal.add(signalNotifierInput); - Reference ref = new Reference(project, refId); - SignalInput signalInput = signalNotifierInput.createSignalInput(); - signalInput.setRefid(ref); - signal.setProject(project); - signal.setName("buildExceptionSignal"); - signal.setMessage(e.getMessage()); - signal.execute(); + String exceptionTarget = project.getProperty("exceptions.target"); + if (exceptionTarget != null) { + RunTargetTask runTargetTask = new RunTargetTask(); + runTargetTask.setProject(project); + runTargetTask.setTarget(exceptionTarget); + runTargetTask.execute(); } - } + } } \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/SignalInput.java --- a/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/SignalInput.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/SignalInput.java Wed Dec 23 19:29:07 2009 +0200 @@ -22,13 +22,12 @@ import java.util.Vector; import org.apache.tools.ant.types.DataType; - -import com.nokia.helium.core.HlmAntLibException; import com.nokia.helium.core.ant.types.ReferenceType; import org.apache.log4j.Logger; import com.nokia.helium.signal.Notifier; -import com.nokia.helium.signal.ant.SignalListener; + +import org.apache.tools.ant.BuildException; /** * SignalInput class which is a type to store input for signals @@ -131,6 +130,6 @@ } return notifierList; } - throw new HlmAntLibException(SignalListener.MODULE_NAME, "No notifierlist reference"); + throw new BuildException("No signal notifierlist reference defined."); } } \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/SignalNotifierInput.java --- a/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/SignalNotifierInput.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/SignalNotifierInput.java Wed Dec 23 19:29:07 2009 +0200 @@ -23,8 
+23,6 @@ import org.apache.tools.ant.types.DataType; -import org.apache.log4j.Logger; - import org.apache.tools.ant.BuildException; @@ -42,8 +40,6 @@ private Vector notifierInputList = new Vector(); - private Logger log = Logger.getLogger(SignalInput.class); - /** * Helper function called by ant to create a new notifier for * this input. diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/SignalNotifierList.java --- a/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/SignalNotifierList.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/SignalNotifierList.java Wed Dec 23 19:29:07 2009 +0200 @@ -19,10 +19,9 @@ package com.nokia.helium.signal.ant.types; import org.apache.tools.ant.types.DataType; +import org.apache.tools.ant.BuildException; +import com.nokia.helium.signal.Notifier; -import com.nokia.helium.core.HlmAntLibException; -import com.nokia.helium.signal.Notifier; -import com.nokia.helium.signal.ant.SignalListener; import java.util.Vector; @@ -59,7 +58,7 @@ */ public Vector getNotifierList() { if (notifierlist.isEmpty()) { - throw new HlmAntLibException(SignalListener.MODULE_NAME, "notifierlist is empty."); + throw new BuildException(" Signal notifierlist is empty."); } return notifierlist; } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/SignalStatusDef.java --- a/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/SignalStatusDef.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/signaling/src/com/nokia/helium/signal/ant/types/SignalStatusDef.java Wed Dec 23 19:29:07 2009 +0200 @@ -38,9 +38,7 @@ * @throws BuildException */ public void execute(Project prj, String module, String[] targetNames) { - log.debug("SignalStatusDef:execute:"); if (SignalStatusList.getDeferredSignalList().hasSignalInList()) { - log.debug("SignalStatusDef:execute: Error message: " + SignalStatusList.getDeferredSignalList().getErrorMsg()); throw new BuildException(SignalStatusList.getDeferredSignalList().getErrorMsg()); } } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/signaling/tests/build.xml --- a/buildframework/helium/external/helium-antlib/signaling/tests/build.xml Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/signaling/tests/build.xml Wed Dec 23 19:29:07 2009 +0200 @@ -22,16 +22,14 @@ --> Helium Antlib signaling tests. 
+ - - - - + @@ -40,13 +38,7 @@ - - - - - - - + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/signaling/tests/src/com/nokia/helium/signaling/tests/TestEmailSender.java --- a/buildframework/helium/external/helium-antlib/signaling/tests/src/com/nokia/helium/signaling/tests/TestEmailSender.java Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/helium-antlib/signaling/tests/src/com/nokia/helium/signaling/tests/TestEmailSender.java Wed Dec 23 19:29:07 2009 +0200 @@ -49,7 +49,7 @@ en.setLdap("test"); NotifierInput input = new NotifierInput(); input.setFile(new File(System.getProperty("testdir") + "/tests/test_signal/data/test.log_status.html")); - en.sendData("test", true, input); + en.sendData("test", true, input, "Test Message"); } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/signaling/tests/test_signal/signaling_test.ant.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/signaling/tests/test_signal/signaling_test.ant.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,272 @@ + + + + + Test all the helium signals + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/signaling/tests/test_signal/test_signaling.ant.xml --- a/buildframework/helium/external/helium-antlib/signaling/tests/test_signal/test_signaling.ant.xml Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,262 +0,0 @@ - - - - - Test all the helium signals - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/bld.bat --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/bld.bat Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,60 @@ +@echo off + +rem +rem Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +rem All rights reserved. +rem This component and the accompanying materials are made available +rem under the terms of the License "Eclipse Public License v1.0" +rem which accompanies this distribution, and is available +rem at the URL "http://www.eclipse.org/legal/epl-v10.html". +rem +rem Initial Contributors: +rem Nokia Corporation - initial contribution. 
+rem +rem Contributors: +rem +rem Description: +rem + +setlocal +if not defined JAVA_6_HOME ( +set TESTED_JAVA=C:\Apps\j2sdk_1.6.0_02 +) ELSE set TESTED_JAVA=%JAVA_6_HOME% +if exist %TESTED_JAVA% (set JAVA_HOME=%TESTED_JAVA%) + +REM Configure Ant +if not defined TESTED_ANT_HOME ( +set TESTED_ANT_HOME=C:\Apps\ant_1.7 +) +if exist %TESTED_ANT_HOME% (set ANT_HOME=%TESTED_ANT_HOME%) + +REM Configure the expected Ant Version details below +SET expMajorVer=1 +SET expMinorVer=7 + +rem *** Verify Ant Version *** +rem -- Run the 'ant -version' command and capture the output to a variable +for /f "tokens=*" %%a in ('ant -version') do (set antversion=%%a) +echo *** Installed Version : %antversion% + +rem -- Parse the version string obtained above and get the version number +for /f "tokens=4 delims= " %%a in ("%antversion%") do set val=%%a +rem -- Parse the version number delimiting the '.' and set the major and +rem minor versions +for /f "tokens=1-2 delims=." %%a in ("%val%") do ( +set /A majorVersion=%%a +set /A minorVersion=%%b +) +rem -- Check whether major version is greater than or equal to the expected. +if %majorVersion% geq %expMajorVer% ( +rem -- if major version is valid, check minor version. If minor version is less +rem than expected display message and abort the execution. +if %minorVersion% lss %expMinorVer% (echo *** Incorrect version of Ant found. Please check you have atleast Ant 1.7.0 & goto :errorstop ) +) + +ant %* +endlocal + +:errorstop +@echo *** Build aborted with error +exit /b 1 \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/build.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/build.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,36 @@ + + + + Helium Antlib Sydef. + + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/demo/build.bat --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/demo/build.bat Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,27 @@ +@echo off + +rem +rem Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +rem All rights reserved. +rem This component and the accompanying materials are made available +rem under the terms of the License "Eclipse Public License v1.0" +rem which accompanies this distribution, and is available +rem at the URL "http://www.eclipse.org/legal/epl-v10.html". +rem +rem Initial Contributors: +rem Nokia Corporation - initial contribution. +rem +rem Contributors: +rem +rem Description: +rem + +setlocal +if not defined JAVA_6_HOME ( +set TESTED_JAVA=C:\Apps\j2sdk_1.6.0_02 +) ELSE set TESTED_JAVA=%JAVA_6_HOME% +if exist %TESTED_JAVA% (set JAVA_HOME=%TESTED_JAVA%) +set ANT_ARGS=-lib %CD%\..\lib -lib %CD%\..\..\bin\helium-sysdef.jar -lib %CD%\..\..\antlibs +ant %* +endlocal + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/demo/build.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/demo/build.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,36 @@ + + + + Helium Antlib sysdef demo. 
+ + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/demo/data/layer1/package1/package_definition.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/demo/data/layer1/package1/package_definition.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,31 @@ + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/demo/data/layer1/package2/package_definition.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/demo/data/layer1/package2/package_definition.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,31 @@ + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/demo/data/layer2/package3/package_definition.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/demo/data/layer2/package3/package_definition.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,31 @@ + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/demo/data/layer2/package4/package_definition.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/demo/data/layer2/package4/package_definition.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,31 @@ + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/demo/data/layer3/package5/package_definition.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/demo/data/layer3/package5/package_definition.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,32 @@ + + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/demo/data/root/system_definition_layer1.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/demo/data/root/system_definition_layer1.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,131 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +]> + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/demo/data/root/system_definition_layer2.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/demo/data/root/system_definition_layer2.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,129 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +]> + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/demo/data/root/system_definition_layer3.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/demo/data/root/system_definition_layer3.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,128 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +]> + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/demo/data/root/system_model.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/demo/data/root/system_model.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,129 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +]> + + + + + + + + \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f 
buildframework/helium/external/helium-antlib/sysdef/demo/data/sf/os/buildtools/bldsystemtools/buildsystemtools/filter-module.xsl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/demo/data/sf/os/buildtools/bldsystemtools/buildsystemtools/filter-module.xsl Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,110 @@ + + + + + + + + + + + + + + + + + + + + + + x + + + hide + + + + + + + + + + + + + + + + + + + + + + x + x + + + + hide + + + + + + + + + + + + + + + + x + + + hide + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/demo/data/sf/os/buildtools/bldsystemtools/buildsystemtools/filtersysdef.xsl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/demo/data/sf/os/buildtools/bldsystemtools/buildsystemtools/filtersysdef.xsl Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,40 @@ + + + + + + +/os/deviceplatformrelease/foundation_system/system_model/system_definition.xml + +only + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/demo/data/sf/os/buildtools/bldsystemtools/buildsystemtools/joinsysdef-module.xsl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/demo/data/sf/os/buildtools/bldsystemtools/buildsystemtools/joinsysdef-module.xsl Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,402 @@ + + + + + http://www.symbian.org/system-definition + + + + + Cannot process this document + + + + + + + + + + + + + + + + + Linked ID "" () must match linking document "" () + + + + + + + + Cannot set "", already set + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz + + + + + + + Cannot create namespace prefix for downstream default namespace + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + / + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Could not find namespace for + + + + + + + + + + + + + + + + Error + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + / + + + + + + + + + + + + + + + + + + + ../ + + + + + + + + + + + / + + + + + + + + + + + + + + + + + + + + / + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/demo/data/sf/os/buildtools/bldsystemtools/buildsystemtools/joinsysdef.xsl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/demo/data/sf/os/buildtools/bldsystemtools/buildsystemtools/joinsysdef.xsl Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,45 @@ + + + + + + +/os/deviceplatformrelease/foundation_system/system_model/system_definition.xml + + + + + + + + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/demo/data/sf/os/buildtools/bldsystemtools/buildsystemtools/mergesysdef-module.xsl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/demo/data/sf/os/buildtools/bldsystemtools/buildsystemtools/mergesysdef-module.xsl Wed Dec 23 
19:29:07 2009 +0200 @@ -0,0 +1,513 @@ + + + + +http://www.symbian.org/system-definition + + + Syntax not supported + + + Can only merge stand-alone system models + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + "" for + + + + + + + + + + + ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz + + + + + + + Cannot create namespace prefix for downstream default namespace + + + + + + + + + + + + + + + + + + + + + + + + Syntax not supported + + + Can only merge stand-alone system models + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Note: levels differ "" vs "" + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Warning: "" moved in downstream model. Ignoring moved + + + + + + + + + + + + + + + Warning: All content in downstream "" is invalid. Ignoring + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Warning: "" moved in downstream model. Ignoring moved + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/demo/data/sf/os/buildtools/bldsystemtools/buildsystemtools/mergesysdef.xsl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/demo/data/sf/os/buildtools/bldsystemtools/buildsystemtools/mergesysdef.xsl Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,107 @@ + + + + mcl/System_Definition_Template.xml + + + + + + + + + Syntax not supported + + + + Upstream + + + + + + + + + + + Downstream + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/demo/data/sf/os/buildtools/bldsystemtools/buildsystemtools/sysdefdowngrade.xsl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/demo/data/sf/os/buildtools/bldsystemtools/buildsystemtools/sysdefdowngrade.xsl Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,397 @@ + + + + + + os/deviceplatformrelease/foundation_system/system_model + + + + + + + Cannot process this document + + + + + + + + + + + + + + + + + + + + Package definition cannot link another package + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Error: IDs do not match: vs + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Excessive nesting of packages: Ignoring + + + + + + + + + + + + + + + + + + + + + Y + + + plugin + + placeholder + PC + + + + + + + + + + + + + + + + + + + + + + + Y + N + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + / + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +]> +]]> + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/lib/serializer.jar Binary file 
buildframework/helium/external/helium-antlib/sysdef/lib/serializer.jar has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/lib/xalan.jar Binary file buildframework/helium/external/helium-antlib/sysdef/lib/xalan.jar has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/src/com/nokia/helium/sysdef/ant/antlib.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/src/com/nokia/helium/sysdef/ant/antlib.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,30 @@ + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/src/com/nokia/helium/sysdef/ant/helium.antlib.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/src/com/nokia/helium/sysdef/ant/helium.antlib.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,28 @@ + + + + + Ant task definition declarations. + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/src/com/nokia/helium/sysdef/ant/taskdefs/AbstractSydefTask.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/src/com/nokia/helium/sysdef/ant/taskdefs/AbstractSydefTask.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,237 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.sysdef.ant.taskdefs; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.util.Map; + +import javax.xml.transform.ErrorListener; +import javax.xml.transform.Transformer; +import javax.xml.transform.TransformerException; +import javax.xml.transform.TransformerFactory; +import javax.xml.transform.stream.StreamResult; +import javax.xml.transform.stream.StreamSource; + +import org.apache.tools.ant.BuildException; +import org.apache.tools.ant.Project; +import org.apache.tools.ant.Task; + +/** + * This is the base class for manipulating Sysdef v3 files using XML Stylesheet. + * The implementation/interface is not frozen yet. It is morelikely going to + * change in the future, depending of the underlaying implementation. + * + */ +public abstract class AbstractSydefTask extends Task implements ErrorListener { + + private File srcFile; + private File destFile; + private File epocroot; + private boolean failOnError = true; + + /** + * Get the source file. + * @return + */ + public File getSrcFile() { + return srcFile; + } + + /** + * Defines the location of the source system definition file. + * @param srcfile + * @ant.required + */ + public void setSrcFile(File srcfile) { + this.srcFile = srcfile; + } + + /** + * Get the location of the output file. + * @return + */ + public File getDestFile() { + return destFile; + } + + /** + * The the name of the destination file. + * @param destfile + * @ant.required + */ + public void setDestFile(File destfile) { + this.destFile = destfile; + } + + /** + * Get the epocroot. + * If epocroot is not set by the user it return the value from the EPOCROOT environment variable. 
+ * If the EPOCROOT environment variable is not defined then a BuildException is thrown. + * @return the epocroot location as a File object. + */ + public File getEpocroot() { + if (epocroot == null) { + if (System.getenv("EPOCROOT") != null) { + return new File(System.getenv("EPOCROOT")); + } + else { + throw new BuildException("'epocroot' attribute has not been defined."); + } + } + return epocroot; + } + + /** + * Location of the EPOCROOT. + * @ant.not-required By default the EPOCROOT environment variable is used. + * @param epocroot path to the epocroot. + */ + public void setEpocroot(File epocroot) { + this.epocroot = epocroot; + } + + /** + * Shall we fail the build on error. + * @return is the task should failonerror. + */ + public boolean isFailOnError() { + return failOnError; + } + + /** + * Defines if the file should fail on error. + * @param failonerror + * @ant.not-required Default is true. + */ + public void setFailOnError(boolean failonerror) { + this.failOnError = failonerror; + } + + /** + * This method should be defined by the implementing class + * to define the location of the XSLT file. + * @return the XSLT file location. + */ + abstract File getXsl(); + + /** + * Check if required attribute have been configured correctly. + * If not the method will raise a BuildException. + */ + protected void check() { + if (srcFile == null) { + throw new BuildException("'srcfile' attribute is not defined"); + } + if (destFile == null) { + throw new BuildException("'destfile' attribute is not defined"); + } + File xslt = getXsl(); + if (!xslt.exists()) { + throw new BuildException("Could not find " + xslt); + } + if (!srcFile.exists()) { + throw new BuildException("Could not find source file " + srcFile); + } + } + + /** + * Transform the srcfile using the stylesheet provided by getXsl. The data parameters are + * passed to the template engine. The result is save to the destfile. + * + * @param data a set of key/value to pass to the XSLT engine. + */ + public void transform(Map data) { + check(); + if (destFile.exists()) { + log("Deleting previous output file: " + destFile, Project.MSG_DEBUG); + destFile.delete(); + } + + FileOutputStream output = null; + try { + output = new FileOutputStream(destFile); + TransformerFactory factory = TransformerFactory.newInstance(); + Transformer transformer = factory.newTransformer(new StreamSource(getXsl())); + + transformer.setParameter("path", srcFile); + for (Map.Entry entry : data.entrySet()) { + transformer.setParameter(entry.getKey(), entry.getValue()); + } + transformer.setErrorListener(this); + transformer.transform(new StreamSource(srcFile), new StreamResult( + output)); + + } catch (Exception exc) { + // deleting the intermediate file in case of error. + if (destFile.exists()) { + // closing current output stream, so we can delete the file + try { + if (output != null) { + output.close(); + } + } catch (IOException ioe) { + // we should just ignore that error. + log(ioe, Project.MSG_DEBUG); + } + log("Deleting " + destFile + " because an error occured.", Project.MSG_INFO); + destFile.delete(); + } + // Raising the error to Ant. + throw new BuildException(exc.toString()); + } + } + + /** + * {@inheritDoc} + * Reports errors to the Ant logging system of throw the exception if the task + * is set to failonerror. 
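Concrete tasks use this base class by supplying a stylesheet through getXsl() and calling transform() with any extra template parameters, much as the DowngradeTask and JoinTask further below do. A hedged sketch of such a subclass; the class name, the stylesheet path and the "filter" parameter are assumptions made for illustration only.

    package com.nokia.helium.sysdef.ant.taskdefs; // same package as the real tasks (assumption)

    import java.io.File;
    import java.util.Hashtable;
    import java.util.Map;

    // Hedged sketch of an AbstractSydefTask subclass.
    public class ExampleSysdefTask extends AbstractSydefTask {
        private static final String XSLT =
            "sf/os/buildtools/bldsystemtools/buildsystemtools/example.xsl"; // invented path

        public void execute() {
            check();                        // validate srcfile/destfile before running
            Map data = new Hashtable();     // extra parameters passed to the stylesheet
            data.put("filter", "java");
            transform(data);
        }

        @Override
        protected File getXsl() {
            return new File(getEpocroot(), XSLT);
        }
    }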
+ */ + @Override + public void error(TransformerException message) throws TransformerException { + if (this.isFailOnError()) { + throw message; + } else { + log("ERROR: " + message.getMessageAndLocation(), Project.MSG_ERR); + } + } + + /** + * {@inheritDoc} + * Fails the task in case of fatal error. The is nothing we can do about that. + */ + @Override + public void fatalError(TransformerException message) throws TransformerException { + log("ERROR: " + message.getMessageAndLocation(), Project.MSG_ERR); + throw message; + } + + /** + * {@inheritDoc} + * Reports errors to the Ant logging system of throw the exception if the task + * is set to failonerror. + */ + @Override + public void warning(TransformerException message) throws TransformerException { + if (this.isFailOnError()) { + throw message; + } else { + log("WARNING: " + message.getMessageAndLocation(), Project.MSG_WARN); + } + } +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/src/com/nokia/helium/sysdef/ant/taskdefs/DowngradeTask.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/src/com/nokia/helium/sysdef/ant/taskdefs/DowngradeTask.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,59 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.sysdef.ant.taskdefs; + +import java.io.File; +import java.util.Hashtable; + +/** + *

    This task allows the downgrade of a system definition file from v3.0 to v2.0.

    + * + *

    The following example shows how you can downgrade X:\model.sysdef.xml as + * X:\model_2_0_1.sysdef.xml.

    + * + * E.g: + *
    + *   <hlm:downgradeSysdef epocroot="X:\" srcfile="X:\model.sysdef.xml" 
    + *                      destfile="X:\model_2_0_1.sysdef.xml" />
    + *   
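The Ant fragment above is the documented way to invoke the task. As a rough illustration only, the same task can also be driven from plain Java (for example in a unit test), using nothing beyond the setters and execute() introduced by this patch; the class name DowngradeExample and the X:\ paths are placeholders, not part of the changeset.

import java.io.File;
import org.apache.tools.ant.Project;
import com.nokia.helium.sysdef.ant.taskdefs.DowngradeTask;

public class DowngradeExample {
    public static void main(String[] args) {
        DowngradeTask task = new DowngradeTask();
        task.setProject(new Project());                       // Ant tasks log through their project
        task.setEpocroot(new File("X:\\"));                   // omit to fall back to the EPOCROOT environment variable
        task.setSrcFile(new File("X:\\model.sysdef.xml"));
        task.setDestFile(new File("X:\\model_2_0_1.sysdef.xml"));
        task.execute();                                       // applies sysdefdowngrade.xsl via AbstractSydefTask.transform()
    }
}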
    + * + * For more information about system definition file v3.0 please check + * http://developer.symbian.org/wiki/index.php/System_Definition. + * + * @ant.task name="downgradeSysdef" category="Sysdef" + */ +public class DowngradeTask extends AbstractSydefTask { + private static final String XSLT = "sf/os/buildtools/bldsystemtools/buildsystemtools/sysdefdowngrade.xsl"; + + /** + * {@inheritDoc} + */ + public void execute() { + check(); + log("Downgrading " + this.getSrcFile() + " to 2.0.1 schema."); + log("Creating " + this.getDestFile()); + transform(new Hashtable()); + } + + /** + * {@inheritDoc} + */ + @Override + protected File getXsl() { + return new File(this.getEpocroot(), XSLT); + } +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/src/com/nokia/helium/sysdef/ant/taskdefs/JoinTask.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/src/com/nokia/helium/sysdef/ant/taskdefs/JoinTask.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,62 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.sysdef.ant.taskdefs; + +import java.io.File; +import java.util.Hashtable; + +/** + *

    This task performs the join operation on a system definition file v3.0. + * The join operation consists of combining a distributed system definition file into + * a stand-alone version.

    + * + *

    The following example shows how you can join X:\layer.sysdef.xml as + * X:\joined_layer.sysdef.xml.

    + * + * E.g: + *
    + *   <hlm:joinSysdef epocroot="X:\" srcfile="X:\layer.sysdef.xml" 
    + *                      destfile="X:\joined_layer.sysdef.xml" />
    + * 
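As with the downgrade task, the join can be exercised directly from Java; the sketch below is illustrative only (JoinExample and the paths are placeholders) and additionally uses the failOnError switch inherited from AbstractSydefTask, which turns recoverable XSLT errors into log messages instead of build failures.

import java.io.File;
import org.apache.tools.ant.Project;
import com.nokia.helium.sysdef.ant.taskdefs.JoinTask;

public class JoinExample {
    public static void main(String[] args) {
        JoinTask join = new JoinTask();
        join.setProject(new Project());
        join.setEpocroot(new File("X:\\"));                    // joinsysdef.xsl is resolved relative to this root
        join.setSrcFile(new File("X:\\layer.sysdef.xml"));
        join.setDestFile(new File("X:\\joined_layer.sysdef.xml"));
        join.setFailOnError(false);                            // log recoverable transform errors instead of failing the build
        join.execute();
    }
}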
    + * + * For more information about system definition file v3.0 please check + * http://developer.symbian.org/wiki/index.php/System_Definition. + * + * @ant.task name="joinSysdef" category="Sysdef" + */ + +public class JoinTask extends AbstractSydefTask { + private static final String XSLT = "sf/os/buildtools/bldsystemtools/buildsystemtools/joinsysdef.xsl"; + + /** + * {@inheritDoc} + */ + public void execute() { + check(); + log("Joining " + this.getSrcFile()); + log("Creating " + this.getDestFile()); + transform(new Hashtable()); + } + + /** + * {@inheritDoc} + */ + @Override + protected File getXsl() { + return new File(this.getEpocroot(), XSLT); + } +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/src/com/nokia/helium/sysdef/ant/taskdefs/MergeTask.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/src/com/nokia/helium/sysdef/ant/taskdefs/MergeTask.java Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,97 @@ +/* +* Copyright (c) 2007-2008 Nokia Corporation and/or its subsidiary(-ies). +* All rights reserved. +* This component and the accompanying materials are made available +* under the terms of the License "Eclipse Public License v1.0" +* which accompanies this distribution, and is available +* at the URL "http://www.eclipse.org/legal/epl-v10.html". +* +* Initial Contributors: +* Nokia Corporation - initial contribution. +* +* Contributors: +* +* Description: +* +*/ +package com.nokia.helium.sysdef.ant.taskdefs; + +import java.io.File; +import java.util.Hashtable; +import java.util.Map; + +import org.apache.tools.ant.BuildException; + +/** + *

    This task performs the merge operation on a system definition file v3.0. + * The merge operation consists of combining the data of two models into one stand-alone + * system definition file (also called a canonical system definition file).

    + * + *

    The following example shows how you can merge X:\layer.sysdef.xml and X:\vendor.sysdef.xml + * as X:\joined_layer.sysdef.xml.

    + * + * E.g: + *
    + *   <hlm:mergeSysdef epocroot="X:\"
    + *                      srcfile="X:\layer.sysdef.xml" 
    + *                      downstreamfile="X:\vendor.sysdef.xml" 
    + *                      destfile="X:\joined_layer.sysdef.xml" />
    + * 
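For completeness, a comparable Java-level sketch of the merge task (MergeExample and the paths are placeholders): the only addition over the other tasks is the downstream model, which MergeTask hands to mergesysdef.xsl as the "Downstream" stylesheet parameter.

import java.io.File;
import org.apache.tools.ant.Project;
import com.nokia.helium.sysdef.ant.taskdefs.MergeTask;

public class MergeExample {
    public static void main(String[] args) {
        MergeTask merge = new MergeTask();
        merge.setProject(new Project());
        merge.setEpocroot(new File("X:\\"));
        merge.setSrcFile(new File("X:\\layer.sysdef.xml"));          // upstream model
        merge.setDownstreamfile(new File("X:\\vendor.sysdef.xml"));  // merged into the upstream model
        merge.setDestFile(new File("X:\\joined_layer.sysdef.xml"));
        merge.execute();                                             // passes "Downstream" to mergesysdef.xsl
    }
}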
    + * + * + * For more information about system definition file v3.0 please check + * http://developer.symbian.org/wiki/index.php/System_Definition. + * + * @ant.task name="mergeSysdef" category="Sysdef" + */ + +public class MergeTask extends AbstractSydefTask { + private static final String XSLT = "sf/os/buildtools/bldsystemtools/buildsystemtools/mergesysdef.xsl"; + private File downstreamFile; + + /** + * Get the downstream file for the merge. + * @return + */ + public File getDownstreamFile() { + return downstreamFile; + } + + /** + * Defines the location of the downstream file. + * @param downstreamfile + * @ant.required + */ + public void setDownstreamfile(File downstreamFile) { + this.downstreamFile = downstreamFile; + } + + + /** + * {@inheritDoc} + */ + public void execute() { + check(); + if (downstreamFile == null) { + throw new BuildException("'downstreamfile' attribute is not defined"); + } + if (!downstreamFile.exists()) { + throw new BuildException("Could not find downstream file " + downstreamFile); + } + + log("Merging " + this.getSrcFile()); + log("Downstream " + downstreamFile); + log("Creating " + this.getDestFile()); + Map data = new Hashtable(); + data.put("Downstream", downstreamFile.toString()); + transform(data); + } + + /** + * {@inheritDoc} + */ + @Override + protected File getXsl() { + return new File(this.getEpocroot(), XSLT); + } +} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/tests/antunit/test_downgrade_sysdef.ant.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/tests/antunit/test_downgrade_sysdef.ant.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,74 @@ + + + + Helium antlib downgrade sysdef tests. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/tests/antunit/test_join_sysdef.ant.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/tests/antunit/test_join_sysdef.ant.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,86 @@ + + + + Helium antlib join sysdef tests. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + test-srcfile-destfile-wrong-epocroot + + + + + + + + test-wrong-content + + + + + + + + test-wrong-content + + + + + + + \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/tests/antunit/test_merge_sysdef.ant.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/tests/antunit/test_merge_sysdef.ant.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,101 @@ + + + + Helium antlib merge sysdef tests. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + test-wrong-content + + + + + + + \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/tests/bld.sh --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/tests/bld.sh Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,24 @@ +#!/bin/csh + +# +# Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +# All rights reserved. 
+# This component and the accompanying materials are made available +# under the terms of the License "Eclipse Public License v1.0" +# which accompanies this distribution, and is available +# at the URL "http://www.eclipse.org/legal/epl-v10.html". +# +# Initial Contributors: +# Nokia Corporation - initial contribution. +# +# Contributors: +# +# Description: +# + + + +module load java/1.6.0 +module load mercurial +setenv ANT_ARGS "-lib ../lib -lib ../../lib -lib ../../bin/helium-core.jar -lib ../../bin/helium-sysdef.jar -lib ../../antlibs" +ant -Dant.executor.class="com.nokia.helium.core.ant.HeliumExecutor" $* diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/tests/build.bat --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/tests/build.bat Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,27 @@ +@echo off + +rem +rem Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). +rem All rights reserved. +rem This component and the accompanying materials are made available +rem under the terms of the License "Eclipse Public License v1.0" +rem which accompanies this distribution, and is available +rem at the URL "http://www.eclipse.org/legal/epl-v10.html". +rem +rem Initial Contributors: +rem Nokia Corporation - initial contribution. +rem +rem Contributors: +rem +rem Description: +rem + +setlocal +if not defined JAVA_6_HOME ( +set TESTED_JAVA=C:\Apps\j2sdk_1.6.0_02 +) ELSE set TESTED_JAVA=%JAVA_6_HOME% +if exist %TESTED_JAVA% (set JAVA_HOME=%TESTED_JAVA%) +set ANT_ARGS=-lib %CD%\..\lib -lib %CD%\..\..\lib -lib %CD%\..\..\bin\helium-core.jar -lib %CD%\..\..\bin\helium-sysdef.jar -lib %CD%\..\..\antlibs +ant -Dant.executor.class=com.nokia.helium.core.ant.HeliumExecutor %* +endlocal + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/sysdef/tests/build.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/sysdef/tests/build.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,35 @@ + + + + Helium Antlib Sysdef unittests. + + + + + + + + + + + diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/helium-antlib/test-macros.ant.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/helium-antlib/test-macros.ant.xml Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,46 @@ + + + + Helium Antlib test macro. 
+ + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/bin/nosetests-script.py --- a/buildframework/helium/external/python/bin/nosetests-script.py Wed Oct 28 14:39:48 2009 +0000 +++ b/buildframework/helium/external/python/bin/nosetests-script.py Wed Dec 23 19:29:07 2009 +0200 @@ -5,5 +5,5 @@ from pkg_resources import load_entry_point sys.exit( - load_entry_point('nose==0.10.4', 'console_scripts', 'nosetests')() + load_entry_point('nose==0.11.1', 'console_scripts', 'nosetests')() ) diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Amara-1.2.0.1-py2.5.egg Binary file buildframework/helium/external/python/lib/2.5/Amara-1.2.0.1-py2.5.egg has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Jinja-1.2-py2.5-win32.egg Binary file buildframework/helium/external/python/lib/2.5/Jinja-1.2-py2.5-win32.egg has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Jinja2-2.0rc1-py2.5-win32.egg Binary file buildframework/helium/external/python/lib/2.5/Jinja2-2.0rc1-py2.5-win32.egg has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Pygments-0.10-py2.5.egg Binary file buildframework/helium/external/python/lib/2.5/Pygments-0.10-py2.5.egg has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/EGG-INFO/PKG-INFO --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/EGG-INFO/PKG-INFO Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,45 +0,0 @@ -Metadata-Version: 1.0 -Name: Sphinx -Version: 0.5.1 -Summary: Python documentation generator -Home-page: http://sphinx.pocoo.org/ -Author: Georg Brandl -Author-email: georg@python.org -License: BSD -Download-URL: http://pypi.python.org/pypi/Sphinx -Description: - Sphinx is a tool that makes it easy to create intelligent and beautiful - documentation for Python projects (or other documents consisting of - multiple reStructuredText sources), written by Georg Brandl. - It was originally created to translate the new Python documentation, - but has now been cleaned up in the hope that it will be useful to many - other projects. - - Sphinx uses reStructuredText as its markup language, and many of its strengths - come from the power and straightforwardness of reStructuredText and its - parsing and translating suite, the Docutils. - - Although it is still under constant development, the following features - are already present, work fine and can be seen "in action" in the Python docs: - - * Output formats: HTML (including Windows HTML Help), plain text and LaTeX, - for printable PDF versions - * Extensive cross-references: semantic markup and automatic links - for functions, classes, glossary terms and similar pieces of information - * Hierarchical structure: easy definition of a document tree, with automatic - links to siblings, parents and children - * Automatic indices: general index as well as a module index - * Code handling: automatic highlighting using the Pygments highlighter - * Various extensions are available, e.g. for automatic testing of snippets - and inclusion of appropriately formatted docstrings. 
- -Platform: any -Classifier: Development Status :: 4 - Beta -Classifier: Environment :: Console -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Documentation -Classifier: Topic :: Utilities diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/EGG-INFO/SOURCES.txt --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/EGG-INFO/SOURCES.txt Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,216 +0,0 @@ -AUTHORS -CHANGES -EXAMPLES -LICENSE -MANIFEST.in -Makefile -README -TODO -babel.cfg -ez_setup.py -setup.cfg -setup.py -sphinx-build.py -sphinx-quickstart.py -Sphinx.egg-info/PKG-INFO -Sphinx.egg-info/SOURCES.txt -Sphinx.egg-info/dependency_links.txt -Sphinx.egg-info/entry_points.txt -Sphinx.egg-info/not-zip-safe -Sphinx.egg-info/requires.txt -Sphinx.egg-info/top_level.txt -doc/Makefile -doc/builders.rst -doc/changes.rst -doc/concepts.rst -doc/conf.py -doc/config.rst -doc/contents.rst -doc/examples.rst -doc/extensions.rst -doc/glossary.rst -doc/intro.rst -doc/rest.rst -doc/templating.rst -doc/_static/sphinx.png -doc/_templates/index.html -doc/_templates/indexsidebar.html -doc/_templates/layout.html -doc/ext/appapi.rst -doc/ext/autodoc.rst -doc/ext/builderapi.rst -doc/ext/coverage.rst -doc/ext/doctest.rst -doc/ext/ifconfig.rst -doc/ext/intersphinx.rst -doc/ext/math.rst -doc/ext/refcounting.rst -doc/ext/todo.rst -doc/markup/code.rst -doc/markup/desc.rst -doc/markup/index.rst -doc/markup/inline.rst -doc/markup/misc.rst -doc/markup/para.rst -sphinx/__init__.py -sphinx/_jinja.py -sphinx/addnodes.py -sphinx/application.py -sphinx/builder.py -sphinx/cmdline.py -sphinx/config.py -sphinx/environment.py -sphinx/highlighting.py -sphinx/htmlhelp.py -sphinx/htmlwriter.py -sphinx/latexwriter.py -sphinx/linkcheck.py -sphinx/quickstart.py -sphinx/roles.py -sphinx/search.py -sphinx/setup_command.py -sphinx/textwriter.py -sphinx/directives/__init__.py -sphinx/directives/code.py -sphinx/directives/desc.py -sphinx/directives/other.py -sphinx/ext/__init__.py -sphinx/ext/autodoc.py -sphinx/ext/coverage.py -sphinx/ext/doctest.py -sphinx/ext/ifconfig.py -sphinx/ext/intersphinx.py -sphinx/ext/jsmath.py -sphinx/ext/mathbase.py -sphinx/ext/pngmath.py -sphinx/ext/refcounting.py -sphinx/ext/todo.py -sphinx/locale/__init__.py -sphinx/locale/__init__.pyc -sphinx/locale/sphinx.pot -sphinx/locale/cs/LC_MESSAGES/sphinx.js -sphinx/locale/cs/LC_MESSAGES/sphinx.mo -sphinx/locale/cs/LC_MESSAGES/sphinx.po -sphinx/locale/de/LC_MESSAGES/sphinx.js -sphinx/locale/de/LC_MESSAGES/sphinx.mo -sphinx/locale/de/LC_MESSAGES/sphinx.po -sphinx/locale/es/LC_MESSAGES/sphinx.js -sphinx/locale/es/LC_MESSAGES/sphinx.mo -sphinx/locale/es/LC_MESSAGES/sphinx.po -sphinx/locale/fr/LC_MESSAGES/sphinx.js -sphinx/locale/fr/LC_MESSAGES/sphinx.mo -sphinx/locale/fr/LC_MESSAGES/sphinx.po -sphinx/locale/ja/LC_MESSAGES/sphinx.js -sphinx/locale/ja/LC_MESSAGES/sphinx.mo -sphinx/locale/ja/LC_MESSAGES/sphinx.po -sphinx/locale/nl/LC_MESSAGES/sphinx.js -sphinx/locale/nl/LC_MESSAGES/sphinx.mo -sphinx/locale/nl/LC_MESSAGES/sphinx.po -sphinx/locale/pl/LC_MESSAGES/sphinx.js -sphinx/locale/pl/LC_MESSAGES/sphinx.mo -sphinx/locale/pl/LC_MESSAGES/sphinx.po -sphinx/locale/pt_BR/LC_MESSAGES/sphinx.js -sphinx/locale/pt_BR/LC_MESSAGES/sphinx.mo 
-sphinx/locale/pt_BR/LC_MESSAGES/sphinx.po -sphinx/locale/sl/LC_MESSAGES/sphinx.js -sphinx/locale/sl/LC_MESSAGES/sphinx.mo -sphinx/locale/sl/LC_MESSAGES/sphinx.po -sphinx/locale/zh_TW/LC_MESSAGES/sphinx.js -sphinx/locale/zh_TW/LC_MESSAGES/sphinx.mo -sphinx/locale/zh_TW/LC_MESSAGES/sphinx.po -sphinx/static/contents.png -sphinx/static/default.css -sphinx/static/doctools.js -sphinx/static/file.png -sphinx/static/jquery.js -sphinx/static/minus.png -sphinx/static/navigation.png -sphinx/static/plus.png -sphinx/static/rightsidebar.css -sphinx/static/searchtools.js -sphinx/static/sphinxdoc.css -sphinx/static/stickysidebar.css -sphinx/static/traditional.css -sphinx/templates/defindex.html -sphinx/templates/genindex-single.html -sphinx/templates/genindex-split.html -sphinx/templates/genindex.html -sphinx/templates/layout.html -sphinx/templates/modindex.html -sphinx/templates/opensearch.xml -sphinx/templates/page.html -sphinx/templates/search.html -sphinx/templates/changes/frameset.html -sphinx/templates/changes/rstsource.html -sphinx/templates/changes/versionchanges.html -sphinx/texinputs/Makefile -sphinx/texinputs/fncychap.sty -sphinx/texinputs/howto.cls -sphinx/texinputs/manual.cls -sphinx/texinputs/python.ist -sphinx/texinputs/sphinx.sty -sphinx/texinputs/tabulary.sty -sphinx/util/__init__.py -sphinx/util/compat.py -sphinx/util/console.py -sphinx/util/jsdump.py -sphinx/util/png.py -sphinx/util/smartypants.py -sphinx/util/stemmer.py -sphinx/util/texescape.py -tests/path.py -tests/path.pyc -tests/run.py -tests/test_application.py -tests/test_application.pyc -tests/test_autodoc.py -tests/test_autodoc.pyc -tests/test_build.py -tests/test_build.pyc -tests/test_config.py -tests/test_config.pyc -tests/test_coverage.py -tests/test_coverage.pyc -tests/test_env.py -tests/test_env.pyc -tests/test_i18n.py -tests/test_i18n.pyc -tests/test_markup.py -tests/test_markup.pyc -tests/test_quickstart.py -tests/test_quickstart.pyc -tests/util.py -tests/util.pyc -tests/etree13/ElementPath.py -tests/etree13/ElementPath.pyc -tests/etree13/ElementTree.py -tests/etree13/ElementTree.pyc -tests/etree13/HTMLTreeBuilder.py -tests/etree13/__init__.py -tests/etree13/__init__.pyc -tests/root/Makefile -tests/root/autodoc.txt -tests/root/conf.py -tests/root/contents.txt -tests/root/desc.txt -tests/root/ext.py -tests/root/ext.pyc -tests/root/images.txt -tests/root/img.gif -tests/root/img.pdf -tests/root/img.png -tests/root/includes.txt -tests/root/literal.inc -tests/root/markup.txt -tests/root/math.txt -tests/root/wrongenc.inc -tests/root/_static/README -tests/root/_templates/layout.html -tests/root/special/api.h -tests/root/subdir/images.txt -tests/root/subdir/img.png -tests/root/subdir/include.inc -tests/root/subdir/simg.png -utils/check_sources.py -utils/pylintrc -utils/reindent.py \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/EGG-INFO/dependency_links.txt --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/EGG-INFO/dependency_links.txt Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ - diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/EGG-INFO/entry_points.txt --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/EGG-INFO/entry_points.txt Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,7 +0,0 @@ -[console_scripts] -sphinx-build = sphinx:main -sphinx-quickstart = 
sphinx.quickstart:main - -[distutils.commands] -build_sphinx = sphinx.setup_command:BuildDoc - diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/EGG-INFO/not-zip-safe --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/EGG-INFO/not-zip-safe Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ - diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/EGG-INFO/requires.txt --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/EGG-INFO/requires.txt Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,3 +0,0 @@ -Pygments>=0.8 -Jinja>=1.1 -docutils>=0.4 \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/EGG-INFO/top_level.txt --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/EGG-INFO/top_level.txt Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -sphinx diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/__init__.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/__init__.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,48 +0,0 @@ -# -*- coding: utf-8 -*- -""" - Sphinx - ~~~~~~ - - The Sphinx documentation toolchain. - - :copyright: 2007-2008 by Georg Brandl. - :license: BSD. -""" - -import sys - -__revision__ = '$Revision$' -__version__ = '0.5.1' -__released__ = '0.5.1' - - -def main(argv=sys.argv): - if sys.version_info[:3] < (2, 4, 0): - print >>sys.stderr, \ - 'Error: Sphinx requires at least Python 2.4 to run.' - return 1 - - try: - from sphinx import cmdline - except ImportError, err: - errstr = str(err) - if errstr.lower().startswith('no module named'): - whichmod = errstr[16:] - if whichmod.startswith('docutils'): - whichmod = 'Docutils library' - elif whichmod.startswith('jinja'): - whichmod = 'Jinja library' - elif whichmod == 'roman': - whichmod = 'roman module (which is distributed with Docutils)' - else: - whichmod += ' module' - print >>sys.stderr, \ - 'Error: The %s cannot be found. Did you install Sphinx '\ - 'and its dependencies correctly?' % whichmod - return 1 - raise - return cmdline.main(argv) - - -if __name__ == '__main__': - sys.exit(main(sys.argv)) diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/_jinja.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/_jinja.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,113 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx._jinja - ~~~~~~~~~~~~~ - - Jinja glue. - - :copyright: 2007-2008 by Georg Brandl, Horst Gutmann. - :license: BSD. -""" - -import codecs -from os import path - -from sphinx.util import mtimes_of_files -from sphinx.application import TemplateBridge - -from jinja import Environment -from jinja.loaders import BaseLoader -from jinja.exceptions import TemplateNotFound - - -def babel_extract(fileobj, keywords, comment_tags, options): - """ - Simple extractor to get some basic Babel support. 
- """ - env = Environment() - for lineno, sg, pl in env.get_translations_for_string(fileobj.read()): - yield lineno, None, (sg, pl), '' - - -class SphinxFileSystemLoader(BaseLoader): - """ - A loader that loads templates either relative to one of a list of given - paths, or from an absolute path. - """ - - def __init__(self, basepath, extpaths): - self.basepath = path.abspath(basepath) - self.extpaths = map(path.abspath, extpaths) - self.searchpaths = self.extpaths + [self.basepath] - - def get_source(self, environment, name, parent): - name = name.replace('/', path.sep) - if name.startswith('!'): - name = name[1:] - if not path.exists(path.join(self.basepath, name)): - raise TemplateNotFound(name) - filename = path.join(self.basepath, name) - elif path.isabs(name): - if not path.exists(name): - raise TemplateNotFound(name) - filename = name - else: - for searchpath in self.searchpaths: - if path.exists(path.join(searchpath, name)): - filename = path.join(searchpath, name) - break - else: - raise TemplateNotFound(name) - f = codecs.open(filename, 'r', environment.template_charset) - try: - return f.read() - finally: - f.close() - - -class TranslatorEnvironment(Environment): - class _Translator(object): - def __init__(self, translator): - self.trans = translator - - def gettext(self, string): - return self.trans.ugettext(string) - - def ngettext(self, singular, plural, n): - return self.trans.ungettext(singular, plural, n) - - def __init__(self, *args, **kwargs): - self.translator = kwargs['translator'] - del kwargs['translator'] - super(TranslatorEnvironment, self).__init__(*args, **kwargs) - - def get_translator(self, context): - return TranslatorEnvironment._Translator(self.translator) - - -class BuiltinTemplates(TemplateBridge): - def init(self, builder): - self.templates = {} - base_templates_path = path.join(path.dirname(__file__), 'templates') - ext_templates_path = [path.join(builder.confdir, dir) - for dir in builder.config.templates_path] - self.templates_path = [base_templates_path] + ext_templates_path - loader = SphinxFileSystemLoader(base_templates_path, ext_templates_path) - if builder.translator is not None: - self.jinja_env = TranslatorEnvironment(loader=loader, - friendly_traceback=False, translator=builder.translator) - else: - self.jinja_env = Environment(loader=loader, - # disable traceback, more likely that something - # in the application is broken than in the templates - friendly_traceback=False) - - def newest_template_mtime(self): - return max(mtimes_of_files(self.templates_path, '.html')) - - def render(self, template, context): - if template in self.templates: - return self.templates[template].render(context) - templateobj = self.templates[template] = \ - self.jinja_env.get_template(template) - return templateobj.render(context) diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/addnodes.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/addnodes.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,97 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.addnodes - ~~~~~~~~~~~~~~~ - - Additional docutils nodes. - - :copyright: 2007-2008 by Georg Brandl. - :license: BSD. -""" - -from docutils import nodes - -# index markup -class index(nodes.Invisible, nodes.Inline, nodes.TextElement): pass - -# description units (classdesc, funcdesc etc.) 
- -# parent node for signature and content -class desc(nodes.Admonition, nodes.Element): pass - -# additional name parts (module name, class name) -class desc_addname(nodes.Part, nodes.Inline, nodes.TextElement): pass -# compatibility alias -desc_classname = desc_addname -# return type (C); object type, e.g. -> annotation (Python) -class desc_type(nodes.Part, nodes.Inline, nodes.TextElement): pass -# main name of object -class desc_name(nodes.Part, nodes.Inline, nodes.TextElement): pass -# argument list -class desc_signature(nodes.Part, nodes.Inline, nodes.TextElement): pass -class desc_parameterlist(nodes.Part, nodes.Inline, nodes.TextElement): - child_text_separator = ', ' -class desc_parameter(nodes.Part, nodes.Inline, nodes.TextElement): pass -class desc_optional(nodes.Part, nodes.Inline, nodes.TextElement): - child_text_separator = ', ' - def astext(self): - return '[' + nodes.TextElement.astext(self) + ']' -# annotation (not Python 3-style annotations) -class desc_annotation(nodes.Part, nodes.Inline, nodes.TextElement): pass - -# node for content -class desc_content(nodes.General, nodes.Element): pass - -# \versionadded, \versionchanged, \deprecated -class versionmodified(nodes.Admonition, nodes.TextElement): pass - -# seealso -class seealso(nodes.Admonition, nodes.Element): pass - -# productionlist -class productionlist(nodes.Admonition, nodes.Element): pass -class production(nodes.Part, nodes.Inline, nodes.TextElement): pass - -# toc tree -class toctree(nodes.General, nodes.Element): pass - -# centered -class centered(nodes.Part, nodes.Element): pass - -# pending xref -class pending_xref(nodes.Element): pass - -# compact paragraph -- never makes a

    -class compact_paragraph(nodes.paragraph): pass - -# for the ACKS list -class acks(nodes.Element): pass - -# sets the highlighting language for literal blocks -class highlightlang(nodes.Element): pass - -# like emphasis, but doesn't apply further text processors, e.g. smartypants -class literal_emphasis(nodes.emphasis): pass - -# glossary -class glossary(nodes.Element): pass - -# module declaration -class module(nodes.Element): pass - -# start of a file, used in the LaTeX builder only -class start_of_file(nodes.Element): pass - -# tabular column specification, used for the LaTeX writer -class tabular_col_spec(nodes.Element): pass - -# meta directive -- same as docutils' standard meta node, but pickleable -class meta(nodes.Special, nodes.PreBibliographic, nodes.Element): pass - -# make them known to docutils. this is needed, because the HTML writer -# will choke at some point if these are not added -nodes._add_node_class_names("""index desc desc_content desc_signature desc_type - desc_addname desc_name desc_parameterlist desc_parameter desc_optional - centered versionmodified seealso productionlist production toctree - pending_xref compact_paragraph highlightlang literal_emphasis - glossary acks module start_of_file tabular_col_spec meta""".split()) diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/application.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/application.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,325 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.application - ~~~~~~~~~~~~~~~~~~ - - Sphinx application object. - - Gracefully adapted from the TextPress system by Armin. - - - :copyright: 2008 by Georg Brandl, Armin Ronacher. - :license: BSD. -""" - -import sys -import posixpath -from cStringIO import StringIO - -from docutils import nodes -from docutils.parsers.rst import directives, roles - -import sphinx -from sphinx.roles import xfileref_role, innernodetypes -from sphinx.config import Config -from sphinx.builder import builtin_builders, StandaloneHTMLBuilder -from sphinx.directives import desc_directive, target_directive, additional_xref_types -from sphinx.environment import SphinxStandaloneReader -from sphinx.util.console import bold - - -class SphinxError(Exception): - """ - Base class for Sphinx errors that are shown to the user in a nicer - way than normal exceptions. - """ - category = 'Sphinx error' - -class ExtensionError(SphinxError): - """Raised if something's wrong with the configuration.""" - category = 'Extension error' - - def __init__(self, message, orig_exc=None): - super(ExtensionError, self).__init__(message) - self.orig_exc = orig_exc - - def __repr__(self): - if self.orig_exc: - return '%s(%r, %r)' % (self.__class__.__name__, - self.message, self.orig_exc) - return '%s(%r)' % (self.__class__.__name__, self.message) - - def __str__(self): - parent_str = super(ExtensionError, self).__str__() - if self.orig_exc: - return '%s (exception: %s)' % (parent_str, self.orig_exc) - return parent_str - - -# List of all known core events. Maps name to arguments description. 
-events = { - 'builder-inited': '', - 'env-purge-doc': 'env, docname', - 'source-read': 'docname, source text', - 'doctree-read': 'the doctree before being pickled', - 'missing-reference': 'env, node, contnode', - 'doctree-resolved': 'doctree, docname', - 'env-updated': 'env', - 'html-page-context': 'pagename, context, doctree or None', - 'build-finished': 'exception', -} - -CONFIG_FILENAME = 'conf.py' - -class Sphinx(object): - - def __init__(self, srcdir, confdir, outdir, doctreedir, buildername, - confoverrides, status, warning=sys.stderr, freshenv=False): - self.next_listener_id = 0 - self._listeners = {} - self.builderclasses = builtin_builders.copy() - self.builder = None - - self.srcdir = srcdir - self.confdir = confdir - self.outdir = outdir - self.doctreedir = doctreedir - - if status is None: - self._status = StringIO() - self.quiet = True - else: - self._status = status - self.quiet = False - if warning is None: - self._warning = StringIO() - else: - self._warning = warning - self._warncount = 0 - - self._events = events.copy() - - # status code for command-line application - self.statuscode = 0 - - # read config - self.config = Config(confdir, CONFIG_FILENAME, confoverrides) - - # load all extension modules - for extension in self.config.extensions: - self.setup_extension(extension) - # the config file itself can be an extension - if self.config.setup: - self.config.setup(self) - - # now that we know all config values, collect them from conf.py - self.config.init_values() - - if buildername is None: - print >>status, 'No builder selected, using default: html' - buildername = 'html' - if buildername not in self.builderclasses: - raise SphinxError('Builder name %s not registered' % buildername) - - self.info(bold('Sphinx v%s, building %s' % (sphinx.__released__, - buildername))) - - builderclass = self.builderclasses[buildername] - self.builder = builderclass(self, freshenv=freshenv) - self.emit('builder-inited') - - def build(self, all_files, filenames): - try: - if all_files: - self.builder.build_all() - elif filenames: - self.builder.build_specific(filenames) - else: - self.builder.build_update() - except Exception, err: - self.emit('build-finished', err) - raise - else: - self.emit('build-finished', None) - - def warn(self, message): - self._warncount += 1 - try: - self._warning.write('WARNING: %s\n' % message) - except UnicodeEncodeError: - encoding = getattr(self._warning, 'encoding', 'ascii') - self._warning.write(('WARNING: %s\n' % message).encode(encoding, 'replace')) - - def info(self, message='', nonl=False): - try: - self._status.write(message) - except UnicodeEncodeError: - encoding = getattr(self._status, 'encoding', 'ascii') - self._status.write(message.encode(encoding, 'replace')) - if not nonl: - self._status.write('\n') - self._status.flush() - - # general extensibility interface - - def setup_extension(self, extension): - """Import and setup a Sphinx extension module.""" - try: - mod = __import__(extension, None, None, ['setup']) - except ImportError, err: - raise ExtensionError('Could not import extension %s' % extension, err) - if hasattr(mod, 'setup'): - mod.setup(self) - - def import_object(self, objname, source=None): - """Import an object from a 'module.name' string.""" - try: - module, name = objname.rsplit('.', 1) - except ValueError, err: - raise ExtensionError('Invalid full object name %s' % objname + - (source and ' (needed for %s)' % source or ''), err) - try: - return getattr(__import__(module, None, None, [name]), name) - except ImportError, err: 
- raise ExtensionError('Could not import %s' % module + - (source and ' (needed for %s)' % source or ''), err) - except AttributeError, err: - raise ExtensionError('Could not find %s' % objname + - (source and ' (needed for %s)' % source or ''), err) - - # event interface - - def _validate_event(self, event): - event = intern(event) - if event not in self._events: - raise ExtensionError('Unknown event name: %s' % event) - - def connect(self, event, callback): - self._validate_event(event) - listener_id = self.next_listener_id - if event not in self._listeners: - self._listeners[event] = {listener_id: callback} - else: - self._listeners[event][listener_id] = callback - self.next_listener_id += 1 - return listener_id - - def disconnect(self, listener_id): - for event in self._listeners.itervalues(): - event.pop(listener_id, None) - - def emit(self, event, *args): - result = [] - if event in self._listeners: - for _, callback in self._listeners[event].iteritems(): - result.append(callback(self, *args)) - return result - - def emit_firstresult(self, event, *args): - for result in self.emit(event, *args): - if result is not None: - return result - return None - - # registering addon parts - - def add_builder(self, builder): - if not hasattr(builder, 'name'): - raise ExtensionError('Builder class %s has no "name" attribute' % builder) - if builder.name in self.builderclasses: - raise ExtensionError('Builder %r already exists (in module %s)' % ( - builder.name, self.builderclasses[builder.name].__module__)) - self.builderclasses[builder.name] = builder - - def add_config_value(self, name, default, rebuild_env): - if name in self.config.values: - raise ExtensionError('Config value %r already present' % name) - self.config.values[name] = (default, rebuild_env) - - def add_event(self, name): - if name in self._events: - raise ExtensionError('Event %r already present' % name) - self._events[name] = '' - - def add_node(self, node, **kwds): - nodes._add_node_class_names([node.__name__]) - for key, val in kwds.iteritems(): - try: - visit, depart = val - except ValueError: - raise ExtensionError('Value for key %r must be a (visit, depart) ' - 'function tuple' % key) - if key == 'html': - from sphinx.htmlwriter import HTMLTranslator as translator - elif key == 'latex': - from sphinx.latexwriter import LaTeXTranslator as translator - elif key == 'text': - from sphinx.textwriter import TextTranslator as translator - else: - # ignore invalid keys for compatibility - continue - setattr(translator, 'visit_'+node.__name__, visit) - if depart: - setattr(translator, 'depart_'+node.__name__, depart) - - def add_directive(self, name, func, content, arguments, **options): - func.content = content - func.arguments = arguments - func.options = options - directives.register_directive(name, func) - - def add_role(self, name, role): - roles.register_canonical_role(name, role) - - def add_description_unit(self, directivename, rolename, indextemplate='', - parse_node=None, ref_nodeclass=None): - additional_xref_types[directivename] = (rolename, indextemplate, parse_node) - directives.register_directive(directivename, desc_directive) - roles.register_canonical_role(rolename, xfileref_role) - if ref_nodeclass is not None: - innernodetypes[rolename] = ref_nodeclass - - def add_crossref_type(self, directivename, rolename, indextemplate='', - ref_nodeclass=None): - additional_xref_types[directivename] = (rolename, indextemplate, None) - directives.register_directive(directivename, target_directive) - 
roles.register_canonical_role(rolename, xfileref_role) - if ref_nodeclass is not None: - innernodetypes[rolename] = ref_nodeclass - - def add_transform(self, transform): - SphinxStandaloneReader.transforms.append(transform) - - def add_javascript(self, filename): - StandaloneHTMLBuilder.script_files.append( - posixpath.join('_static', filename)) - - -class TemplateBridge(object): - """ - This class defines the interface for a "template bridge", that is, a class - that renders templates given a template name and a context. - """ - - def init(self, builder): - """ - Called by the builder to initialize the template system. *builder* - is the builder object; you'll probably want to look at the value of - ``builder.config.templates_path``. - """ - raise NotImplementedError('must be implemented in subclasses') - - def newest_template_mtime(self): - """ - Called by the builder to determine if output files are outdated - because of template changes. Return the mtime of the newest template - file that was changed. The default implementation returns ``0``. - """ - return 0 - - def render(self, template, context): - """ - Called by the builder to render a *template* with a specified - context (a Python dictionary). - """ - raise NotImplementedError('must be implemented in subclasses') diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/builder.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/builder.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1272 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.builder - ~~~~~~~~~~~~~~ - - Builder classes for different output formats. - - :copyright: 2007-2008 by Georg Brandl, Sebastian Wiesner, Horst Gutmann. - :license: BSD. -""" - -import os -import time -import codecs -import shutil -import gettext -import cPickle as pickle -from os import path -from cgi import escape - -from docutils import nodes -from docutils.io import StringOutput, FileOutput, DocTreeInput -from docutils.core import publish_parts -from docutils.utils import new_document -from docutils.frontend import OptionParser -from docutils.readers.doctree import Reader as DoctreeReader - -from sphinx import addnodes, locale, __version__ -from sphinx.util import ensuredir, relative_uri, SEP, os_path, texescape, ustrftime -from sphinx.htmlhelp import build_hhx -from sphinx.htmlwriter import HTMLWriter, HTMLTranslator, SmartyPantsHTMLTranslator -from sphinx.textwriter import TextWriter -from sphinx.latexwriter import LaTeXWriter -from sphinx.environment import BuildEnvironment, NoUri -from sphinx.highlighting import PygmentsBridge -from sphinx.util.console import bold, purple, darkgreen -from sphinx.search import js_index - -try: - import json -except ImportError: - try: - import simplejson as json - except ImportError: - json = None - -# side effect: registers roles and directives -from sphinx import roles -from sphinx import directives - -ENV_PICKLE_FILENAME = 'environment.pickle' -LAST_BUILD_FILENAME = 'last_build' -INVENTORY_FILENAME = 'objects.inv' - - -class Builder(object): - """ - Builds target formats from the reST sources. 
- """ - - # builder's name, for the -b command line options - name = '' - - def __init__(self, app, env=None, freshenv=False): - self.srcdir = app.srcdir - self.confdir = app.confdir - self.outdir = app.outdir - self.doctreedir = app.doctreedir - if not path.isdir(self.doctreedir): - os.makedirs(self.doctreedir) - - self.app = app - self.warn = app.warn - self.info = app.info - self.config = app.config - - self.load_i18n() - - # images that need to be copied over (source -> dest) - self.images = {} - - # if None, this is set in load_env() - self.env = env - self.freshenv = freshenv - - self.init() - self.load_env() - - # helper methods - - def init(self): - """Load necessary templates and perform initialization.""" - raise NotImplementedError - - def init_templates(self): - # Call this from init() if you need templates. - if self.config.template_bridge: - self.templates = self.app.import_object( - self.config.template_bridge, 'template_bridge setting')() - else: - from sphinx._jinja import BuiltinTemplates - self.templates = BuiltinTemplates() - self.templates.init(self) - - def get_target_uri(self, docname, typ=None): - """ - Return the target URI for a document name (typ can be used to qualify - the link characteristic for individual builders). - """ - raise NotImplementedError - - def get_relative_uri(self, from_, to, typ=None): - """ - Return a relative URI between two source filenames. May raise environment.NoUri - if there's no way to return a sensible URI. - """ - return relative_uri(self.get_target_uri(from_), - self.get_target_uri(to, typ)) - - def get_outdated_docs(self): - """ - Return an iterable of output files that are outdated, or a string describing - what an update build will build. - """ - raise NotImplementedError - - def status_iterator(self, iterable, summary, colorfunc=darkgreen): - l = -1 - for item in iterable: - if l == -1: - self.info(bold(summary), nonl=1) - l = 0 - self.info(colorfunc(item) + ' ', nonl=1) - yield item - if l == 0: - self.info() - - supported_image_types = [] - - def post_process_images(self, doctree): - """ - Pick the best candidate for all image URIs. - """ - for node in doctree.traverse(nodes.image): - if '?' in node['candidates']: - # don't rewrite nonlocal image URIs - continue - if '*' not in node['candidates']: - for imgtype in self.supported_image_types: - candidate = node['candidates'].get(imgtype, None) - if candidate: - break - else: - self.warn('%s:%s: no matching candidate for image URI %r' % - (node.source, getattr(node, 'lineno', ''), node['uri'])) - continue - node['uri'] = candidate - else: - candidate = node['uri'] - if candidate not in self.env.images: - # non-existing URI; let it alone - continue - self.images[candidate] = self.env.images[candidate][1] - - # build methods - - def load_i18n(self): - """ - Load translated strings from the configured localedirs if - enabled in the configuration. - """ - self.translator = None - if self.config.language is not None: - self.info(bold('loading translations [%s]... 
' % self.config.language), - nonl=True) - locale_dirs = [path.join(path.dirname(__file__), 'locale')] + \ - [path.join(self.srcdir, x) for x in self.config.locale_dirs] - for dir_ in locale_dirs: - try: - trans = gettext.translation('sphinx', localedir=dir_, - languages=[self.config.language]) - if self.translator is None: - self.translator = trans - else: - self.translator._catalog.update(trans.catalog) - except Exception: - # Language couldn't be found in the specified path - pass - if self.translator is not None: - self.info('done') - else: - self.info('locale not available') - if self.translator is None: - self.translator = gettext.NullTranslations() - self.translator.install(unicode=True) - locale.init() # translate common labels - - def load_env(self): - """Set up the build environment.""" - if self.env: - return - if not self.freshenv: - try: - self.info(bold('loading pickled environment... '), nonl=True) - self.env = BuildEnvironment.frompickle(self.config, - path.join(self.doctreedir, ENV_PICKLE_FILENAME)) - self.info('done') - except Exception, err: - if type(err) is IOError and err.errno == 2: - self.info('not found') - else: - self.info('failed: %s' % err) - self.env = BuildEnvironment(self.srcdir, self.doctreedir, self.config) - self.env.find_files(self.config) - else: - self.env = BuildEnvironment(self.srcdir, self.doctreedir, self.config) - self.env.find_files(self.config) - self.env.set_warnfunc(self.warn) - - def build_all(self): - """Build all source files.""" - self.build(None, summary='all source files', method='all') - - def build_specific(self, filenames): - """Only rebuild as much as needed for changes in the source_filenames.""" - # bring the filenames to the canonical format, that is, - # relative to the source directory and without source_suffix. - dirlen = len(self.srcdir) + 1 - to_write = [] - suffix = self.config.source_suffix - for filename in filenames: - filename = path.abspath(filename)[dirlen:] - if filename.endswith(suffix): - filename = filename[:-len(suffix)] - filename = filename.replace(os.path.sep, SEP) - to_write.append(filename) - self.build(to_write, method='specific', - summary='%d source files given on command ' - 'line' % len(to_write)) - - def build_update(self): - """Only rebuild files changed or added since last build.""" - to_build = self.get_outdated_docs() - if isinstance(to_build, str): - self.build(['__all__'], to_build) - else: - to_build = list(to_build) - self.build(to_build, - summary='targets for %d source files that are ' - 'out of date' % len(to_build)) - - def build(self, docnames, summary=None, method='update'): - if summary: - self.info(bold('building [%s]: ' % self.name), nonl=1) - self.info(summary) - - updated_docnames = [] - # while reading, collect all warnings from docutils - warnings = [] - self.env.set_warnfunc(warnings.append) - self.info(bold('updating environment: '), nonl=1) - iterator = self.env.update(self.config, self.srcdir, self.doctreedir, self.app) - # the first item in the iterator is a summary message - self.info(iterator.next()) - for docname in self.status_iterator(iterator, 'reading sources... ', purple): - updated_docnames.append(docname) - # nothing further to do, the environment has already done the reading - for warning in warnings: - if warning.strip(): - self.warn(warning) - self.env.set_warnfunc(self.warn) - - if updated_docnames: - # save the environment - self.info(bold('pickling environment... 
'), nonl=True) - self.env.topickle(path.join(self.doctreedir, ENV_PICKLE_FILENAME)) - self.info('done') - - # global actions - self.info(bold('checking consistency... '), nonl=True) - self.env.check_consistency() - self.info('done') - else: - if method == 'update' and not docnames: - self.info(bold('no targets are out of date.')) - return - - # another indirection to support methods which don't build files - # individually - self.write(docnames, updated_docnames, method) - - # finish (write static files etc.) - self.finish() - status = self.app.statuscode == 0 and 'succeeded' or 'finished with problems' - if self.app._warncount: - self.info(bold('build %s, %s warning%s.' % - (status, self.app._warncount, - self.app._warncount != 1 and 's' or ''))) - else: - self.info(bold('build %s.' % status)) - - def write(self, build_docnames, updated_docnames, method='update'): - if build_docnames is None or build_docnames == ['__all__']: - # build_all - build_docnames = self.env.found_docs - if method == 'update': - # build updated ones as well - docnames = set(build_docnames) | set(updated_docnames) - else: - docnames = set(build_docnames) - - # add all toctree-containing files that may have changed - for docname in list(docnames): - for tocdocname in self.env.files_to_rebuild.get(docname, []): - docnames.add(tocdocname) - docnames.add(self.config.master_doc) - - self.info(bold('preparing documents... '), nonl=True) - self.prepare_writing(docnames) - self.info('done') - - # write target files - warnings = [] - self.env.set_warnfunc(warnings.append) - for docname in self.status_iterator(sorted(docnames), - 'writing output... ', darkgreen): - doctree = self.env.get_and_resolve_doctree(docname, self) - self.write_doc(docname, doctree) - for warning in warnings: - if warning.strip(): - self.warn(warning) - self.env.set_warnfunc(self.warn) - - def prepare_writing(self, docnames): - raise NotImplementedError - - def write_doc(self, docname, doctree): - raise NotImplementedError - - def finish(self): - raise NotImplementedError - - -class StandaloneHTMLBuilder(Builder): - """ - Builds standalone HTML docs. - """ - name = 'html' - copysource = True - out_suffix = '.html' - indexer_format = js_index - supported_image_types = ['image/svg+xml', 'image/png', 'image/gif', - 'image/jpeg'] - searchindex_filename = 'searchindex.js' - add_header_links = True - add_definition_links = True - - # This is a class attribute because it is mutated by Sphinx.add_javascript. 
- script_files = ['_static/jquery.js', '_static/doctools.js'] - - def init(self): - """Load templates.""" - self.init_templates() - self.init_translator_class() - if self.config.html_file_suffix: - self.out_suffix = self.config.html_file_suffix - - if self.config.language is not None: - jsfile = path.join(path.dirname(__file__), 'locale', self.config.language, - 'LC_MESSAGES', 'sphinx.js') - if path.isfile(jsfile): - self.script_files.append('_static/translations.js') - - def init_translator_class(self): - if self.config.html_translator_class: - self.translator_class = self.app.import_object( - self.config.html_translator_class, 'html_translator_class setting') - elif self.config.html_use_smartypants: - self.translator_class = SmartyPantsHTMLTranslator - else: - self.translator_class = HTMLTranslator - - def render_partial(self, node): - """Utility: Render a lone doctree node.""" - doc = new_document('') - doc.append(node) - return publish_parts( - doc, - source_class=DocTreeInput, - reader=DoctreeReader(), - writer=HTMLWriter(self), - settings_overrides={'output_encoding': 'unicode'} - ) - - def prepare_writing(self, docnames): - from sphinx.search import IndexBuilder - - self.indexer = IndexBuilder(self.env) - self.load_indexer(docnames) - self.docwriter = HTMLWriter(self) - self.docsettings = OptionParser( - defaults=self.env.settings, - components=(self.docwriter,)).get_default_values() - - # format the "last updated on" string, only once is enough since it - # typically doesn't include the time of day - lufmt = self.config.html_last_updated_fmt - if lufmt is not None: - self.last_updated = ustrftime(lufmt or _('%b %d, %Y')) - else: - self.last_updated = None - - logo = self.config.html_logo and \ - path.basename(self.config.html_logo) or '' - - favicon = self.config.html_favicon and \ - path.basename(self.config.html_favicon) or '' - if favicon and os.path.splitext(favicon)[1] != '.ico': - self.warn('html_favicon is not an .ico file') - - if not isinstance(self.config.html_use_opensearch, basestring): - self.warn('html_use_opensearch config value must now be a string') - - self.relations = self.env.collect_relations() - - rellinks = [] - if self.config.html_use_index: - rellinks.append(('genindex', _('General Index'), 'I', _('index'))) - if self.config.html_use_modindex and self.env.modules: - rellinks.append(('modindex', _('Global Module Index'), 'M', _('modules'))) - - self.globalcontext = dict( - project = self.config.project, - release = self.config.release, - version = self.config.version, - last_updated = self.last_updated, - copyright = self.config.copyright, - master_doc = self.config.master_doc, - style = self.config.html_style, - use_opensearch = self.config.html_use_opensearch, - docstitle = self.config.html_title, - shorttitle = self.config.html_short_title, - show_sphinx = self.config.html_show_sphinx, - has_source = self.config.html_copy_source, - file_suffix = self.out_suffix, - script_files = self.script_files, - sphinx_version = __version__, - rellinks = rellinks, - builder = self.name, - parents = [], - logo = logo, - favicon = favicon, - ) - self.globalcontext.update(self.config.html_context) - - def get_doc_context(self, docname, body, metatags): - """Collect items for the template context of a page.""" - # find out relations - prev = next = None - parents = [] - rellinks = self.globalcontext['rellinks'][:] - related = self.relations.get(docname) - titles = self.env.titles - if related and related[2]: - try: - next = {'link': self.get_relative_uri(docname, 
related[2]), - 'title': self.render_partial(titles[related[2]])['title']} - rellinks.append((related[2], next['title'], 'N', _('next'))) - except KeyError: - next = None - if related and related[1]: - try: - prev = {'link': self.get_relative_uri(docname, related[1]), - 'title': self.render_partial(titles[related[1]])['title']} - rellinks.append((related[1], prev['title'], 'P', _('previous'))) - except KeyError: - # the relation is (somehow) not in the TOC tree, handle that gracefully - prev = None - while related and related[0]: - try: - parents.append( - {'link': self.get_relative_uri(docname, related[0]), - 'title': self.render_partial(titles[related[0]])['title']}) - except KeyError: - pass - related = self.relations.get(related[0]) - if parents: - parents.pop() # remove link to the master file; we have a generic - # "back to index" link already - parents.reverse() - - # title rendered as HTML - title = titles.get(docname) - title = title and self.render_partial(title)['title'] or '' - # the name for the copied source - sourcename = self.config.html_copy_source and docname + '.txt' or '' - - # metadata for the document - meta = self.env.metadata.get(docname) - - return dict( - parents = parents, - prev = prev, - next = next, - title = title, - meta = meta, - body = body, - metatags = metatags, - rellinks = rellinks, - sourcename = sourcename, - toc = self.render_partial(self.env.get_toc_for(docname))['fragment'], - # only display a TOC if there's more than one item to show - display_toc = (self.env.toc_num_entries[docname] > 1), - ) - - def write_doc(self, docname, doctree): - self.post_process_images(doctree) - destination = StringOutput(encoding='utf-8') - doctree.settings = self.docsettings - - self.imgpath = relative_uri(self.get_target_uri(docname), '_images') - self.docwriter.write(doctree, destination) - self.docwriter.assemble_parts() - body = self.docwriter.parts['fragment'] - metatags = self.docwriter.clean_meta - - ctx = self.get_doc_context(docname, body, metatags) - self.index_page(docname, doctree, ctx.get('title', '')) - self.handle_page(docname, ctx, event_arg=doctree) - - def finish(self): - self.info(bold('writing additional files...'), nonl=1) - - # the global general index - - if self.config.html_use_index: - # the total count of lines for each index letter, used to distribute - # the entries into two columns - genindex = self.env.create_index(self) - indexcounts = [] - for _, entries in genindex: - indexcounts.append(sum(1 + len(subitems) - for _, (_, subitems) in entries)) - - genindexcontext = dict( - genindexentries = genindex, - genindexcounts = indexcounts, - split_index = self.config.html_split_index, - ) - self.info(' genindex', nonl=1) - - if self.config.html_split_index: - self.handle_page('genindex', genindexcontext, 'genindex-split.html') - self.handle_page('genindex-all', genindexcontext, 'genindex.html') - for (key, entries), count in zip(genindex, indexcounts): - ctx = {'key': key, 'entries': entries, 'count': count, - 'genindexentries': genindex} - self.handle_page('genindex-' + key, ctx, 'genindex-single.html') - else: - self.handle_page('genindex', genindexcontext, 'genindex.html') - - # the global module index - - if self.config.html_use_modindex and self.env.modules: - # the sorted list of all modules, for the global module index - modules = sorted(((mn, (self.get_relative_uri('modindex', fn) + - '#module-' + mn, sy, pl, dep)) - for (mn, (fn, sy, pl, dep)) in - self.env.modules.iteritems()), - key=lambda x: x[0].lower()) - # collect all platforms 
- platforms = set() - # sort out collapsable modules - modindexentries = [] - letters = [] - pmn = '' - num_toplevels = 0 - num_collapsables = 0 - cg = 0 # collapse group - fl = '' # first letter - for mn, (fn, sy, pl, dep) in modules: - pl = pl and pl.split(', ') or [] - platforms.update(pl) - if fl != mn[0].lower() and mn[0] != '_': - # heading - modindexentries.append(['', False, 0, False, - mn[0].upper(), '', [], False]) - letters.append(mn[0].upper()) - tn = mn.split('.')[0] - if tn != mn: - # submodule - if pmn == tn: - # first submodule - make parent collapsable - modindexentries[-1][1] = True - num_collapsables += 1 - elif not pmn.startswith(tn): - # submodule without parent in list, add dummy entry - cg += 1 - modindexentries.append([tn, True, cg, False, '', '', [], False]) - else: - num_toplevels += 1 - cg += 1 - modindexentries.append([mn, False, cg, (tn != mn), fn, sy, pl, dep]) - pmn = mn - fl = mn[0].lower() - platforms = sorted(platforms) - - # apply heuristics when to collapse modindex at page load: - # only collapse if number of toplevel modules is larger than - # number of submodules - collapse = len(modules) - num_toplevels < num_toplevels - - modindexcontext = dict( - modindexentries = modindexentries, - platforms = platforms, - letters = letters, - collapse_modindex = collapse, - ) - self.info(' modindex', nonl=1) - self.handle_page('modindex', modindexcontext, 'modindex.html') - - # the search page - if self.name != 'htmlhelp': - self.info(' search', nonl=1) - self.handle_page('search', {}, 'search.html') - - # additional pages from conf.py - for pagename, template in self.config.html_additional_pages.items(): - self.info(' '+pagename, nonl=1) - self.handle_page(pagename, {}, template) - - if self.config.html_use_opensearch and self.name != 'htmlhelp': - self.info(' opensearch', nonl=1) - fn = path.join(self.outdir, '_static', 'opensearch.xml') - self.handle_page('opensearch', {}, 'opensearch.xml', outfilename=fn) - - self.info() - - # copy image files - if self.images: - self.info(bold('copying images...'), nonl=True) - ensuredir(path.join(self.outdir, '_images')) - for src, dest in self.images.iteritems(): - self.info(' '+src, nonl=1) - shutil.copyfile(path.join(self.srcdir, src), - path.join(self.outdir, '_images', dest)) - self.info() - - # copy static files - self.info(bold('copying static files... 
'), nonl=True) - ensuredir(path.join(self.outdir, '_static')) - # first, create pygments style file - f = open(path.join(self.outdir, '_static', 'pygments.css'), 'w') - f.write(PygmentsBridge('html', self.config.pygments_style).get_stylesheet()) - f.close() - # then, copy translations JavaScript file - if self.config.language is not None: - jsfile = path.join(path.dirname(__file__), 'locale', self.config.language, - 'LC_MESSAGES', 'sphinx.js') - if path.isfile(jsfile): - shutil.copyfile(jsfile, path.join(self.outdir, '_static', - 'translations.js')) - # then, copy over all user-supplied static files - staticdirnames = [path.join(path.dirname(__file__), 'static')] + \ - [path.join(self.confdir, spath) - for spath in self.config.html_static_path] - for staticdirname in staticdirnames: - for filename in os.listdir(staticdirname): - if filename.startswith('.'): - continue - fullname = path.join(staticdirname, filename) - targetname = path.join(self.outdir, '_static', filename) - if path.isfile(fullname): - shutil.copyfile(fullname, targetname) - elif path.isdir(fullname): - if filename in self.config.exclude_dirnames: - continue - if path.exists(targetname): - shutil.rmtree(targetname) - shutil.copytree(fullname, targetname) - # last, copy logo file (handled differently) - if self.config.html_logo: - logobase = path.basename(self.config.html_logo) - shutil.copyfile(path.join(self.confdir, self.config.html_logo), - path.join(self.outdir, '_static', logobase)) - self.info('done') - - # dump the search index - self.handle_finish() - - def get_outdated_docs(self): - if self.templates: - template_mtime = self.templates.newest_template_mtime() - else: - template_mtime = 0 - for docname in self.env.found_docs: - if docname not in self.env.all_docs: - yield docname - continue - targetname = self.env.doc2path(docname, self.outdir, self.out_suffix) - try: - targetmtime = path.getmtime(targetname) - except Exception: - targetmtime = 0 - try: - srcmtime = max(path.getmtime(self.env.doc2path(docname)), - template_mtime) - if srcmtime > targetmtime: - yield docname - except EnvironmentError: - # source doesn't exist anymore - pass - - def load_indexer(self, docnames): - keep = set(self.env.all_docs) - set(docnames) - try: - f = open(path.join(self.outdir, self.searchindex_filename), 'rb') - try: - self.indexer.load(f, self.indexer_format) - finally: - f.close() - except (IOError, OSError, ValueError): - if keep: - self.warn("search index couldn't be loaded, but not all documents " - "will be built: the index will be incomplete.") - # delete all entries for files that will be rebuilt - self.indexer.prune(keep) - - def index_page(self, pagename, doctree, title): - # only index pages with title - if self.indexer is not None and title: - self.indexer.feed(pagename, title, doctree) - - # --------- these are overwritten by the serialization builder - - def get_target_uri(self, docname, typ=None): - return docname + self.out_suffix - - def handle_page(self, pagename, addctx, templatename='page.html', - outfilename=None, event_arg=None): - ctx = self.globalcontext.copy() - # current_page_name is backwards compatibility - ctx['pagename'] = ctx['current_page_name'] = pagename - - def pathto(otheruri, resource=False, - baseuri=self.get_target_uri(pagename)): - if not resource: - otheruri = self.get_target_uri(otheruri) - return relative_uri(baseuri, otheruri) - ctx['pathto'] = pathto - ctx['hasdoc'] = lambda name: name in self.env.all_docs - ctx['customsidebar'] = self.config.html_sidebars.get(pagename) - 
ctx.update(addctx) - - self.app.emit('html-page-context', pagename, templatename, ctx, event_arg) - - output = self.templates.render(templatename, ctx) - if not outfilename: - outfilename = path.join(self.outdir, os_path(pagename) + self.out_suffix) - ensuredir(path.dirname(outfilename)) # normally different from self.outdir - try: - f = codecs.open(outfilename, 'w', 'utf-8') - try: - f.write(output) - finally: - f.close() - except (IOError, OSError), err: - self.warn("Error writing file %s: %s" % (outfilename, err)) - if self.copysource and ctx.get('sourcename'): - # copy the source file for the "show source" link - source_name = path.join(self.outdir, '_sources', os_path(ctx['sourcename'])) - ensuredir(path.dirname(source_name)) - shutil.copyfile(self.env.doc2path(pagename), source_name) - - def handle_finish(self): - self.info(bold('dumping search index... '), nonl=True) - self.indexer.prune(self.env.all_docs) - f = open(path.join(self.outdir, self.searchindex_filename), 'wb') - try: - self.indexer.dump(f, self.indexer_format) - finally: - f.close() - self.info('done') - - self.info(bold('dumping object inventory... '), nonl=True) - f = open(path.join(self.outdir, INVENTORY_FILENAME), 'w') - try: - f.write('# Sphinx inventory version 1\n') - f.write('# Project: %s\n' % self.config.project.encode('utf-8')) - f.write('# Version: %s\n' % self.config.version) - for modname, info in self.env.modules.iteritems(): - f.write('%s mod %s\n' % (modname, self.get_target_uri(info[0]))) - for refname, (docname, desctype) in self.env.descrefs.iteritems(): - f.write('%s %s %s\n' % (refname, desctype, self.get_target_uri(docname))) - finally: - f.close() - self.info('done') - - -class SerializingHTMLBuilder(StandaloneHTMLBuilder): - """ - An abstract builder that serializes the HTML generated. - """ - #: the serializing implementation to use. Set this to a module that - #: implements a `dump`, `load`, `dumps` and `loads` functions - #: (pickle, simplejson etc.) 
- implementation = None - - #: the filename for the global context file - globalcontext_filename = None - - supported_image_types = ('image/svg+xml', 'image/png', 'image/gif', - 'image/jpeg') - - def init(self): - self.init_translator_class() - self.templates = None # no template bridge necessary - - def get_target_uri(self, docname, typ=None): - if docname == 'index': - return '' - if docname.endswith(SEP + 'index'): - return docname[:-5] # up to sep - return docname + SEP - - def handle_page(self, pagename, ctx, templatename='page.html', - outfilename=None, event_arg=None): - ctx['current_page_name'] = pagename - sidebarfile = self.config.html_sidebars.get(pagename) - if sidebarfile: - ctx['customsidebar'] = sidebarfile - - if not outfilename: - outfilename = path.join(self.outdir, os_path(pagename) + self.out_suffix) - - self.app.emit('html-page-context', pagename, templatename, ctx, event_arg) - - ensuredir(path.dirname(outfilename)) - f = open(outfilename, 'wb') - try: - self.implementation.dump(ctx, f, 2) - finally: - f.close() - - # if there is a source file, copy the source file for the - # "show source" link - if ctx.get('sourcename'): - source_name = path.join(self.outdir, '_sources', - os_path(ctx['sourcename'])) - ensuredir(path.dirname(source_name)) - shutil.copyfile(self.env.doc2path(pagename), source_name) - - def handle_finish(self): - # dump the global context - outfilename = path.join(self.outdir, self.globalcontext_filename) - f = open(outfilename, 'wb') - try: - self.implementation.dump(self.globalcontext, f, 2) - finally: - f.close() - - # super here to dump the search index - StandaloneHTMLBuilder.handle_finish(self) - - # copy the environment file from the doctree dir to the output dir - # as needed by the web app - shutil.copyfile(path.join(self.doctreedir, ENV_PICKLE_FILENAME), - path.join(self.outdir, ENV_PICKLE_FILENAME)) - - # touch 'last build' file, used by the web application to determine - # when to reload its environment and clear the cache - open(path.join(self.outdir, LAST_BUILD_FILENAME), 'w').close() - - -class PickleHTMLBuilder(SerializingHTMLBuilder): - """ - A Builder that dumps the generated HTML into pickle files. - """ - implementation = pickle - indexer_format = pickle - name = 'pickle' - out_suffix = '.fpickle' - globalcontext_filename = 'globalcontext.pickle' - searchindex_filename = 'searchindex.pickle' - - -class JSONHTMLBuilder(SerializingHTMLBuilder): - """ - A builder that dumps the generated HTML into JSON files. - """ - implementation = json - indexer_format = json - name = 'json' - out_suffix = '.fjson' - globalcontext_filename = 'globalcontext.json' - searchindex_filename = 'searchindex.json' - - def init(self): - if json is None: - from sphinx.application import SphinxError - raise SphinxError('The module simplejson (or json in Python >= 2.6) ' - 'is not available. The JSONHTMLBuilder builder ' - 'will not work.') - SerializingHTMLBuilder.init(self) - - -class HTMLHelpBuilder(StandaloneHTMLBuilder): - """ - Builder that also outputs Windows HTML help project, contents and index files. - Adapted from the original Doc/tools/prechm.py. 
- """ - name = 'htmlhelp' - - # don't copy the reST source - copysource = False - supported_image_types = ['image/png', 'image/gif', 'image/jpeg'] - - # don't add links - add_header_links = False - add_definition_links = False - - def init(self): - StandaloneHTMLBuilder.init(self) - # the output files for HTML help must be .html only - self.out_suffix = '.html' - - def handle_finish(self): - build_hhx(self, self.outdir, self.config.htmlhelp_basename) - - -class LaTeXBuilder(Builder): - """ - Builds LaTeX output to create PDF. - """ - name = 'latex' - supported_image_types = ['application/pdf', 'image/png', 'image/gif', - 'image/jpeg'] - - def init(self): - self.docnames = [] - self.document_data = [] - texescape.init() - - def get_outdated_docs(self): - return 'all documents' # for now - - def get_target_uri(self, docname, typ=None): - if typ == 'token': - # token references are always inside production lists and must be - # replaced by \token{} in LaTeX - return '@token' - if docname not in self.docnames: - raise NoUri - else: - return '' - - def init_document_data(self): - preliminary_document_data = map(list, self.config.latex_documents) - if not preliminary_document_data: - self.warn('No "latex_documents" config value found; no documents ' - 'will be written.') - return - # assign subdirs to titles - self.titles = [] - for entry in preliminary_document_data: - docname = entry[0] - if docname not in self.env.all_docs: - self.warn('"latex_documents" config value references unknown ' - 'document %s' % docname) - continue - self.document_data.append(entry) - if docname.endswith(SEP+'index'): - docname = docname[:-5] - self.titles.append((docname, entry[2])) - - def write(self, *ignored): - # first, assemble the "appendix" docs that are in every PDF - appendices = [] - for fname in self.config.latex_appendices: - appendices.append(self.env.get_doctree(fname)) - - docwriter = LaTeXWriter(self) - docsettings = OptionParser( - defaults=self.env.settings, - components=(docwriter,)).get_default_values() - - self.init_document_data() - - for entry in self.document_data: - docname, targetname, title, author, docclass = entry[:5] - toctree_only = False - if len(entry) > 5: - toctree_only = entry[5] - destination = FileOutput( - destination_path=path.join(self.outdir, targetname), - encoding='utf-8') - self.info("processing " + targetname + "... ", nonl=1) - doctree = self.assemble_doctree(docname, toctree_only, - appendices=(docclass == 'manual') and appendices or []) - self.post_process_images(doctree) - self.info("writing... 
", nonl=1) - doctree.settings = docsettings - doctree.settings.author = author - doctree.settings.title = title - doctree.settings.docname = docname - doctree.settings.docclass = docclass - docwriter.write(doctree, destination) - self.info("done") - - def assemble_doctree(self, indexfile, toctree_only, appendices): - self.docnames = set([indexfile] + appendices) - self.info(darkgreen(indexfile) + " ", nonl=1) - def process_tree(docname, tree): - tree = tree.deepcopy() - for toctreenode in tree.traverse(addnodes.toctree): - newnodes = [] - includefiles = map(str, toctreenode['includefiles']) - for includefile in includefiles: - try: - self.info(darkgreen(includefile) + " ", nonl=1) - subtree = process_tree(includefile, - self.env.get_doctree(includefile)) - self.docnames.add(includefile) - except Exception: - self.warn('%s: toctree contains ref to nonexisting file %r' % - (docname, includefile)) - else: - sof = addnodes.start_of_file() - sof.children = subtree.children - newnodes.append(sof) - toctreenode.parent.replace(toctreenode, newnodes) - return tree - tree = self.env.get_doctree(indexfile) - if toctree_only: - # extract toctree nodes from the tree and put them in a fresh document - new_tree = new_document('') - new_sect = nodes.section() - new_sect += nodes.title(u'', u'') - new_tree += new_sect - for node in tree.traverse(addnodes.toctree): - new_sect += node - tree = new_tree - largetree = process_tree(indexfile, tree) - largetree.extend(appendices) - self.info() - self.info("resolving references...") - self.env.resolve_references(largetree, indexfile, self) - # resolve :ref:s to distant tex files -- we can't add a cross-reference, - # but append the document name - for pendingnode in largetree.traverse(addnodes.pending_xref): - docname = pendingnode['refdocname'] - sectname = pendingnode['refsectname'] - newnodes = [nodes.emphasis(sectname, sectname)] - for subdir, title in self.titles: - if docname.startswith(subdir): - newnodes.append(nodes.Text(_(' (in '), _(' (in '))) - newnodes.append(nodes.emphasis(title, title)) - newnodes.append(nodes.Text(')', ')')) - break - else: - pass - pendingnode.replace_self(newnodes) - return largetree - - def finish(self): - # copy image files - if self.images: - self.info(bold('copying images...'), nonl=1) - for src, dest in self.images.iteritems(): - self.info(' '+src, nonl=1) - shutil.copyfile(path.join(self.srcdir, src), - path.join(self.outdir, dest)) - self.info() - - # the logo is handled differently - if self.config.latex_logo: - logobase = path.basename(self.config.latex_logo) - shutil.copyfile(path.join(self.confdir, self.config.latex_logo), - path.join(self.outdir, logobase)) - - self.info(bold('copying TeX support files... '), nonl=True) - staticdirname = path.join(path.dirname(__file__), 'texinputs') - for filename in os.listdir(staticdirname): - if not filename.startswith('.'): - shutil.copyfile(path.join(staticdirname, filename), - path.join(self.outdir, filename)) - self.info('done') - - -class ChangesBuilder(Builder): - """ - Write a summary with all versionadded/changed directives. 
- """ - name = 'changes' - - def init(self): - self.init_templates() - - def get_outdated_docs(self): - return self.outdir - - typemap = { - 'versionadded': 'added', - 'versionchanged': 'changed', - 'deprecated': 'deprecated', - } - - def write(self, *ignored): - version = self.config.version - libchanges = {} - apichanges = [] - otherchanges = {} - if version not in self.env.versionchanges: - self.info(bold('no changes in this version.')) - return - self.info(bold('writing summary file...')) - for type, docname, lineno, module, descname, content in \ - self.env.versionchanges[version]: - ttext = self.typemap[type] - context = content.replace('\n', ' ') - if descname and docname.startswith('c-api'): - if not descname: - continue - if context: - entry = '%s: %s: %s' % (descname, ttext, context) - else: - entry = '%s: %s.' % (descname, ttext) - apichanges.append((entry, docname, lineno)) - elif descname or module: - if not module: - module = _('Builtins') - if not descname: - descname = _('Module level') - if context: - entry = '%s: %s: %s' % (descname, ttext, context) - else: - entry = '%s: %s.' % (descname, ttext) - libchanges.setdefault(module, []).append((entry, docname, lineno)) - else: - if not context: - continue - entry = '%s: %s' % (ttext.capitalize(), context) - title = self.env.titles[docname].astext() - otherchanges.setdefault((docname, title), []).append( - (entry, docname, lineno)) - - ctx = { - 'project': self.config.project, - 'version': version, - 'docstitle': self.config.html_title, - 'shorttitle': self.config.html_short_title, - 'libchanges': sorted(libchanges.iteritems()), - 'apichanges': sorted(apichanges), - 'otherchanges': sorted(otherchanges.iteritems()), - 'show_sphinx': self.config.html_show_sphinx, - } - f = open(path.join(self.outdir, 'index.html'), 'w') - try: - f.write(self.templates.render('changes/frameset.html', ctx)) - finally: - f.close() - f = open(path.join(self.outdir, 'changes.html'), 'w') - try: - f.write(self.templates.render('changes/versionchanges.html', ctx)) - finally: - f.close() - - hltext = ['.. versionadded:: %s' % version, - '.. versionchanged:: %s' % version, - '.. deprecated:: %s' % version] - - def hl(no, line): - line = ' ' % no + escape(line) - for x in hltext: - if x in line: - line = '%s' % line - break - return line - - self.info(bold('copying source files...')) - for docname in self.env.all_docs: - f = open(self.env.doc2path(docname)) - lines = f.readlines() - targetfn = path.join(self.outdir, 'rst', os_path(docname)) + '.html' - ensuredir(path.dirname(targetfn)) - f = codecs.open(targetfn, 'w', 'utf8') - try: - text = ''.join(hl(i+1, line) for (i, line) in enumerate(lines)) - ctx = {'filename': self.env.doc2path(docname, None), 'text': text} - f.write(self.templates.render('changes/rstsource.html', ctx)) - finally: - f.close() - shutil.copyfile(path.join(path.dirname(__file__), 'static', 'default.css'), - path.join(self.outdir, 'default.css')) - - def hl(self, text, version): - text = escape(text) - for directive in ['versionchanged', 'versionadded', 'deprecated']: - text = text.replace('.. %s:: %s' % (directive, version), - '.. 
%s:: %s' % (directive, version)) - return text - - def finish(self): - pass - - -class TextBuilder(Builder): - name = 'text' - out_suffix = '.txt' - - def init(self): - pass - - def get_outdated_docs(self): - for docname in self.env.found_docs: - if docname not in self.env.all_docs: - yield docname - continue - targetname = self.env.doc2path(docname, self.outdir, self.out_suffix) - try: - targetmtime = path.getmtime(targetname) - except Exception: - targetmtime = 0 - try: - srcmtime = path.getmtime(self.env.doc2path(docname)) - if srcmtime > targetmtime: - yield docname - except EnvironmentError: - # source doesn't exist anymore - pass - - def get_target_uri(self, docname, typ=None): - return '' - - def prepare_writing(self, docnames): - self.writer = TextWriter(self) - - def write_doc(self, docname, doctree): - destination = StringOutput(encoding='utf-8') - self.writer.write(doctree, destination) - outfilename = path.join(self.outdir, os_path(docname) + self.out_suffix) - ensuredir(path.dirname(outfilename)) # normally different from self.outdir - try: - f = codecs.open(outfilename, 'w', 'utf-8') - try: - f.write(self.writer.output) - finally: - f.close() - except (IOError, OSError), err: - self.warn("Error writing file %s: %s" % (outfilename, err)) - - def finish(self): - pass - - -# compatibility alias -WebHTMLBuilder = PickleHTMLBuilder - - -from sphinx.linkcheck import CheckExternalLinksBuilder - -builtin_builders = { - 'html': StandaloneHTMLBuilder, - 'pickle': PickleHTMLBuilder, - 'json': JSONHTMLBuilder, - 'web': PickleHTMLBuilder, - 'htmlhelp': HTMLHelpBuilder, - 'latex': LaTeXBuilder, - 'text': TextBuilder, - 'changes': ChangesBuilder, - 'linkcheck': CheckExternalLinksBuilder, -} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/cmdline.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/cmdline.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,186 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.cmdline - ~~~~~~~~~~~~~~ - - sphinx-build command-line handling. - - :copyright: 2008 by Georg Brandl. - :license: BSD. -""" - -import os -import sys -import getopt -import traceback -from os import path - -from docutils.utils import SystemMessage - -from sphinx import __version__ -from sphinx.application import Sphinx, SphinxError -from sphinx.util import format_exception_cut_frames, save_traceback -from sphinx.util.console import darkred, nocolor, color_terminal - - -def usage(argv, msg=None): - if msg: - print >>sys.stderr, msg - print >>sys.stderr - print >>sys.stderr, """\ -Sphinx v%s -Usage: %s [options] sourcedir outdir [filenames...] -Options: -b -- builder to use; default is html - -a -- write all files; default is to only write new and changed files - -E -- don't use a saved environment, always read all files - -d -- path for the cached environment and doctree files - (default: outdir/.doctrees) - -c -- path where configuration file (conf.py) is located - (default: same as sourcedir) - -C -- use no config file at all, only -D options - -D -- override a setting in configuration - -A -- pass a value into the templates, for HTML builder - -N -- do not do colored output - -q -- no output on stdout, just warnings on stderr - -Q -- no output at all, not even warnings - -P -- run Pdb on exception -Modi: -* without -a and without filenames, write new and changed files. -* with -a, write all files. 
-* with filenames, write these.""" % (__version__, argv[0]) - - -def main(argv): - if not sys.stdout.isatty() or not color_terminal(): - # Windows' poor cmd box doesn't understand ANSI sequences - nocolor() - - try: - opts, args = getopt.getopt(argv[1:], 'ab:d:c:CD:A:NEqP') - allopts = set(opt[0] for opt in opts) - srcdir = confdir = path.abspath(args[0]) - if not path.isdir(srcdir): - print >>sys.stderr, 'Error: Cannot find source directory.' - return 1 - if not path.isfile(path.join(srcdir, 'conf.py')) and \ - '-c' not in allopts and '-C' not in allopts: - print >>sys.stderr, 'Error: Source directory doesn\'t contain conf.py file.' - return 1 - outdir = path.abspath(args[1]) - if not path.isdir(outdir): - print >>sys.stderr, 'Making output directory...' - os.makedirs(outdir) - except (IndexError, getopt.error): - usage(argv) - return 1 - - filenames = args[2:] - err = 0 - for filename in filenames: - if not path.isfile(filename): - print >>sys.stderr, 'Cannot find file %r.' % filename - err = 1 - if err: - return 1 - - buildername = all_files = None - freshenv = use_pdb = False - status = sys.stdout - warning = sys.stderr - confoverrides = {} - htmlcontext = {} - doctreedir = path.join(outdir, '.doctrees') - for opt, val in opts: - if opt == '-b': - buildername = val - elif opt == '-a': - if filenames: - usage(argv, 'Cannot combine -a option and filenames.') - return 1 - all_files = True - elif opt == '-d': - doctreedir = path.abspath(val) - elif opt == '-c': - confdir = path.abspath(val) - if not path.isfile(path.join(confdir, 'conf.py')): - print >>sys.stderr, \ - 'Error: Configuration directory doesn\'t contain conf.py file.' - return 1 - elif opt == '-C': - confdir = None - elif opt == '-D': - try: - key, val = val.split('=') - except ValueError: - print >>sys.stderr, \ - 'Error: -D option argument must be in the form name=value.' - return 1 - try: - val = int(val) - except ValueError: - pass - confoverrides[key] = val - elif opt == '-A': - try: - key, val = val.split('=') - except ValueError: - print >>sys.stderr, \ - 'Error: -A option argument must be in the form name=value.' 
- return 1 - try: - val = int(val) - except ValueError: - pass - htmlcontext[key] = val - elif opt == '-N': - nocolor() - elif opt == '-E': - freshenv = True - elif opt == '-q': - status = None - elif opt == '-Q': - status = None - warning = None - elif opt == '-P': - use_pdb = True - confoverrides['html_context'] = htmlcontext - - try: - app = Sphinx(srcdir, confdir, outdir, doctreedir, buildername, - confoverrides, status, warning, freshenv) - app.build(all_files, filenames) - return app.statuscode - except KeyboardInterrupt: - if use_pdb: - import pdb - print >>sys.stderr, darkred('Interrupted while building, starting debugger:') - traceback.print_exc() - pdb.post_mortem(sys.exc_info()[2]) - return 1 - except Exception, err: - if use_pdb: - import pdb - print >>sys.stderr, darkred('Exception occurred while building, ' - 'starting debugger:') - traceback.print_exc() - pdb.post_mortem(sys.exc_info()[2]) - else: - if isinstance(err, SystemMessage): - print >>sys.stderr, darkred('reST markup error:') - print >>sys.stderr, err.args[0].encode('ascii', 'backslashreplace') - elif isinstance(err, SphinxError): - print >>sys.stderr, darkred('%s:' % err.category) - print >>sys.stderr, err - else: - print >>sys.stderr, darkred('Exception occurred:') - print >>sys.stderr, format_exception_cut_frames().rstrip() - tbpath = save_traceback() - print >>sys.stderr, darkred('The full traceback has been saved ' - 'in %s, if you want to report the ' - 'issue to the author.' % tbpath) - print >>sys.stderr, ('Please also report this if it was a user ' - 'error, so that a better error message ' - 'can be provided next time.') - print >>sys.stderr, ('Send reports to sphinx-dev@googlegroups.com. ' - 'Thanks!') - return 1 diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/config.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/config.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,140 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.config - ~~~~~~~~~~~~~ - - Build configuration file handling. - - :copyright: 2008 by Georg Brandl. - :license: BSD license. -""" - -import os -from os import path - - -class Config(object): - """Configuration file abstraction.""" - - # the values are: (default, needs fresh doctrees if changed) - - # If you add a value here, don't forget to include it in the - # quickstart.py file template as well as in the docs! 
- - config_values = dict( - # general options - project = ('Python', True), - copyright = ('', False), - version = ('', True), - release = ('', True), - today = ('', True), - today_fmt = (None, True), # the real default is locale-dependent - - language = (None, True), - locale_dirs = ([], True), - - master_doc = ('contents', True), - source_suffix = ('.rst', True), - source_encoding = ('utf-8', True), - unused_docs = ([], True), - exclude_dirs = ([], True), - exclude_trees = ([], True), - exclude_dirnames = ([], True), - default_role = (None, True), - add_function_parentheses = (True, True), - add_module_names = (True, True), - show_authors = (False, True), - pygments_style = ('sphinx', False), - highlight_language = ('python', False), - templates_path = ([], False), - template_bridge = (None, False), - keep_warnings = (False, True), - - # HTML options - html_title = (lambda self: '%s v%s documentation' % - (self.project, self.release), - False), - html_short_title = (lambda self: self.html_title, False), - html_style = ('default.css', False), - html_logo = (None, False), - html_favicon = (None, False), - html_static_path = ([], False), - html_last_updated_fmt = (None, False), # the real default is locale-dependent - html_use_smartypants = (True, False), - html_translator_class = (None, False), - html_sidebars = ({}, False), - html_additional_pages = ({}, False), - html_use_modindex = (True, False), - html_use_index = (True, False), - html_split_index = (False, False), - html_copy_source = (True, False), - html_use_opensearch = ('', False), - html_file_suffix = (None, False), - html_show_sphinx = (True, False), - html_context = ({}, False), - - # HTML help only options - htmlhelp_basename = ('pydoc', False), - - # LaTeX options - latex_documents = ([], False), - latex_logo = (None, False), - latex_appendices = ([], False), - latex_use_parts = (False, False), - latex_use_modindex = (True, False), - # paper_size and font_size are still separate values - # so that you can give them easily on the command line - latex_paper_size = ('letter', False), - latex_font_size = ('10pt', False), - latex_elements = ({}, False), - # now deprecated - use latex_elements - latex_preamble = ('', False), - ) - - def __init__(self, dirname, filename, overrides): - self.overrides = overrides - self.values = Config.config_values.copy() - config = {} - if dirname is not None: - config['__file__'] = path.join(dirname, filename) - olddir = os.getcwd() - try: - os.chdir(dirname) - execfile(config['__file__'], config) - finally: - os.chdir(olddir) - self._raw_config = config - # these two must be preinitialized because extensions can add their - # own config values - self.setup = config.get('setup', None) - self.extensions = config.get('extensions', []) - - def init_values(self): - config = self._raw_config - config.update(self.overrides) - for name in config: - if name in self.values: - self.__dict__[name] = config[name] - del self._raw_config - - def __getattr__(self, name): - if name.startswith('_'): - raise AttributeError(name) - if name not in self.values: - raise AttributeError('No such config value: %s' % name) - default = self.values[name][0] - if callable(default): - return default(self) - return default - - def __getitem__(self, name): - return getattr(self, name) - - def __setitem__(self, name, value): - setattr(self, name, value) - - def __delitem__(self, name): - delattr(self, name) - - def __contains__(self, name): - return name in self.values diff -r be27ed110b50 -r d8ac696cc51f 
buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/directives/__init__.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/directives/__init__.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,14 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.directives - ~~~~~~~~~~~~~~~~~ - - Handlers for additional ReST directives. - - :copyright: 2008 by Georg Brandl. - :license: BSD. -""" - -from sphinx.directives.desc import * -from sphinx.directives.code import * -from sphinx.directives.other import * diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/directives/code.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/directives/code.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.directives.code - ~~~~~~~~~~~~~~~~~~~~~~ - - :copyright: 2007-2008 by Georg Brandl. - :license: BSD. -""" - -import sys -import codecs -from os import path - -from docutils import nodes -from docutils.parsers.rst import directives - -from sphinx import addnodes - - -# ------ highlight directive -------------------------------------------------------- - -def highlightlang_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - if 'linenothreshold' in options: - try: - linenothreshold = int(options['linenothreshold']) - except Exception: - linenothreshold = 10 - else: - linenothreshold = sys.maxint - return [addnodes.highlightlang(lang=arguments[0].strip(), - linenothreshold=linenothreshold)] - -highlightlang_directive.content = 0 -highlightlang_directive.arguments = (1, 0, 0) -highlightlang_directive.options = {'linenothreshold': directives.unchanged} -directives.register_directive('highlight', highlightlang_directive) -directives.register_directive('highlightlang', highlightlang_directive) # old name - - -# ------ code-block directive ------------------------------------------------------- - -def codeblock_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - code = u'\n'.join(content) - literal = nodes.literal_block(code, code) - literal['language'] = arguments[0] - literal['linenos'] = 'linenos' in options - return [literal] - -codeblock_directive.content = 1 -codeblock_directive.arguments = (1, 0, 0) -codeblock_directive.options = {'linenos': directives.flag} -directives.register_directive('code-block', codeblock_directive) -directives.register_directive('sourcecode', codeblock_directive) - - -# ------ literalinclude directive --------------------------------------------------- - -def literalinclude_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - """Like .. 
include:: :literal:, but only warns if the include file is not found.""" - if not state.document.settings.file_insertion_enabled: - return [state.document.reporter.warning('File insertion disabled', line=lineno)] - env = state.document.settings.env - rel_fn = arguments[0] - source_dir = path.dirname(path.abspath(state_machine.input_lines.source( - lineno - state_machine.input_offset - 1))) - fn = path.normpath(path.join(source_dir, rel_fn)) - - encoding = options.get('encoding', env.config.source_encoding) - try: - f = codecs.open(fn, 'r', encoding) - text = f.read() - f.close() - except (IOError, OSError): - retnode = state.document.reporter.warning( - 'Include file %r not found or reading it failed' % arguments[0], line=lineno) - except UnicodeError: - retnode = state.document.reporter.warning( - 'Encoding %r used for reading included file %r seems to ' - 'be wrong, try giving an :encoding: option' % - (encoding, arguments[0])) - else: - retnode = nodes.literal_block(text, text, source=fn) - retnode.line = 1 - if options.get('language', ''): - retnode['language'] = options['language'] - if 'linenos' in options: - retnode['linenos'] = True - state.document.settings.env.note_dependency(rel_fn) - return [retnode] - -literalinclude_directive.options = {'linenos': directives.flag, - 'language': directives.unchanged, - 'encoding': directives.encoding} -literalinclude_directive.content = 0 -literalinclude_directive.arguments = (1, 0, 0) -directives.register_directive('literalinclude', literalinclude_directive) diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/directives/desc.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/directives/desc.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,592 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.directives.desc - ~~~~~~~~~~~~~~~~~~~~~~ - - :copyright: 2007-2008 by Georg Brandl. - :license: BSD. 
-""" - -import re -import string - -from docutils import nodes -from docutils.parsers.rst import directives - -from sphinx import addnodes -from sphinx.util import ws_re - - -# ------ information units --------------------------------------------------------- - -def desc_index_text(desctype, module, name, add_modules): - if desctype == 'function': - if not module: - return _('%s() (built-in function)') % name - return _('%s() (in module %s)') % (name, module) - elif desctype == 'data': - if not module: - return _('%s (built-in variable)') % name - return _('%s (in module %s)') % (name, module) - elif desctype == 'class': - if not module: - return _('%s (built-in class)') % name - return _('%s (class in %s)') % (name, module) - elif desctype == 'exception': - return name - elif desctype == 'method': - try: - clsname, methname = name.rsplit('.', 1) - except ValueError: - if module: - return _('%s() (in module %s)') % (name, module) - else: - return '%s()' % name - if module and add_modules: - return _('%s() (%s.%s method)') % (methname, module, clsname) - else: - return _('%s() (%s method)') % (methname, clsname) - elif desctype == 'staticmethod': - try: - clsname, methname = name.rsplit('.', 1) - except ValueError: - if module: - return _('%s() (in module %s)') % (name, module) - else: - return '%s()' % name - if module and add_modules: - return _('%s() (%s.%s static method)') % (methname, module, clsname) - else: - return _('%s() (%s static method)') % (methname, clsname) - elif desctype == 'attribute': - try: - clsname, attrname = name.rsplit('.', 1) - except ValueError: - if module: - return _('%s (in module %s)') % (name, module) - else: - return name - if module and add_modules: - return _('%s (%s.%s attribute)') % (attrname, module, clsname) - else: - return _('%s (%s attribute)') % (attrname, clsname) - elif desctype == 'cfunction': - return _('%s (C function)') % name - elif desctype == 'cmember': - return _('%s (C member)') % name - elif desctype == 'cmacro': - return _('%s (C macro)') % name - elif desctype == 'ctype': - return _('%s (C type)') % name - elif desctype == 'cvar': - return _('%s (C variable)') % name - else: - raise ValueError('unhandled descenv: %s' % desctype) - - -# ------ make field lists (like :param foo:) in desc bodies prettier - -_ = lambda x: x # make gettext extraction in constants possible - -doc_fields_with_arg = { - 'param': '%param', - 'parameter': '%param', - 'arg': '%param', - 'argument': '%param', - 'keyword': '%param', - 'kwarg': '%param', - 'kwparam': '%param', - 'type': '%type', - 'raises': _('Raises'), - 'raise': 'Raises', - 'exception': 'Raises', - 'except': 'Raises', - 'var': _('Variable'), - 'ivar': 'Variable', - 'cvar': 'Variable', - 'returns': _('Returns'), - 'return': 'Returns', -} - -doc_fields_with_linked_arg = ('raises', 'raise', 'exception', 'except') - -doc_fields_without_arg = { - 'returns': 'Returns', - 'return': 'Returns', - 'rtype': _('Return type'), -} - -del _ - - -def _is_only_paragraph(node): - # determine if the node only contains one paragraph (and system messages) - if len(node) == 0: - return False - elif len(node) > 1: - for subnode in node[1:]: - if not isinstance(subnode, nodes.system_message): - return False - if isinstance(node[0], nodes.paragraph): - return True - return False - - -def handle_doc_fields(node, env): - # don't traverse, only handle field lists that are immediate children - for child in node.children: - if not isinstance(child, nodes.field_list): - continue - params = None - param_nodes = {} - 
param_types = {} - new_list = nodes.field_list() - for field in child: - fname, fbody = field - try: - typ, obj = fname.astext().split(None, 1) - typdesc = _(doc_fields_with_arg[typ]) - if _is_only_paragraph(fbody): - children = fbody.children[0].children - else: - children = fbody.children - if typdesc == '%param': - if not params: - pfield = nodes.field() - pfield += nodes.field_name('', _('Parameters')) - pfield += nodes.field_body() - params = nodes.bullet_list() - pfield[1] += params - new_list += pfield - dlitem = nodes.list_item() - dlpar = nodes.paragraph() - dlpar += nodes.emphasis(obj, obj) - dlpar += nodes.Text(' -- ', ' -- ') - dlpar += children - param_nodes[obj] = dlpar - dlitem += dlpar - params += dlitem - elif typdesc == '%type': - typenodes = fbody.children - if _is_only_paragraph(fbody): - typenodes = [nodes.Text(' (')] + \ - typenodes[0].children + [nodes.Text(')')] - param_types[obj] = typenodes - else: - fieldname = typdesc + ' ' - nfield = nodes.field() - nfieldname = nodes.field_name(fieldname, fieldname) - nfield += nfieldname - node = nfieldname - if typ in doc_fields_with_linked_arg: - node = addnodes.pending_xref(obj, reftype='obj', - refcaption=False, - reftarget=obj, - modname=env.currmodule, - classname=env.currclass) - nfieldname += node - node += nodes.Text(obj, obj) - nfield += nodes.field_body() - nfield[1] += fbody.children - new_list += nfield - except (KeyError, ValueError): - fnametext = fname.astext() - try: - typ = _(doc_fields_without_arg[fnametext]) - except KeyError: - # at least capitalize the field name - typ = fnametext.capitalize() - fname[0] = nodes.Text(typ) - new_list += field - for param, type in param_types.iteritems(): - if param in param_nodes: - param_nodes[param][1:1] = type - child.replace_self(new_list) - - -# ------ functions to parse a Python or C signature and create desc_* nodes. - -py_sig_re = re.compile( - r'''^ ([\w.]*\.)? # class name(s) - (\w+) \s* # thing name - (?: \((.*)\) # optional arguments - (\s* -> \s* .*)? )? $ # optional return annotation - ''', re.VERBOSE) - -py_paramlist_re = re.compile(r'([\[\],])') # split at '[', ']' and ',' - -def parse_py_signature(signode, sig, desctype, module, env): - """ - Transform a python signature into RST nodes. - Return (fully qualified name of the thing, classname if any). - - If inside a class, the current class name is handled intelligently: - * it is stripped from the displayed name if present - * it is added to the full name (return value) if not present - """ - m = py_sig_re.match(sig) - if m is None: - raise ValueError - classname, name, arglist, retann = m.groups() - - if retann: - retann = u' \N{RIGHTWARDS ARROW} ' + retann.strip()[2:] - - if env.currclass: - add_module = False - if classname and classname.startswith(env.currclass): - fullname = classname + name - # class name is given again in the signature - classname = classname[len(env.currclass):].lstrip('.') - elif classname: - # class name is given in the signature, but different - # (shouldn't happen) - fullname = env.currclass + '.' + classname + name - else: - # class name is not given in the signature - fullname = env.currclass + '.' + name - else: - add_module = True - fullname = classname and classname + name or name - - if desctype == 'staticmethod': - signode += addnodes.desc_annotation('static ', 'static ') - - if classname: - signode += addnodes.desc_addname(classname, classname) - # exceptions are a special case, since they are documented in the - # 'exceptions' module. 
- elif add_module and env.config.add_module_names and \ - module and module != 'exceptions': - nodetext = module + '.' - signode += addnodes.desc_addname(nodetext, nodetext) - - signode += addnodes.desc_name(name, name) - if not arglist: - if desctype in ('function', 'method', 'staticmethod'): - # for callables, add an empty parameter list - signode += addnodes.desc_parameterlist() - if retann: - signode += addnodes.desc_type(retann, retann) - return fullname, classname - signode += addnodes.desc_parameterlist() - - stack = [signode[-1]] - for token in py_paramlist_re.split(arglist): - if token == '[': - opt = addnodes.desc_optional() - stack[-1] += opt - stack.append(opt) - elif token == ']': - try: - stack.pop() - except IndexError: - raise ValueError - elif not token or token == ',' or token.isspace(): - pass - else: - token = token.strip() - stack[-1] += addnodes.desc_parameter(token, token) - if len(stack) != 1: - raise ValueError - if retann: - signode += addnodes.desc_type(retann, retann) - return fullname, classname - - -c_sig_re = re.compile( - r'''^([^(]*?) # return type - ([\w:]+) \s* # thing name (colon allowed for C++ class names) - (?: \((.*)\) )? # optionally arguments - (\s+const)? $ # const specifier - ''', re.VERBOSE) -c_funcptr_sig_re = re.compile( - r'''^([^(]+?) # return type - (\( [^()]+ \)) \s* # name in parentheses - \( (.*) \) # arguments - (\s+const)? $ # const specifier - ''', re.VERBOSE) -c_funcptr_name_re = re.compile(r'^\(\s*\*\s*(.*?)\s*\)$') - -# RE to split at word boundaries -wsplit_re = re.compile(r'(\W+)') - -# These C types aren't described in the reference, so don't try to create -# a cross-reference to them -stopwords = set(('const', 'void', 'char', 'int', 'long', 'FILE', 'struct')) - -def parse_c_type(node, ctype): - # add cross-ref nodes for all words - for part in filter(None, wsplit_re.split(ctype)): - tnode = nodes.Text(part, part) - if part[0] in string.letters+'_' and part not in stopwords: - pnode = addnodes.pending_xref( - '', reftype='ctype', reftarget=part, modname=None, classname=None) - pnode += tnode - node += pnode - else: - node += tnode - -def parse_c_signature(signode, sig, desctype): - """Transform a C (or C++) signature into RST nodes.""" - # first try the function pointer signature regex, it's more specific - m = c_funcptr_sig_re.match(sig) - if m is None: - m = c_sig_re.match(sig) - if m is None: - raise ValueError('no match') - rettype, name, arglist, const = m.groups() - - signode += addnodes.desc_type('', '') - parse_c_type(signode[-1], rettype) - try: - classname, funcname = name.split('::', 1) - classname += '::' - signode += addnodes.desc_addname(classname, classname) - signode += addnodes.desc_name(funcname, funcname) - # name (the full name) is still both parts - except ValueError: - signode += addnodes.desc_name(name, name) - # clean up parentheses from canonical name - m = c_funcptr_name_re.match(name) - if m: - name = m.group(1) - if not arglist: - if desctype == 'cfunction': - # for functions, add an empty parameter list - signode += addnodes.desc_parameterlist() - return name - - paramlist = addnodes.desc_parameterlist() - arglist = arglist.replace('`', '').replace('\\ ', '') # remove markup - # this messes up function pointer types, but not too badly ;) - args = arglist.split(',') - for arg in args: - arg = arg.strip() - param = addnodes.desc_parameter('', '', noemph=True) - try: - ctype, argname = arg.rsplit(' ', 1) - except ValueError: - # no argument name given, only the type - parse_c_type(param, arg) - else: - 
parse_c_type(param, ctype) - param += nodes.emphasis(' '+argname, ' '+argname) - paramlist += param - signode += paramlist - if const: - signode += addnodes.desc_addname(const, const) - return name - - -option_desc_re = re.compile( - r'((?:/|-|--)[-_a-zA-Z0-9]+)(\s*.*?)(?=,\s+(?:/|-|--)|$)') - -def parse_option_desc(signode, sig): - """Transform an option description into RST nodes.""" - count = 0 - firstname = '' - for m in option_desc_re.finditer(sig): - optname, args = m.groups() - if count: - signode += addnodes.desc_addname(', ', ', ') - signode += addnodes.desc_name(optname, optname) - signode += addnodes.desc_addname(args, args) - if not count: - firstname = optname - count += 1 - if not firstname: - raise ValueError - return firstname - - -strip_backslash_re = re.compile(r'\\(?=[^\\])') - -def desc_directive(desctype, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - env = state.document.settings.env - inode = addnodes.index(entries=[]) - node = addnodes.desc() - node['desctype'] = desctype - - noindex = ('noindex' in options) - node['noindex'] = noindex - # remove backslashes to support (dummy) escapes; helps Vim's highlighting - signatures = map(lambda s: strip_backslash_re.sub('', s.strip()), - arguments[0].split('\n')) - names = [] - clsname = None - module = options.get('module', env.currmodule) - for i, sig in enumerate(signatures): - # add a signature node for each signature in the current unit - # and add a reference target for it - sig = sig.strip() - signode = addnodes.desc_signature(sig, '') - signode['first'] = False - node.append(signode) - try: - if desctype in ('function', 'data', 'class', 'exception', - 'method', 'staticmethod', 'attribute'): - name, clsname = parse_py_signature(signode, sig, desctype, module, env) - elif desctype in ('cfunction', 'cmember', 'cmacro', 'ctype', 'cvar'): - name = parse_c_signature(signode, sig, desctype) - elif desctype == 'cmdoption': - optname = parse_option_desc(signode, sig) - if not noindex: - targetname = optname.replace('/', '-') - if env.currprogram: - targetname = '-' + env.currprogram + targetname - targetname = 'cmdoption' + targetname - signode['ids'].append(targetname) - state.document.note_explicit_target(signode) - inode['entries'].append( - ('pair', _('%scommand line option; %s') % - ((env.currprogram and env.currprogram + ' ' or ''), sig), - targetname, targetname)) - env.note_progoption(optname, targetname) - continue - elif desctype == 'describe': - signode.clear() - signode += addnodes.desc_name(sig, sig) - continue - else: - # another registered generic x-ref directive - rolename, indextemplate, parse_node = additional_xref_types[desctype] - if parse_node: - fullname = parse_node(env, sig, signode) - else: - signode.clear() - signode += addnodes.desc_name(sig, sig) - # normalize whitespace like xfileref_role does - fullname = ws_re.sub('', sig) - if not noindex: - targetname = '%s-%s' % (rolename, fullname) - signode['ids'].append(targetname) - state.document.note_explicit_target(signode) - if indextemplate: - indexentry = _(indextemplate) % (fullname,) - indextype = 'single' - colon = indexentry.find(':') - if colon != -1: - indextype = indexentry[:colon].strip() - indexentry = indexentry[colon+1:].strip() - inode['entries'].append((indextype, indexentry, - targetname, targetname)) - env.note_reftarget(rolename, fullname, targetname) - # don't use object indexing below - continue - except ValueError, err: - # signature parsing failed - signode.clear() - signode += 
addnodes.desc_name(sig, sig) - continue # we don't want an index entry here - # only add target and index entry if this is the first description of the - # function name in this desc block - if not noindex and name not in names: - fullname = (module and module + '.' or '') + name - # note target - if fullname not in state.document.ids: - signode['names'].append(fullname) - signode['ids'].append(fullname) - signode['first'] = (not names) - state.document.note_explicit_target(signode) - env.note_descref(fullname, desctype, lineno) - names.append(name) - - indextext = desc_index_text(desctype, module, name, - env.config.add_module_names) - inode['entries'].append(('single', indextext, fullname, fullname)) - - subnode = addnodes.desc_content() - # needed for automatic qualification of members - clsname_set = False - if desctype in ('class', 'exception') and names: - env.currclass = names[0] - clsname_set = True - elif desctype in ('method', 'staticmethod', 'attribute') and \ - clsname and not env.currclass: - env.currclass = clsname.strip('.') - clsname_set = True - # needed for association of version{added,changed} directives - if names: - env.currdesc = names[0] - state.nested_parse(content, content_offset, subnode) - handle_doc_fields(subnode, env) - if clsname_set: - env.currclass = None - env.currdesc = None - node.append(subnode) - return [inode, node] - -desc_directive.content = 1 -desc_directive.arguments = (1, 0, 1) -desc_directive.options = {'noindex': directives.flag, - 'module': directives.unchanged} - -desctypes = [ - # the Python ones - 'function', - 'data', - 'class', - 'method', - 'staticmethod', - 'attribute', - 'exception', - # the C ones - 'cfunction', - 'cmember', - 'cmacro', - 'ctype', - 'cvar', - # for command line options - 'cmdoption', - # the generic one - 'describe', - 'envvar', -] - -for _name in desctypes: - directives.register_directive(_name, desc_directive) - -_ = lambda x: x - -# Generic cross-reference types; they can be registered in the application; -# the directives are either desc_directive or target_directive -additional_xref_types = { - # directive name: (role name, index text, function to parse the desc node) - 'envvar': ('envvar', _('environment variable; %s'), None), -} - -del _ - - -# ------ target -------------------------------------------------------------------- - -def target_directive(targettype, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - """Generic target for user-defined cross-reference types.""" - env = state.document.settings.env - rolename, indextemplate, foo = additional_xref_types[targettype] - # normalize whitespace in fullname like xfileref_role does - fullname = ws_re.sub('', arguments[0].strip()) - targetname = '%s-%s' % (rolename, fullname) - node = nodes.target('', '', ids=[targetname]) - state.document.note_explicit_target(node) - ret = [node] - if indextemplate: - indexentry = indextemplate % (fullname,) - indextype = 'single' - colon = indexentry.find(':') - if colon != -1: - indextype = indexentry[:colon].strip() - indexentry = indexentry[colon+1:].strip() - inode = addnodes.index(entries=[(indextype, indexentry, targetname, targetname)]) - ret.insert(0, inode) - env.note_reftarget(rolename, fullname, targetname) - return ret - -target_directive.content = 0 -target_directive.arguments = (1, 0, 1) - -# note, the target directive is not registered here, it is used by the application -# when registering additional xref types diff -r be27ed110b50 -r d8ac696cc51f 
buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/directives/other.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/directives/other.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,411 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.directives.other - ~~~~~~~~~~~~~~~~~~~~~~~ - - :copyright: 2007-2008 by Georg Brandl. - :license: BSD. -""" - -import re -import posixpath - -from docutils import nodes -from docutils.parsers.rst import directives - -from sphinx import addnodes -from sphinx.locale import pairindextypes -from sphinx.util import patfilter, ws_re, caption_ref_re -from sphinx.util.compat import make_admonition - - -# ------ the TOC tree --------------------------------------------------------------- - -def toctree_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - env = state.document.settings.env - suffix = env.config.source_suffix - dirname = posixpath.dirname(env.docname) - glob = 'glob' in options - - ret = [] - subnode = addnodes.toctree() - includefiles = [] - includetitles = {} - all_docnames = env.found_docs.copy() - # don't add the currently visited file in catch-all patterns - all_docnames.remove(env.docname) - for entry in content: - if not entry: - continue - if not glob: - # look for explicit titles and documents ("Some Title "). - m = caption_ref_re.match(entry) - if m: - docname = m.group(2) - includetitles[docname] = m.group(1) - else: - docname = entry - # remove suffixes (backwards compatibility) - if docname.endswith(suffix): - docname = docname[:-len(suffix)] - # absolutize filenames - docname = posixpath.normpath(posixpath.join(dirname, docname)) - if docname not in env.found_docs: - ret.append(state.document.reporter.warning( - 'toctree references unknown document %r' % docname, line=lineno)) - else: - includefiles.append(docname) - else: - patname = posixpath.normpath(posixpath.join(dirname, entry)) - docnames = sorted(patfilter(all_docnames, patname)) - for docname in docnames: - all_docnames.remove(docname) # don't include it again - includefiles.append(docname) - if not docnames: - ret.append(state.document.reporter.warning( - 'toctree glob pattern %r didn\'t match any documents' % entry, - line=lineno)) - subnode['includefiles'] = includefiles - subnode['includetitles'] = includetitles - subnode['maxdepth'] = options.get('maxdepth', -1) - subnode['glob'] = glob - ret.append(subnode) - return ret - -toctree_directive.content = 1 -toctree_directive.options = {'maxdepth': int, 'glob': directives.flag} -directives.register_directive('toctree', toctree_directive) - - -# ------ section metadata ---------------------------------------------------------- - -def module_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - env = state.document.settings.env - modname = arguments[0].strip() - noindex = 'noindex' in options - env.currmodule = modname - env.note_module(modname, options.get('synopsis', ''), - options.get('platform', ''), - 'deprecated' in options) - modulenode = addnodes.module() - modulenode['modname'] = modname - modulenode['synopsis'] = options.get('synopsis', '') - targetnode = nodes.target('', '', ids=['module-' + modname]) - state.document.note_explicit_target(targetnode) - ret = [modulenode, targetnode] - if 'platform' in options: - modulenode['platform'] = options['platform'] - node = nodes.paragraph() - node += nodes.emphasis('', 
_('Platforms: ')) - node += nodes.Text(options['platform'], options['platform']) - ret.append(node) - # the synopsis isn't printed; in fact, it is only used in the modindex currently - if not noindex: - indextext = _('%s (module)') % modname - inode = addnodes.index(entries=[('single', indextext, - 'module-' + modname, modname)]) - ret.insert(0, inode) - return ret - -module_directive.arguments = (1, 0, 0) -module_directive.options = {'platform': lambda x: x, - 'synopsis': lambda x: x, - 'noindex': directives.flag, - 'deprecated': directives.flag} -directives.register_directive('module', module_directive) - - -def currentmodule_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - # This directive is just to tell people that we're documenting - # stuff in module foo, but links to module foo won't lead here. - env = state.document.settings.env - modname = arguments[0].strip() - if modname == 'None': - env.currmodule = None - else: - env.currmodule = modname - return [] - -currentmodule_directive.arguments = (1, 0, 0) -directives.register_directive('currentmodule', currentmodule_directive) - - -def author_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - # Show authors only if the show_authors option is on - env = state.document.settings.env - if not env.config.show_authors: - return [] - para = nodes.paragraph() - emph = nodes.emphasis() - para += emph - if name == 'sectionauthor': - text = _('Section author: ') - elif name == 'moduleauthor': - text = _('Module author: ') - else: - text = _('Author: ') - emph += nodes.Text(text, text) - inodes, messages = state.inline_text(arguments[0], lineno) - emph.extend(inodes) - return [para] + messages - -author_directive.arguments = (1, 0, 1) -directives.register_directive('sectionauthor', author_directive) -directives.register_directive('moduleauthor', author_directive) - - -def program_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - env = state.document.settings.env - program = ws_re.sub('-', arguments[0].strip()) - if program == 'None': - env.currprogram = None - else: - env.currprogram = program - return [] - -program_directive.arguments = (1, 0, 1) -directives.register_directive('program', program_directive) - - -# ------ index markup -------------------------------------------------------------- - -indextypes = [ - 'single', 'pair', 'triple', -] - -def index_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - arguments = arguments[0].split('\n') - env = state.document.settings.env - targetid = 'index-%s' % env.index_num - env.index_num += 1 - targetnode = nodes.target('', '', ids=[targetid]) - state.document.note_explicit_target(targetnode) - indexnode = addnodes.index() - indexnode['entries'] = ne = [] - for entry in arguments: - entry = entry.strip() - for type in pairindextypes: - if entry.startswith(type+':'): - value = entry[len(type)+1:].strip() - value = pairindextypes[type] + '; ' + value - ne.append(('pair', value, targetid, value)) - break - else: - for type in indextypes: - if entry.startswith(type+':'): - value = entry[len(type)+1:].strip() - ne.append((type, value, targetid, value)) - break - # shorthand notation for single entries - else: - for value in entry.split(','): - value = value.strip() - if not value: - continue - ne.append(('single', value, targetid, value)) - return [indexnode, targetnode] - 
-index_directive.arguments = (1, 0, 1) -directives.register_directive('index', index_directive) - -# ------ versionadded/versionchanged ----------------------------------------------- - -def version_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - node = addnodes.versionmodified() - node['type'] = name - node['version'] = arguments[0] - if len(arguments) == 2: - inodes, messages = state.inline_text(arguments[1], lineno+1) - node.extend(inodes) - if content: - state.nested_parse(content, content_offset, node) - ret = [node] + messages - else: - ret = [node] - env = state.document.settings.env - env.note_versionchange(node['type'], node['version'], node, lineno) - return ret - -version_directive.arguments = (1, 1, 1) -version_directive.content = 1 - -directives.register_directive('deprecated', version_directive) -directives.register_directive('versionadded', version_directive) -directives.register_directive('versionchanged', version_directive) - - -# ------ see also ------------------------------------------------------------------ - -def seealso_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - ret = make_admonition( - addnodes.seealso, name, [_('See also')], options, content, - lineno, content_offset, block_text, state, state_machine) - if arguments: - argnodes, msgs = state.inline_text(arguments[0], lineno) - para = nodes.paragraph() - para += argnodes - para += msgs - ret[0].insert(1, para) - return ret - -seealso_directive.content = 1 -seealso_directive.arguments = (0, 1, 1) -directives.register_directive('seealso', seealso_directive) - - -# ------ production list (for the reference) --------------------------------------- - -token_re = re.compile('`([a-z_]+)`') - -def token_xrefs(text, env): - retnodes = [] - pos = 0 - for m in token_re.finditer(text): - if m.start() > pos: - txt = text[pos:m.start()] - retnodes.append(nodes.Text(txt, txt)) - refnode = addnodes.pending_xref(m.group(1)) - refnode['reftype'] = 'token' - refnode['reftarget'] = m.group(1) - refnode['modname'] = env.currmodule - refnode['classname'] = env.currclass - refnode += nodes.literal(m.group(1), m.group(1), classes=['xref']) - retnodes.append(refnode) - pos = m.end() - if pos < len(text): - retnodes.append(nodes.Text(text[pos:], text[pos:])) - return retnodes - -def productionlist_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - env = state.document.settings.env - node = addnodes.productionlist() - messages = [] - i = 0 - - for rule in arguments[0].split('\n'): - if i == 0 and ':' not in rule: - # production group - continue - i += 1 - try: - name, tokens = rule.split(':', 1) - except ValueError: - break - subnode = addnodes.production() - subnode['tokenname'] = name.strip() - if subnode['tokenname']: - idname = 'grammar-token-%s' % subnode['tokenname'] - if idname not in state.document.ids: - subnode['ids'].append(idname) - state.document.note_implicit_target(subnode, subnode) - env.note_reftarget('token', subnode['tokenname'], idname) - subnode.extend(token_xrefs(tokens, env)) - node.append(subnode) - return [node] + messages - -productionlist_directive.content = 0 -productionlist_directive.arguments = (1, 0, 1) -directives.register_directive('productionlist', productionlist_directive) - - -# ------ glossary directive --------------------------------------------------------- - -def glossary_directive(name, arguments, options, content, 
lineno, - content_offset, block_text, state, state_machine): - """Glossary with cross-reference targets for :term: roles.""" - env = state.document.settings.env - node = addnodes.glossary() - state.nested_parse(content, content_offset, node) - - # the content should be definition lists - dls = [child for child in node if isinstance(child, nodes.definition_list)] - # now, extract definition terms to enable cross-reference creation - for dl in dls: - dl['classes'].append('glossary') - for li in dl.children: - if not li.children or not isinstance(li[0], nodes.term): - continue - termtext = li.children[0].astext() - new_id = 'term-' + nodes.make_id(termtext) - if new_id in env.gloss_entries: - new_id = 'term-' + str(len(env.gloss_entries)) - env.gloss_entries.add(new_id) - li[0]['names'].append(new_id) - li[0]['ids'].append(new_id) - state.document.settings.env.note_reftarget('term', termtext.lower(), - new_id) - # add an index entry too - indexnode = addnodes.index() - indexnode['entries'] = [('single', termtext, new_id, termtext)] - li.insert(0, indexnode) - return [node] - -glossary_directive.content = 1 -glossary_directive.arguments = (0, 0, 0) -directives.register_directive('glossary', glossary_directive) - - -# ------ miscellaneous markup ------------------------------------------------------- - -def centered_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - if not arguments: - return [] - subnode = addnodes.centered() - inodes, messages = state.inline_text(arguments[0], lineno) - subnode.extend(inodes) - return [subnode] + messages - -centered_directive.arguments = (1, 0, 1) -directives.register_directive('centered', centered_directive) - - -def acks_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - node = addnodes.acks() - state.nested_parse(content, content_offset, node) - if len(node.children) != 1 or not isinstance(node.children[0], nodes.bullet_list): - return [state.document.reporter.warning('.. acks content is not a list', - line=lineno)] - return [node] - -acks_directive.content = 1 -acks_directive.arguments = (0, 0, 0) -directives.register_directive('acks', acks_directive) - - -def tabularcolumns_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - # support giving explicit tabulary column definition to latex - node = addnodes.tabular_col_spec() - node['spec'] = arguments[0] - return [node] - -tabularcolumns_directive.content = 0 -tabularcolumns_directive.arguments = (1, 0, 1) -directives.register_directive('tabularcolumns', tabularcolumns_directive) - - -# register the standard rst class directive under a different name - -try: - # docutils 0.4 - from docutils.parsers.rst.directives.misc import class_directive - directives.register_directive('cssclass', class_directive) -except ImportError: - try: - # docutils 0.5 - from docutils.parsers.rst.directives.misc import Class - directives.register_directive('cssclass', Class) - except ImportError: - # whatever :) - pass diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/environment.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/environment.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1387 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.environment - ~~~~~~~~~~~~~~~~~~ - - Global creation environment. 
- - :copyright: 2007-2008 by Georg Brandl. - :license: BSD. -""" - -import re -import os -import time -import heapq -import types -import imghdr -import difflib -import cPickle as pickle -from os import path -from glob import glob -from string import uppercase -from itertools import izip, groupby -try: - import hashlib - md5 = hashlib.md5 -except ImportError: - # 2.4 compatibility - import md5 - md5 = md5.new - -from docutils import nodes -from docutils.io import FileInput, NullOutput -from docutils.core import Publisher -from docutils.utils import Reporter, relative_path -from docutils.readers import standalone -from docutils.parsers.rst import roles -from docutils.parsers.rst.languages import en as english -from docutils.parsers.rst.directives.html import MetaBody -from docutils.writers import UnfilteredWriter -from docutils.transforms import Transform -from docutils.transforms.parts import ContentsFilter - -from sphinx import addnodes -from sphinx.util import get_matching_docs, SEP, ustrftime -from sphinx.directives import additional_xref_types - -default_settings = { - 'embed_stylesheet': False, - 'cloak_email_addresses': True, - 'pep_base_url': 'http://www.python.org/dev/peps/', - 'rfc_base_url': 'http://rfc.net/', - 'input_encoding': 'utf-8', - 'doctitle_xform': False, - 'sectsubtitle_xform': False, -} - -# This is increased every time an environment attribute is added -# or changed to properly invalidate pickle files. -ENV_VERSION = 26 - - -default_substitutions = set([ - 'version', - 'release', - 'today', -]) - -dummy_reporter = Reporter('', 4, 4) - - -class RedirStream(object): - def __init__(self, writefunc): - self.writefunc = writefunc - def write(self, text): - if text.strip(): - self.writefunc(text) - - -class NoUri(Exception): - """Raised by get_relative_uri if there is no URI available.""" - pass - - -class DefaultSubstitutions(Transform): - """ - Replace some substitutions if they aren't defined in the document. - """ - # run before the default Substitutions - default_priority = 210 - - def apply(self): - config = self.document.settings.env.config - # only handle those not otherwise defined in the document - to_handle = default_substitutions - set(self.document.substitution_defs) - for ref in self.document.traverse(nodes.substitution_reference): - refname = ref['refname'] - if refname in to_handle: - text = config[refname] - if refname == 'today' and not text: - # special handling: can also specify a strftime format - text = ustrftime(config.today_fmt or _('%B %d, %Y')) - ref.replace_self(nodes.Text(text, text)) - - -class MoveModuleTargets(Transform): - """ - Move module targets to their nearest enclosing section title. - """ - default_priority = 210 - - def apply(self): - for node in self.document.traverse(nodes.target): - if not node['ids']: - continue - if node['ids'][0].startswith('module-') and \ - node.parent.__class__ is nodes.section: - node.parent['ids'] = node['ids'] - node.parent.remove(node) - - -class HandleCodeBlocks(Transform): - """ - Move doctest blocks out of blockquotes. - """ - default_priority = 210 - - def apply(self): - for node in self.document.traverse(nodes.block_quote): - if len(node.children) == 1 and isinstance(node.children[0], - nodes.doctest_block): - node.replace_self(node.children[0]) - -class CitationReferences(Transform): - """ - Handle citation references before the default docutils transform does. 
- """ - default_priority = 619 - - def apply(self): - for citnode in self.document.traverse(nodes.citation_reference): - cittext = citnode.astext() - refnode = addnodes.pending_xref(cittext, reftype='citation', - reftarget=cittext) - refnode += nodes.Text('[' + cittext + ']') - citnode.parent.replace(citnode, refnode) - - -class SphinxStandaloneReader(standalone.Reader): - """ - Add our own transforms. - """ - transforms = [CitationReferences, DefaultSubstitutions, MoveModuleTargets, - HandleCodeBlocks] - - def get_transforms(self): - return standalone.Reader.get_transforms(self) + self.transforms - - -class SphinxDummyWriter(UnfilteredWriter): - supported = ('html',) # needed to keep "meta" nodes - - def translate(self): - pass - - - -class SphinxContentsFilter(ContentsFilter): - """ - Used with BuildEnvironment.add_toc_from() to discard cross-file links - within table-of-contents link nodes. - """ - def visit_pending_xref(self, node): - text = node.astext() - self.parent.append(nodes.literal(text, text)) - raise nodes.SkipNode - - -class BuildEnvironment: - """ - The environment in which the ReST files are translated. - Stores an inventory of cross-file targets and provides doctree - transformations to resolve links to them. - """ - - # --------- ENVIRONMENT PERSISTENCE ---------------------------------------- - - @staticmethod - def frompickle(config, filename): - picklefile = open(filename, 'rb') - try: - env = pickle.load(picklefile) - finally: - picklefile.close() - env.config.values = config.values - if env.version != ENV_VERSION: - raise IOError('env version not current') - return env - - def topickle(self, filename): - # remove unpicklable attributes - warnfunc = self._warnfunc - self.set_warnfunc(None) - values = self.config.values - del self.config.values - picklefile = open(filename, 'wb') - # remove potentially pickling-problematic values from config - for key, val in vars(self.config).items(): - if key.startswith('_') or \ - isinstance(val, types.ModuleType) or \ - isinstance(val, types.FunctionType) or \ - isinstance(val, (type, types.ClassType)): - del self.config[key] - try: - pickle.dump(self, picklefile, pickle.HIGHEST_PROTOCOL) - finally: - picklefile.close() - # reset attributes - self.config.values = values - self.set_warnfunc(warnfunc) - - # --------- ENVIRONMENT INITIALIZATION ------------------------------------- - - def __init__(self, srcdir, doctreedir, config): - self.doctreedir = doctreedir - self.srcdir = srcdir - self.config = config - - # the application object; only set while update() runs - self.app = None - - # the docutils settings for building - self.settings = default_settings.copy() - self.settings['env'] = self - - # the function to write warning messages with - self._warnfunc = None - - # this is to invalidate old pickles - self.version = ENV_VERSION - - # All "docnames" here are /-separated and relative and exclude the source suffix. 
- - self.found_docs = set() # contains all existing docnames - self.all_docs = {} # docname -> mtime at the time of build - # contains all built docnames - self.dependencies = {} # docname -> set of dependent file names, relative to - # documentation root - - # File metadata - self.metadata = {} # docname -> dict of metadata items - - # TOC inventory - self.titles = {} # docname -> title node - self.tocs = {} # docname -> table of contents nodetree - self.toc_num_entries = {} # docname -> number of real entries - # used to determine when to show the TOC in a sidebar - # (don't show if it's only one item) - self.toctree_includes = {} # docname -> list of toctree includefiles - self.files_to_rebuild = {} # docname -> set of files (containing its TOCs) - # to rebuild too - self.glob_toctrees = set() # docnames that have :glob: toctrees - - # X-ref target inventory - self.descrefs = {} # fullname -> docname, desctype - self.filemodules = {} # docname -> [modules] - self.modules = {} # modname -> docname, synopsis, platform, deprecated - self.labels = {} # labelname -> docname, labelid, sectionname - self.anonlabels = {} # labelname -> docname, labelid - self.progoptions = {} # (program, name) -> docname, labelid - self.reftargets = {} # (type, name) -> docname, labelid - # where type is term, token, envvar, citation - - # Other inventories - self.indexentries = {} # docname -> list of - # (type, string, target, aliasname) - self.versionchanges = {} # version -> list of - # (type, docname, lineno, module, descname, content) - self.images = {} # absolute path -> (docnames, unique filename) - - # These are set while parsing a file - self.docname = None # current document name - self.currmodule = None # current module name - self.currclass = None # current class name - self.currdesc = None # current descref name - self.currprogram = None # current program name - self.index_num = 0 # autonumber for index targets - self.gloss_entries = set() # existing definition labels - - # Some magically present labels - self.labels['genindex'] = ('genindex', '', _('Index')) - self.labels['modindex'] = ('modindex', '', _('Module Index')) - self.labels['search'] = ('search', '', _('Search Page')) - - def set_warnfunc(self, func): - self._warnfunc = func - self.settings['warning_stream'] = RedirStream(func) - - def warn(self, docname, msg, lineno=None): - if docname: - if lineno is None: - lineno = '' - self._warnfunc('%s:%s: %s' % (self.doc2path(docname), lineno, msg)) - else: - self._warnfunc('GLOBAL:: ' + msg) - - def clear_doc(self, docname): - """Remove all traces of a source file in the inventory.""" - if docname in self.all_docs: - self.all_docs.pop(docname, None) - self.metadata.pop(docname, None) - self.dependencies.pop(docname, None) - self.titles.pop(docname, None) - self.tocs.pop(docname, None) - self.toc_num_entries.pop(docname, None) - self.toctree_includes.pop(docname, None) - self.filemodules.pop(docname, None) - self.indexentries.pop(docname, None) - self.glob_toctrees.discard(docname) - - for subfn, fnset in self.files_to_rebuild.items(): - fnset.discard(docname) - if not fnset: - del self.files_to_rebuild[subfn] - for fullname, (fn, _) in self.descrefs.items(): - if fn == docname: - del self.descrefs[fullname] - for modname, (fn, _, _, _) in self.modules.items(): - if fn == docname: - del self.modules[modname] - for labelname, (fn, _, _) in self.labels.items(): - if fn == docname: - del self.labels[labelname] - for key, (fn, _) in self.reftargets.items(): - if fn == docname: - del 
self.reftargets[key] - for key, (fn, _) in self.progoptions.items(): - if fn == docname: - del self.progoptions[key] - for version, changes in self.versionchanges.items(): - new = [change for change in changes if change[1] != docname] - changes[:] = new - for fullpath, (docs, _) in self.images.items(): - docs.discard(docname) - if not docs: - del self.images[fullpath] - - def doc2path(self, docname, base=True, suffix=None): - """ - Return the filename for the document name. - If base is True, return absolute path under self.srcdir. - If base is None, return relative path to self.srcdir. - If base is a path string, return absolute path under that. - If suffix is not None, add it instead of config.source_suffix. - """ - suffix = suffix or self.config.source_suffix - if base is True: - return path.join(self.srcdir, docname.replace(SEP, path.sep)) + suffix - elif base is None: - return docname.replace(SEP, path.sep) + suffix - else: - return path.join(base, docname.replace(SEP, path.sep)) + suffix - - def find_files(self, config): - """ - Find all source files in the source dir and put them in self.found_docs. - """ - exclude_dirs = [d.replace(SEP, path.sep) for d in config.exclude_dirs] - exclude_trees = [d.replace(SEP, path.sep) for d in config.exclude_trees] - self.found_docs = set(get_matching_docs( - self.srcdir, config.source_suffix, exclude_docs=set(config.unused_docs), - exclude_dirs=exclude_dirs, exclude_trees=exclude_trees, - exclude_dirnames=['_sources'] + config.exclude_dirnames)) - - def get_outdated_files(self, config_changed): - """ - Return (added, changed, removed) sets. - """ - # clear all files no longer present - removed = set(self.all_docs) - self.found_docs - - added = set() - changed = set() - - if config_changed: - # config values affect e.g. substitutions - added = self.found_docs - else: - for docname in self.found_docs: - if docname not in self.all_docs: - added.add(docname) - continue - # if the doctree file is not there, rebuild - if not path.isfile(self.doc2path(docname, self.doctreedir, - '.doctree')): - changed.add(docname) - continue - # check the mtime of the document - mtime = self.all_docs[docname] - newmtime = path.getmtime(self.doc2path(docname)) - if newmtime > mtime: - changed.add(docname) - continue - # finally, check the mtime of dependencies - for dep in self.dependencies.get(docname, ()): - try: - # this will do the right thing when dep is absolute too - deppath = path.join(self.srcdir, dep) - if not path.isfile(deppath): - changed.add(docname) - break - depmtime = path.getmtime(deppath) - if depmtime > mtime: - changed.add(docname) - break - except EnvironmentError: - # give it another chance - changed.add(docname) - break - - return added, changed, removed - - def update(self, config, srcdir, doctreedir, app=None): - """(Re-)read all files new or changed since last update. Yields a summary - and then docnames as it processes them. 
Store all environment docnames - in the canonical format (ie using SEP as a separator in place of - os.path.sep).""" - config_changed = False - if self.config is None: - msg = '[new config] ' - config_changed = True - else: - # check if a config value was changed that affects how doctrees are read - for key, descr in config.config_values.iteritems(): - if not descr[1]: - continue - if self.config[key] != config[key]: - msg = '[config changed] ' - config_changed = True - break - else: - msg = '' - # this value is not covered by the above loop because it is handled - # specially by the config class - if self.config.extensions != config.extensions: - msg = '[extensions changed] ' - config_changed = True - # the source and doctree directories may have been relocated - self.srcdir = srcdir - self.doctreedir = doctreedir - self.find_files(config) - - added, changed, removed = self.get_outdated_files(config_changed) - - # if files were added or removed, all documents with globbed toctrees - # must be reread - if added or removed: - changed.update(self.glob_toctrees) - - msg += '%s added, %s changed, %s removed' % (len(added), len(changed), - len(removed)) - yield msg - - self.config = config - self.app = app - - # clear all files no longer present - for docname in removed: - if app: - app.emit('env-purge-doc', self, docname) - self.clear_doc(docname) - - # read all new and changed files - for docname in sorted(added | changed): - yield docname - self.read_doc(docname, app=app) - - if config.master_doc not in self.all_docs: - self.warn(None, 'master file %s not found' % - self.doc2path(config.master_doc)) - - self.app = None - - # remove all non-existing images from inventory - for imgsrc in self.images.keys(): - if not os.access(path.join(self.srcdir, imgsrc), os.R_OK): - del self.images[imgsrc] - - if app: - app.emit('env-updated', self) - - - # --------- SINGLE FILE READING -------------------------------------------- - - def read_doc(self, docname, src_path=None, save_parsed=True, app=None): - """ - Parse a file and add/update inventory entries for the doctree. - If srcpath is given, read from a different source file. 
- """ - # remove all inventory entries for that file - if app: - app.emit('env-purge-doc', self, docname) - self.clear_doc(docname) - - if src_path is None: - src_path = self.doc2path(docname) - - if self.config.default_role: - role_fn, messages = roles.role(self.config.default_role, english, - 0, dummy_reporter) - if role_fn: - roles._roles[''] = role_fn - else: - self.warn(docname, 'default role %s not found' % - self.config.default_role) - - self.docname = docname - self.settings['input_encoding'] = self.config.source_encoding - - class SphinxSourceClass(FileInput): - def read(self): - data = FileInput.read(self) - if app: - arg = [data] - app.emit('source-read', docname, arg) - data = arg[0] - return data - - # publish manually - pub = Publisher(reader=SphinxStandaloneReader(), - writer=SphinxDummyWriter(), - source_class=SphinxSourceClass, - destination_class=NullOutput) - pub.set_components(None, 'restructuredtext', None) - pub.process_programmatic_settings(None, self.settings, None) - pub.set_source(None, src_path) - pub.set_destination(None, None) - try: - pub.publish() - doctree = pub.document - except UnicodeError, err: - from sphinx.application import SphinxError - raise SphinxError(err.message) - self.filter_messages(doctree) - self.process_dependencies(docname, doctree) - self.process_images(docname, doctree) - self.process_metadata(docname, doctree) - self.create_title_from(docname, doctree) - self.note_labels_from(docname, doctree) - self.note_indexentries_from(docname, doctree) - self.note_citations_from(docname, doctree) - self.build_toc_from(docname, doctree) - - # store time of reading, used to find outdated files - self.all_docs[docname] = time.time() - - if app: - app.emit('doctree-read', doctree) - - # make it picklable - doctree.reporter = None - doctree.transformer = None - doctree.settings.warning_stream = None - doctree.settings.env = None - doctree.settings.record_dependencies = None - for metanode in doctree.traverse(MetaBody.meta): - # docutils' meta nodes aren't picklable because the class is nested - metanode.__class__ = addnodes.meta - - # cleanup - self.docname = None - self.currmodule = None - self.currclass = None - self.gloss_entries = set() - - if save_parsed: - # save the parsed doctree - doctree_filename = self.doc2path(docname, self.doctreedir, '.doctree') - dirname = path.dirname(doctree_filename) - if not path.isdir(dirname): - os.makedirs(dirname) - f = open(doctree_filename, 'wb') - try: - pickle.dump(doctree, f, pickle.HIGHEST_PROTOCOL) - finally: - f.close() - else: - return doctree - - def filter_messages(self, doctree): - """ - Filter system messages from a doctree. - """ - filterlevel = self.config.keep_warnings and 2 or 5 - for node in doctree.traverse(nodes.system_message): - if node['level'] < filterlevel: - node.parent.remove(node) - - def process_dependencies(self, docname, doctree): - """ - Process docutils-generated dependency info. - """ - deps = doctree.settings.record_dependencies - if not deps: - return - docdir = path.dirname(self.doc2path(docname, base=None)) - for dep in deps.list: - dep = path.join(docdir, dep) - self.dependencies.setdefault(docname, set()).add(dep) - - def process_images(self, docname, doctree): - """ - Process and rewrite image URIs. - """ - existing_names = set(v[1] for v in self.images.itervalues()) - docdir = path.dirname(self.doc2path(docname, base=None)) - for node in doctree.traverse(nodes.image): - # Map the mimetype to the corresponding image. 
The writer may - # choose the best image from these candidates. The special key * is - # set if there is only single candiate to be used by a writer. - # The special key ? is set for nonlocal URIs. - node['candidates'] = candidates = {} - imguri = node['uri'] - if imguri.find('://') != -1: - self.warn(docname, 'Nonlocal image URI found: %s' % imguri, node.line) - candidates['?'] = imguri - continue - # imgpath is the image path *from srcdir* - imgpath = path.normpath(path.join(docdir, imguri)) - # set imgpath as default URI - node['uri'] = imgpath - if imgpath.endswith(os.extsep + '*'): - for filename in glob(path.join(self.srcdir, imgpath)): - new_imgpath = relative_path(self.srcdir, filename) - if filename.lower().endswith('.pdf'): - candidates['application/pdf'] = new_imgpath - elif filename.lower().endswith('.svg'): - candidates['image/svg+xml'] = new_imgpath - else: - try: - f = open(filename, 'rb') - try: - imgtype = imghdr.what(f) - finally: - f.close() - except (OSError, IOError): - self.warn(docname, 'Image file %s not readable' % filename) - if imgtype: - candidates['image/' + imgtype] = new_imgpath - else: - candidates['*'] = imgpath - # map image paths to unique image names (so that they can be put - # into a single directory) - for imgpath in candidates.itervalues(): - self.dependencies.setdefault(docname, set()).add(imgpath) - if not os.access(path.join(self.srcdir, imgpath), os.R_OK): - self.warn(docname, 'Image file not readable: %s' % imgpath, - node.line) - if imgpath in self.images: - self.images[imgpath][0].add(docname) - continue - uniquename = path.basename(imgpath) - base, ext = path.splitext(uniquename) - i = 0 - while uniquename in existing_names: - i += 1 - uniquename = '%s%s%s' % (base, i, ext) - self.images[imgpath] = (set([docname]), uniquename) - existing_names.add(uniquename) - - def process_metadata(self, docname, doctree): - """ - Process the docinfo part of the doctree as metadata. - """ - self.metadata[docname] = md = {} - try: - docinfo = doctree[0] - except IndexError: - # probably an empty document - return - if docinfo.__class__ is not nodes.docinfo: - # nothing to see here - return - for node in docinfo: - if node.__class__ is nodes.author: - # handled specially by docutils - md['author'] = node.astext() - elif node.__class__ is nodes.field: - name, body = node - md[name.astext()] = body.astext() - del doctree[0] - - def create_title_from(self, docname, document): - """ - Add a title node to the document (just copy the first section title), - and store that title in the environment. 
- """ - for node in document.traverse(nodes.section): - titlenode = nodes.title() - visitor = SphinxContentsFilter(document) - node[0].walkabout(visitor) - titlenode += visitor.get_entry_text() - self.titles[docname] = titlenode - return - - def note_labels_from(self, docname, document): - for name, explicit in document.nametypes.iteritems(): - if not explicit: - continue - labelid = document.nameids[name] - if labelid is None: - continue - node = document.ids[labelid] - if name.isdigit() or node.has_key('refuri') or \ - node.tagname.startswith('desc_'): - # ignore footnote labels, labels automatically generated from a - # link and description units - continue - if name in self.labels: - self.warn(docname, 'duplicate label %s, ' % name + - 'other instance in %s' % self.doc2path(self.labels[name][0]), - node.line) - self.anonlabels[name] = docname, labelid - if node.tagname == 'section': - sectname = node[0].astext() # node[0] == title node - elif node.tagname == 'figure': - for n in node: - if n.tagname == 'caption': - sectname = n.astext() - break - else: - continue - else: - # anonymous-only labels - continue - self.labels[name] = docname, labelid, sectname - - def note_indexentries_from(self, docname, document): - entries = self.indexentries[docname] = [] - for node in document.traverse(addnodes.index): - entries.extend(node['entries']) - - def note_citations_from(self, docname, document): - for node in document.traverse(nodes.citation): - label = node[0].astext() - if ('citation', label) in self.reftargets: - self.warn(docname, 'duplicate citation %s, ' % label + - 'other instance in %s' % self.doc2path( - self.reftargets['citation', label][0]), node.line) - self.reftargets['citation', label] = (docname, node['ids'][0]) - - def note_toctree(self, docname, toctreenode): - """Note a TOC tree directive in a document and gather information about - file relations from it.""" - if toctreenode['glob']: - self.glob_toctrees.add(docname) - includefiles = toctreenode['includefiles'] - for includefile in includefiles: - # note that if the included file is rebuilt, this one must be - # too (since the TOC of the included file could have changed) - self.files_to_rebuild.setdefault(includefile, set()).add(docname) - self.toctree_includes.setdefault(docname, []).extend(includefiles) - - def build_toc_from(self, docname, document): - """Build a TOC from the doctree and store it in the inventory.""" - numentries = [0] # nonlocal again... 
- - try: - maxdepth = int(self.metadata[docname].get('tocdepth', 0)) - except ValueError: - maxdepth = 0 - - def build_toc(node, depth=1): - entries = [] - for subnode in node: - if isinstance(subnode, addnodes.toctree): - # just copy the toctree node which is then resolved - # in self.get_and_resolve_doctree - item = subnode.copy() - entries.append(item) - # do the inventory stuff - self.note_toctree(docname, subnode) - continue - if not isinstance(subnode, nodes.section): - continue - title = subnode[0] - # copy the contents of the section title, but without references - # and unnecessary stuff - visitor = SphinxContentsFilter(document) - title.walkabout(visitor) - nodetext = visitor.get_entry_text() - if not numentries[0]: - # for the very first toc entry, don't add an anchor - # as it is the file's title anyway - anchorname = '' - else: - anchorname = '#' + subnode['ids'][0] - numentries[0] += 1 - reference = nodes.reference('', '', refuri=docname, - anchorname=anchorname, - *nodetext) - para = addnodes.compact_paragraph('', '', reference) - item = nodes.list_item('', para) - if maxdepth == 0 or depth < maxdepth: - item += build_toc(subnode, depth+1) - entries.append(item) - if entries: - return nodes.bullet_list('', *entries) - return [] - toc = build_toc(document) - if toc: - self.tocs[docname] = toc - else: - self.tocs[docname] = nodes.bullet_list('') - self.toc_num_entries[docname] = numentries[0] - - def get_toc_for(self, docname): - """Return a TOC nodetree -- for use on the same page only!""" - toc = self.tocs[docname].deepcopy() - for node in toc.traverse(nodes.reference): - node['refuri'] = node['anchorname'] - return toc - - # ------- - # these are called from docutils directives and therefore use self.docname - # - def note_descref(self, fullname, desctype, line): - if fullname in self.descrefs: - self.warn(self.docname, - 'duplicate canonical description name %s, ' % fullname + - 'other instance in %s' % self.doc2path(self.descrefs[fullname][0]), - line) - self.descrefs[fullname] = (self.docname, desctype) - - def note_module(self, modname, synopsis, platform, deprecated): - self.modules[modname] = (self.docname, synopsis, platform, deprecated) - self.filemodules.setdefault(self.docname, []).append(modname) - - def note_progoption(self, optname, labelid): - self.progoptions[self.currprogram, optname] = (self.docname, labelid) - - def note_reftarget(self, type, name, labelid): - self.reftargets[type, name] = (self.docname, labelid) - - def note_versionchange(self, type, version, node, lineno): - self.versionchanges.setdefault(version, []).append( - (type, self.docname, lineno, self.currmodule, self.currdesc, node.astext())) - - def note_dependency(self, filename): - basename = path.dirname(self.doc2path(self.docname, base=None)) - # this will do the right thing when filename is absolute too - filename = path.join(basename, filename) - self.dependencies.setdefault(self.docname, set()).add(filename) - # ------- - - # --------- RESOLVING REFERENCES AND TOCTREES ------------------------------ - - def get_doctree(self, docname): - """Read the doctree for a file from the pickle and return it.""" - doctree_filename = self.doc2path(docname, self.doctreedir, '.doctree') - f = open(doctree_filename, 'rb') - try: - doctree = pickle.load(f) - finally: - f.close() - doctree.settings.env = self - doctree.reporter = Reporter(self.doc2path(docname), 2, 4, - stream=RedirStream(self._warnfunc)) - return doctree - - - def get_and_resolve_doctree(self, docname, builder, doctree=None, - 
prune_toctrees=True): - """Read the doctree from the pickle, resolve cross-references and - toctrees and return it.""" - if doctree is None: - doctree = self.get_doctree(docname) - - # resolve all pending cross-references - self.resolve_references(doctree, docname, builder) - - # now, resolve all toctree nodes - for toctreenode in doctree.traverse(addnodes.toctree): - result = self.resolve_toctree(docname, builder, toctreenode, - prune=prune_toctrees) - if result is None: - toctreenode.replace_self([]) - else: - toctreenode.replace_self(result) - - return doctree - - def resolve_toctree(self, docname, builder, toctree, prune=True, maxdepth=0, - titles_only=False): - """ - Resolve a *toctree* node into individual bullet lists with titles - as items, returning None (if no containing titles are found) or - a new node. - - If *prune* is True, the tree is pruned to *maxdepth*, or if that is 0, - to the value of the *maxdepth* option on the *toctree* node. - If *titles_only* is True, only toplevel document titles will be in the - resulting tree. - """ - - def _walk_depth(node, depth, maxdepth, titleoverrides): - """Utility: Cut a TOC at a specified depth.""" - for subnode in node.children[:]: - if isinstance(subnode, (addnodes.compact_paragraph, nodes.list_item)): - subnode['classes'].append('toctree-l%d' % (depth-1)) - _walk_depth(subnode, depth, maxdepth, titleoverrides) - elif isinstance(subnode, nodes.bullet_list): - if maxdepth > 0 and depth > maxdepth: - subnode.parent.replace(subnode, []) - else: - _walk_depth(subnode, depth+1, maxdepth, titleoverrides) - - def _entries_from_toctree(toctreenode, separate=False): - """Return TOC entries for a toctree node.""" - includefiles = map(str, toctreenode['includefiles']) - - entries = [] - for includefile in includefiles: - try: - toc = self.tocs[includefile].deepcopy() - if not toc.children: - # empty toc means: no titles will show up in the toctree - self.warn(docname, 'toctree contains reference to document ' - '%r that doesn\'t have a title: no link will be ' - 'generated' % includefile) - except KeyError: - # this is raised if the included file does not exist - self.warn(docname, 'toctree contains reference to nonexisting ' - 'document %r' % includefile) - else: - # if titles_only is given, only keep the main title and - # sub-toctrees - if titles_only: - # delete everything but the toplevel title(s) and toctrees - for toplevel in toc: - # nodes with length 1 don't have any children anyway - if len(toplevel) > 1: - subtoctrees = toplevel.traverse(addnodes.toctree) - toplevel[1][:] = subtoctrees - # resolve all sub-toctrees - for toctreenode in toc.traverse(addnodes.toctree): - i = toctreenode.parent.index(toctreenode) + 1 - for item in _entries_from_toctree(toctreenode): - toctreenode.parent.insert(i, item) - i += 1 - toctreenode.parent.remove(toctreenode) - if separate: - entries.append(toc) - else: - entries.extend(toc.children) - return entries - - maxdepth = maxdepth or toctree.get('maxdepth', -1) - titleoverrides = toctree.get('includetitles', {}) - - tocentries = _entries_from_toctree(toctree, separate=True) - if not tocentries: - return None - - newnode = addnodes.compact_paragraph('', '', *tocentries) - newnode['toctree'] = True - # prune the tree to maxdepth and replace titles, also set level classes - _walk_depth(newnode, 1, prune and maxdepth or 0, titleoverrides) - # replace titles, if needed, and set the target paths in the - # toctrees (they are not known at TOC generation time) - for refnode in 
newnode.traverse(nodes.reference): - refnode['refuri'] = builder.get_relative_uri( - docname, refnode['refuri']) + refnode['anchorname'] - if titleoverrides and not refnode['anchorname'] \ - and refnode['refuri'] in titleoverrides: - newtitle = titleoverrides[refnode['refuri']] - refnode.children = [nodes.Text(newtitle)] - return newnode - - descroles = frozenset(('data', 'exc', 'func', 'class', 'const', 'attr', 'obj', - 'meth', 'cfunc', 'cmember', 'cdata', 'ctype', 'cmacro')) - - def resolve_references(self, doctree, fromdocname, builder): - reftarget_roles = set(('token', 'term', 'citation')) - # add all custom xref types too - reftarget_roles.update(i[0] for i in additional_xref_types.values()) - - for node in doctree.traverse(addnodes.pending_xref): - contnode = node[0].deepcopy() - newnode = None - - typ = node['reftype'] - target = node['reftarget'] - - try: - if typ == 'ref': - if node['refcaption']: - # reference to anonymous label; the reference uses the supplied - # link caption - docname, labelid = self.anonlabels.get(target, ('','')) - sectname = node.astext() - if not docname: - newnode = doctree.reporter.system_message( - 2, 'undefined label: %s' % target) - else: - # reference to the named label; the final node will contain the - # section name after the label - docname, labelid, sectname = self.labels.get(target, ('','','')) - if not docname: - newnode = doctree.reporter.system_message( - 2, 'undefined label: %s -- if you don\'t ' % target + - 'give a link caption the label must precede a section ' - 'header.') - if docname: - newnode = nodes.reference('', '') - innernode = nodes.emphasis(sectname, sectname) - if docname == fromdocname: - newnode['refid'] = labelid - else: - # set more info in contnode in case the get_relative_uri call - # raises NoUri, the builder will then have to resolve these - contnode = addnodes.pending_xref('') - contnode['refdocname'] = docname - contnode['refsectname'] = sectname - newnode['refuri'] = builder.get_relative_uri( - fromdocname, docname) - if labelid: - newnode['refuri'] += '#' + labelid - newnode.append(innernode) - elif typ == 'keyword': - # keywords are referenced by named labels - docname, labelid, _ = self.labels.get(target, ('','','')) - if not docname: - #self.warn(fromdocname, 'unknown keyword: %s' % target) - newnode = contnode - else: - newnode = nodes.reference('', '') - if docname == fromdocname: - newnode['refid'] = labelid - else: - newnode['refuri'] = builder.get_relative_uri( - fromdocname, docname) + '#' + labelid - newnode.append(contnode) - elif typ == 'option': - progname = node['refprogram'] - docname, labelid = self.progoptions.get((progname, target), ('', '')) - if not docname: - newnode = contnode - else: - newnode = nodes.reference('', '') - if docname == fromdocname: - newnode['refid'] = labelid - else: - newnode['refuri'] = builder.get_relative_uri( - fromdocname, docname) + '#' + labelid - newnode.append(contnode) - elif typ in reftarget_roles: - docname, labelid = self.reftargets.get((typ, target), ('', '')) - if not docname: - if typ == 'term': - self.warn(fromdocname, 'term not in glossary: %s' % target, - node.line) - elif typ == 'citation': - self.warn(fromdocname, 'citation not found: %s' % target, - node.line) - newnode = contnode - else: - newnode = nodes.reference('', '') - if docname == fromdocname: - newnode['refid'] = labelid - else: - newnode['refuri'] = builder.get_relative_uri( - fromdocname, docname, typ) + '#' + labelid - newnode.append(contnode) - elif typ == 'mod': - docname, synopsis, 
platform, deprecated = \ - self.modules.get(target, ('','','', '')) - if not docname: - newnode = builder.app.emit_firstresult('missing-reference', - self, node, contnode) - if not newnode: - newnode = contnode - elif docname == fromdocname: - # don't link to self - newnode = contnode - else: - newnode = nodes.reference('', '') - newnode['refuri'] = builder.get_relative_uri( - fromdocname, docname) + '#module-' + target - newnode['reftitle'] = '%s%s%s' % ( - (platform and '(%s) ' % platform), - synopsis, (deprecated and ' (deprecated)' or '')) - newnode.append(contnode) - elif typ in self.descroles: - # "descrefs" - modname = node['modname'] - clsname = node['classname'] - searchorder = node.hasattr('refspecific') and 1 or 0 - name, desc = self.find_desc(modname, clsname, - target, typ, searchorder) - if not desc: - newnode = builder.app.emit_firstresult('missing-reference', - self, node, contnode) - if not newnode: - newnode = contnode - else: - newnode = nodes.reference('', '') - if desc[0] == fromdocname: - newnode['refid'] = name - else: - newnode['refuri'] = ( - builder.get_relative_uri(fromdocname, desc[0]) - + '#' + name) - newnode['reftitle'] = name - newnode.append(contnode) - else: - raise RuntimeError('unknown xfileref node encountered: %s' % node) - except NoUri: - newnode = contnode - if newnode: - node.replace_self(newnode) - - # allow custom references to be resolved - builder.app.emit('doctree-resolved', doctree, fromdocname) - - def create_index(self, builder, _fixre=re.compile(r'(.*) ([(][^()]*[)])')): - """Create the real index from the collected index entries.""" - new = {} - - def add_entry(word, subword, dic=new): - entry = dic.get(word) - if not entry: - dic[word] = entry = [[], {}] - if subword: - add_entry(subword, '', dic=entry[1]) - else: - try: - entry[0].append(builder.get_relative_uri('genindex', fn) - + '#' + tid) - except NoUri: - pass - - for fn, entries in self.indexentries.iteritems(): - # new entry types must be listed in directives/other.py! 
- for type, string, tid, alias in entries: - if type == 'single': - try: - entry, subentry = string.split(';', 1) - except ValueError: - entry, subentry = string, '' - if not entry: - self.warn(fn, 'invalid index entry %r' % string) - continue - add_entry(entry.strip(), subentry.strip()) - elif type == 'pair': - try: - first, second = map(lambda x: x.strip(), - string.split(';', 1)) - if not first or not second: - raise ValueError - except ValueError: - self.warn(fn, 'invalid pair index entry %r' % string) - continue - add_entry(first, second) - add_entry(second, first) - elif type == 'triple': - try: - first, second, third = map(lambda x: x.strip(), - string.split(';', 2)) - if not first or not second or not third: - raise ValueError - except ValueError: - self.warn(fn, 'invalid triple index entry %r' % string) - continue - add_entry(first, second+' '+third) - add_entry(second, third+', '+first) - add_entry(third, first+' '+second) - else: - self.warn(fn, 'unknown index entry type %r' % type) - - newlist = new.items() - newlist.sort(key=lambda t: t[0].lower()) - - # fixup entries: transform - # func() (in module foo) - # func() (in module bar) - # into - # func() - # (in module foo) - # (in module bar) - oldkey = '' - oldsubitems = None - i = 0 - while i < len(newlist): - key, (targets, subitems) = newlist[i] - # cannot move if it hassubitems; structure gets too complex - if not subitems: - m = _fixre.match(key) - if m: - if oldkey == m.group(1): - # prefixes match: add entry as subitem of the previous entry - oldsubitems.setdefault(m.group(2), [[], {}])[0].extend(targets) - del newlist[i] - continue - oldkey = m.group(1) - else: - oldkey = key - oldsubitems = subitems - i += 1 - - # group the entries by letter - def keyfunc((k, v), ltrs=uppercase+'_'): - # hack: mutate the subitems dicts to a list in the keyfunc - v[1] = sorted((si, se) for (si, (se, void)) in v[1].iteritems()) - # now calculate the key - letter = k[0].upper() - if letter in ltrs: - return letter - else: - # get all other symbols under one heading - return 'Symbols' - return [(key, list(group)) for (key, group) in groupby(newlist, keyfunc)] - - def collect_relations(self): - relations = {} - getinc = self.toctree_includes.get - def collect(parents, docname, previous, next): - includes = getinc(docname) - # previous - if not previous: - # if no previous sibling, go to parent - previous = parents[0][0] - else: - # else, go to previous sibling, or if it has children, to - # the last of its children, or if that has children, to the - # last of those, and so forth - while 1: - previncs = getinc(previous) - if previncs: - previous = previncs[-1] - else: - break - # next - if includes: - # if it has children, go to first of them - next = includes[0] - elif next: - # else, if next sibling, go to it - pass - else: - # else, go to the next sibling of the parent, if present, - # else the grandparent's sibling, if present, and so forth - for parname, parindex in parents: - parincs = getinc(parname) - if parincs and parindex + 1 < len(parincs): - next = parincs[parindex+1] - break - # else it will stay None - # same for children - if includes: - for subindex, args in enumerate(izip(includes, [None] + includes, - includes[1:] + [None])): - collect([(docname, subindex)] + parents, *args) - relations[docname] = [parents[0][0], previous, next] - collect([(None, 0)], self.config.master_doc, None, None) - return relations - - def check_consistency(self): - """Do consistency checks.""" - - for docname in sorted(self.all_docs): - if docname 
not in self.files_to_rebuild: - if docname == self.config.master_doc: - # the master file is not included anywhere ;) - continue - self.warn(docname, 'document isn\'t included in any toctree') - - # --------- QUERYING ------------------------------------------------------- - - def find_desc(self, modname, classname, name, type, searchorder=0): - """Find a description node matching "name", perhaps using - the given module and/or classname.""" - # skip parens - if name[-2:] == '()': - name = name[:-2] - - if not name: - return None, None - - # don't add module and class names for C things - if type[0] == 'c' and type not in ('class', 'const'): - # skip trailing star and whitespace - name = name.rstrip(' *') - if name in self.descrefs and self.descrefs[name][1][0] == 'c': - return name, self.descrefs[name] - return None, None - - newname = None - if searchorder == 1: - if modname and classname and \ - modname + '.' + classname + '.' + name in self.descrefs: - newname = modname + '.' + classname + '.' + name - elif modname and modname + '.' + name in self.descrefs: - newname = modname + '.' + name - elif name in self.descrefs: - newname = name - else: - if name in self.descrefs: - newname = name - elif modname and modname + '.' + name in self.descrefs: - newname = modname + '.' + name - elif modname and classname and \ - modname + '.' + classname + '.' + name in self.descrefs: - newname = modname + '.' + classname + '.' + name - # special case: builtin exceptions have module "exceptions" set - elif type == 'exc' and '.' not in name and \ - 'exceptions.' + name in self.descrefs: - newname = 'exceptions.' + name - # special case: object methods - elif type in ('func', 'meth') and '.' not in name and \ - 'object.' + name in self.descrefs: - newname = 'object.' + name - if newname is None: - return None, None - return newname, self.descrefs[newname] - - def find_keyword(self, keyword, avoid_fuzzy=False, cutoff=0.6, n=20): - """ - Find keyword matches for a keyword. If there's an exact match, just return - it, else return a list of fuzzy matches if avoid_fuzzy isn't True. - - Keywords searched are: first modules, then descrefs. - - Returns: None if nothing found - (type, docname, anchorname) if exact match found - list of (quality, type, docname, anchorname, description) if fuzzy - """ - - if keyword in self.modules: - docname, title, system, deprecated = self.modules[keyword] - return 'module', docname, 'module-' + keyword - if keyword in self.descrefs: - docname, ref_type = self.descrefs[keyword] - return ref_type, docname, keyword - # special cases - if '.' 
not in keyword: - # exceptions are documented in the exceptions module - if 'exceptions.'+keyword in self.descrefs: - docname, ref_type = self.descrefs['exceptions.'+keyword] - return ref_type, docname, 'exceptions.'+keyword - # special methods are documented as object methods - if 'object.'+keyword in self.descrefs: - docname, ref_type = self.descrefs['object.'+keyword] - return ref_type, docname, 'object.'+keyword - - if avoid_fuzzy: - return - - # find fuzzy matches - s = difflib.SequenceMatcher() - s.set_seq2(keyword.lower()) - - def possibilities(): - for title, (fn, desc, _, _) in self.modules.iteritems(): - yield ('module', fn, 'module-'+title, desc) - for title, (fn, desctype) in self.descrefs.iteritems(): - yield (desctype, fn, title, '') - - def dotsearch(string): - parts = string.lower().split('.') - for idx in xrange(0, len(parts)): - yield '.'.join(parts[idx:]) - - result = [] - for type, docname, title, desc in possibilities(): - best_res = 0 - for part in dotsearch(title): - s.set_seq1(part) - if s.real_quick_ratio() >= cutoff and \ - s.quick_ratio() >= cutoff and \ - s.ratio() >= cutoff and \ - s.ratio() > best_res: - best_res = s.ratio() - if best_res: - result.append((best_res, type, docname, title, desc)) - - return heapq.nlargest(n, result) diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/ext/__init__.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/ext/__init__.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,10 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.ext - ~~~~~~~~~~ - - Contains Sphinx features not activated by default. - - :copyright: 2008 by Georg Brandl. - :license: BSD. -""" diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/ext/autodoc.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/ext/autodoc.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,655 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.ext.autodoc - ~~~~~~~~~~~~~~~~~~ - - Automatically insert docstrings for functions, classes or whole modules into - the doctree, thus avoiding duplication between docstrings and documentation - for those who like elaborate docstrings. - - :copyright: 2008 by Georg Brandl, Pauli Virtanen, Martin Hans. - :license: BSD. -""" - -import re -import sys -import types -import inspect -import linecache -from types import FunctionType, BuiltinMethodType, MethodType - -from docutils import nodes -from docutils.parsers.rst import directives -from docutils.statemachine import ViewList - -from sphinx.util import rpartition, nested_parse_with_titles -from sphinx.directives.desc import py_sig_re - -try: - base_exception = BaseException -except NameError: - base_exception = Exception - -_charset_re = re.compile(r'coding[:=]\s*([-\w.]+)') -_module_charsets = {} - - -class Options(object): - pass - - -def is_static_method(obj): - """Check if the object given is a static method.""" - if isinstance(obj, (FunctionType, classmethod)): - return True - elif isinstance(obj, BuiltinMethodType): - return obj.__self__ is not None - elif isinstance(obj, MethodType): - return obj.im_self is not None - return False - - -class AutodocReporter(object): - """ - A reporter replacement that assigns the correct source name - and line number to a system message, as recorded in a ViewList. 
- """ - def __init__(self, viewlist, reporter): - self.viewlist = viewlist - self.reporter = reporter - - def __getattr__(self, name): - return getattr(self.reporter, name) - - def system_message(self, level, message, *children, **kwargs): - if 'line' in kwargs: - try: - source, line = self.viewlist.items[kwargs['line']] - except IndexError: - pass - else: - kwargs['source'] = source - kwargs['line'] = line - return self.reporter.system_message(level, message, - *children, **kwargs) - - def debug(self, *args, **kwargs): - if self.reporter.debug_flag: - return self.system_message(0, *args, **kwargs) - - def info(self, *args, **kwargs): - return self.system_message(1, *args, **kwargs) - - def warning(self, *args, **kwargs): - return self.system_message(2, *args, **kwargs) - - def error(self, *args, **kwargs): - return self.system_message(3, *args, **kwargs) - - def severe(self, *args, **kwargs): - return self.system_message(4, *args, **kwargs) - - -# Some useful event listener factories for autodoc-process-docstring. - -def cut_lines(pre, post=0, what=None): - """ - Return a listener that removes the first *pre* and last *post* - lines of every docstring. If *what* is a sequence of strings, - only docstrings of a type in *what* will be processed. - - Use like this (e.g. in the ``setup()`` function of :file:`conf.py`):: - - from sphinx.ext.autodoc import cut_lines - app.connect('autodoc-process-docstring', cut_lines(4, what=['module'])) - - This can (and should) be used in place of :confval:`automodule_skip_lines`. - """ - def process(app, what_, name, obj, options, lines): - if what and what_ not in what: - return - del lines[:pre] - if post: - # remove one trailing blank line. - if lines and not lines[-1]: - lines.pop(-1) - del lines[-post:] - # make sure there is a blank line at the end - if lines and lines[-1]: - lines.append('') - return process - -def between(marker, what=None, keepempty=False): - """ - Return a listener that only keeps lines between lines that match the - *marker* regular expression. If no line matches, the resulting docstring - would be empty, so no change will be made unless *keepempty* is true. - - If *what* is a sequence of strings, only docstrings of a type in *what* will - be processed. - """ - marker_re = re.compile(marker) - def process(app, what_, name, obj, options, lines): - if what and what_ not in what: - return - deleted = 0 - delete = True - orig_lines = lines[:] - for i, line in enumerate(orig_lines): - if delete: - lines.pop(i - deleted) - deleted += 1 - if marker_re.match(line): - delete = not delete - if delete: - lines.pop(i - deleted) - deleted += 1 - if not lines and not keepempty: - lines[:] = orig_lines - # make sure there is a blank line at the end - if lines and lines[-1]: - lines.append('') - return process - - -def isdescriptor(x): - """Check if the object is some kind of descriptor.""" - for item in '__get__', '__set__', '__delete__': - if callable(getattr(x, item, None)): - return True - return False - - -def prepare_docstring(s): - """ - Convert a docstring into lines of parseable reST. Return it as a list of - lines usable for inserting into a docutils ViewList (used as argument - of nested_parse().) An empty line is added to act as a separator between - this docstring and following content. - """ - lines = s.expandtabs().splitlines() - # Find minimum indentation of any non-blank lines after first line. 
- margin = sys.maxint - for line in lines[1:]: - content = len(line.lstrip()) - if content: - indent = len(line) - content - margin = min(margin, indent) - # Remove indentation. - if lines: - lines[0] = lines[0].lstrip() - if margin < sys.maxint: - for i in range(1, len(lines)): lines[i] = lines[i][margin:] - # Remove any leading blank lines. - while lines and not lines[0]: - lines.pop(0) - # make sure there is an empty line at the end - if lines and lines[-1]: - lines.append('') - return lines - - -def get_module_charset(module): - """Return the charset of the given module (cached in _module_charsets).""" - if module in _module_charsets: - return _module_charsets[module] - try: - filename = __import__(module, None, None, ['foo']).__file__ - except (ImportError, AttributeError): - return None - if filename[-4:].lower() in ('.pyc', '.pyo'): - filename = filename[:-1] - for line in [linecache.getline(filename, x) for x in (1, 2)]: - match = _charset_re.search(line) - if match is not None: - charset = match.group(1) - break - else: - charset = 'ascii' - _module_charsets[module] = charset - return charset - - -class RstGenerator(object): - def __init__(self, options, document, lineno): - self.options = options - self.env = document.settings.env - self.reporter = document.reporter - self.lineno = lineno - self.filename_set = set() - self.warnings = [] - self.result = ViewList() - - def warn(self, msg): - self.warnings.append(self.reporter.warning(msg, line=self.lineno)) - - def get_doc(self, what, name, obj): - """Format and yield lines of the docstring(s) for the object.""" - docstrings = [] - if getattr(obj, '__doc__', None): - docstrings.append(obj.__doc__) - # skip some lines in module docstrings if configured - if what == 'module' and self.env.config.automodule_skip_lines and docstrings: - docstrings[0] = '\n'.join(docstrings[0].splitlines() - [self.env.config.automodule_skip_lines:]) - # for classes, what the "docstring" is can be controlled via an option - if what in ('class', 'exception'): - content = self.env.config.autoclass_content - if content in ('both', 'init'): - initdocstring = getattr(obj, '__init__', None).__doc__ - # for new-style classes, no __init__ means default __init__ - if initdocstring == object.__init__.__doc__: - initdocstring = None - if initdocstring: - if content == 'init': - docstrings = [initdocstring] - else: - docstrings.append(initdocstring) - # the default is only the class docstring - - # decode the docstrings using the module's source encoding - charset = None - module = getattr(obj, '__module__', None) - if module is not None: - charset = get_module_charset(module) - - for docstring in docstrings: - if isinstance(docstring, str): - if charset: - docstring = docstring.decode(charset) - else: - try: - # try decoding with utf-8, should only work for real UTF-8 - docstring = docstring.decode('utf-8') - except UnicodeError: - # last resort -- can't fail - docstring = docstring.decode('latin1') - docstringlines = prepare_docstring(docstring) - if self.env.app: - # let extensions preprocess docstrings - self.env.app.emit('autodoc-process-docstring', - what, name, obj, self.options, docstringlines) - for line in docstringlines: - yield line - - def resolve_name(self, what, name): - """ - Determine what module to import and what attribute to document. - - Returns a tuple of: the full name, the module name, a path of - names to get via getattr, the signature and return annotation. 
- """ - # first, parse the definition -- auto directives for classes and functions - # can contain a signature which is then used instead of an autogenerated one - try: - path, base, args, retann = py_sig_re.match(name).groups() - except: - self.warn('invalid signature for auto%s (%r)' % (what, name)) - return - # fullname is the fully qualified name, base the name after the last dot - fullname = (path or '') + base - - if what == 'module': - if args or retann: - self.warn('ignoring signature arguments and return annotation ' - 'for automodule %s' % fullname) - return fullname, fullname, [], None, None - - elif what in ('class', 'exception', 'function'): - if path: - mod = path.rstrip('.') - else: - mod = None - # if documenting a toplevel object without explicit module, it can - # be contained in another auto directive ... - if hasattr(self.env, 'autodoc_current_module'): - mod = self.env.autodoc_current_module - # ... or in the scope of a module directive - if not mod: - mod = self.env.currmodule - return fullname, mod, [base], args, retann - - else: - if path: - mod_cls = path.rstrip('.') - else: - mod_cls = None - # if documenting a class-level object without path, there must be a - # current class, either from a parent auto directive ... - if hasattr(self.env, 'autodoc_current_class'): - mod_cls = self.env.autodoc_current_class - # ... or from a class directive - if mod_cls is None: - mod_cls = self.env.currclass - # ... if still None, there's no way to know - if mod_cls is None: - return fullname, None, [], args, retann - mod, cls = rpartition(mod_cls, '.') - # if the module name is still missing, get it like above - if not mod and hasattr(self.env, 'autodoc_current_module'): - mod = self.env.autodoc_current_module - if not mod: - mod = self.env.currmodule - return fullname, mod, [cls, base], args, retann - - def format_signature(self, what, name, obj, args, retann): - """ - Return the signature of the object, formatted for display. - """ - if what not in ('class', 'method', 'function'): - return '' - - err = None - if args is not None: - # signature given explicitly - args = "(%s)" % args - else: - # try to introspect the signature - try: - args = None - getargs = True - if what == 'class': - # for classes, the relevant signature is the __init__ method's - obj = getattr(obj, '__init__', None) - # classes without __init__ method, default __init__ or - # __init__ written in C? - if obj is None or obj is object.__init__ or not \ - (inspect.ismethod(obj) or inspect.isfunction(obj)): - getargs = False - elif inspect.isbuiltin(obj) or inspect.ismethoddescriptor(obj): - # can never get arguments of a C function or method - getargs = False - if getargs: - argspec = inspect.getargspec(obj) - if what in ('class', 'method') and argspec[0] and \ - argspec[0][0] in ('cls', 'self'): - del argspec[0][0] - args = inspect.formatargspec(*argspec) - except Exception, e: - args = None - err = e - - result = self.env.app.emit_firstresult('autodoc-process-signature', what, - name, obj, self.options, args, retann) - if result: - args, retann = result - - if args is not None: - return '%s%s' % (args, retann or '') - elif err: - # re-raise the error for perusal of the handler in generate() - raise RuntimeError(err) - else: - return '' - - def generate(self, what, name, members, add_content, indent=u'', check_module=False): - """ - Generate reST for the object in self.result. 
- """ - fullname, mod, objpath, args, retann = self.resolve_name(what, name) - if not mod: - # need a module to import - self.warn('don\'t know which module to import for autodocumenting %r ' - '(try placing a "module" or "currentmodule" directive in the ' - 'document, or giving an explicit module name)' % fullname) - return - - # the name to put into the generated directive -- doesn't contain the module - name_in_directive = '.'.join(objpath) or mod - - # now, import the module and get object to document - try: - todoc = module = __import__(mod, None, None, ['foo']) - if hasattr(module, '__file__') and module.__file__: - modfile = module.__file__ - if modfile[-4:].lower() in ('.pyc', '.pyo'): - modfile = modfile[:-1] - self.filename_set.add(modfile) - else: - modfile = None # e.g. for builtin and C modules - for part in objpath: - todoc = getattr(todoc, part) - except (ImportError, AttributeError), err: - self.warn('autodoc can\'t import/find %s %r, it reported error: "%s", ' - 'please check your spelling and sys.path' % - (what, str(fullname), err)) - return - - # check __module__ of object if wanted (for members not given explicitly) - if check_module: - if hasattr(todoc, '__module__'): - if todoc.__module__ != mod: - return - - # format the object's signature, if any - try: - sig = self.format_signature(what, name, todoc, args, retann) - except Exception, err: - self.warn('error while formatting signature for %s: %s' % - (fullname, err)) - sig = '' - - # make sure that the result starts with an empty line. This is - # necessary for some situations where another directive preprocesses - # reST and no starting newline is present - self.result.append(u'', '') - - # now, create the directive header - directive = (what == 'method' and is_static_method(todoc)) \ - and 'staticmethod' or what - self.result.append(indent + u'.. %s:: %s%s' % - (directive, name_in_directive, sig), '') - if what == 'module': - # Add some module-specific options - if self.options.synopsis: - self.result.append(indent + u' :synopsis: ' + self.options.synopsis, - '') - if self.options.platform: - self.result.append(indent + u' :platform: ' + self.options.platform, - '') - if self.options.deprecated: - self.result.append(indent + u' :deprecated:', '') - else: - # Be explicit about the module, this is necessary since .. class:: doesn't - # support a prepended module name - self.result.append(indent + u' :module: %s' % mod, '') - if self.options.noindex: - self.result.append(indent + u' :noindex:', '') - self.result.append(u'', '') - - if self.options.show_inheritance and what in ('class', 'exception'): - if len(todoc.__bases__): - bases = [b.__module__ == '__builtin__' and - u':class:`%s`' % b.__name__ or - u':class:`%s.%s`' % (b.__module__, b.__name__) - for b in todoc.__bases__] - self.result.append(indent + u' Bases: %s' % ', '.join(bases), - '') - self.result.append(u'', '') - - # the module directive doesn't have content - if what != 'module': - indent += u' ' - - if modfile: - sourcename = '%s:docstring of %s' % (modfile, fullname) - else: - sourcename = 'docstring of %s' % fullname - - # add content from docstrings - for i, line in enumerate(self.get_doc(what, fullname, todoc)): - self.result.append(indent + line, sourcename, i) - - # add source content, if present - if add_content: - for line, src in zip(add_content.data, add_content.items): - self.result.append(indent + line, src[0], src[1]) - - # document members? 
- if not members or what in ('function', 'method', 'attribute'): - return - - # set current namespace for finding members - self.env.autodoc_current_module = mod - if objpath: - self.env.autodoc_current_class = objpath[0] - - # add members, if possible - _all = members == ['__all__'] - members_check_module = False - if _all: - # unqualified :members: given - if what == 'module': - if hasattr(todoc, '__all__'): - members_check_module = False - all_members = [] - for mname in todoc.__all__: - try: - all_members.append((mname, getattr(todoc, mname))) - except AttributeError: - self.warn('missing attribute mentioned in __all__: ' - 'module %s, attribute %s' % - (todoc.__name__, mname)) - else: - # for implicit module members, check __module__ to avoid - # documenting imported objects - members_check_module = True - all_members = inspect.getmembers(todoc) - else: - if self.options.inherited_members: - # getmembers() uses dir() which pulls in members from all - # base classes - all_members = inspect.getmembers(todoc) - else: - # __dict__ contains only the members directly defined in the class - all_members = sorted(todoc.__dict__.iteritems()) - else: - all_members = [(mname, getattr(todoc, mname)) for mname in members] - for (membername, member) in all_members: - # ignore members whose name starts with _ by default - if _all and membername.startswith('_'): - continue - - # ignore undocumented members if :undoc-members: is not given - doc = getattr(member, '__doc__', None) - skip = not self.options.undoc_members and not doc - # give the user a chance to decide whether this member should be skipped - if self.env.app: - # let extensions preprocess docstrings - skip_user = self.env.app.emit_firstresult( - 'autodoc-skip-member', what, membername, member, skip, self.options) - if skip_user is not None: - skip = skip_user - if skip: - continue - - if what == 'module': - if isinstance(member, (types.FunctionType, - types.BuiltinFunctionType)): - memberwhat = 'function' - elif isinstance(member, types.ClassType) or \ - isinstance(member, type): - if issubclass(member, base_exception): - memberwhat = 'exception' - else: - memberwhat = 'class' - else: - # XXX: todo -- attribute docs - continue - else: - if callable(member): - memberwhat = 'method' - elif isdescriptor(member): - memberwhat = 'attribute' - else: - # XXX: todo -- attribute docs - continue - full_membername = fullname + '.' 
+ membername - self.generate(memberwhat, full_membername, ['__all__'], None, indent, - check_module=members_check_module) - - self.env.autodoc_current_module = None - self.env.autodoc_current_class = None - - -def _auto_directive(dirname, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - what = dirname[4:] # strip "auto" - name = arguments[0] - genopt = Options() - members = options.get('members', []) - genopt.inherited_members = 'inherited-members' in options - if genopt.inherited_members and not members: - # :inherited-members: implies :members: - members = ['__all__'] - genopt.undoc_members = 'undoc-members' in options - genopt.show_inheritance = 'show-inheritance' in options - genopt.noindex = 'noindex' in options - genopt.synopsis = options.get('synopsis', '') - genopt.platform = options.get('platform', '') - genopt.deprecated = 'deprecated' in options - - generator = RstGenerator(genopt, state.document, lineno) - generator.generate(what, name, members, content) - if not generator.result: - return generator.warnings - - # record all filenames as dependencies -- this will at least partially make - # automatic invalidation possible - for fn in generator.filename_set: - state.document.settings.env.note_dependency(fn) - - # use a custom reporter that correctly assigns lines to source and lineno - old_reporter = state.memo.reporter - state.memo.reporter = AutodocReporter(generator.result, state.memo.reporter) - if dirname == 'automodule': - node = nodes.section() - nested_parse_with_titles(state, generator.result, node) - else: - node = nodes.paragraph() - state.nested_parse(generator.result, 0, node) - state.memo.reporter = old_reporter - return generator.warnings + node.children - -def auto_directive(*args, **kwds): - return _auto_directive(*args, **kwds) - -def automodule_directive(*args, **kwds): - return _auto_directive(*args, **kwds) - -def autoclass_directive(*args, **kwds): - return _auto_directive(*args, **kwds) - - -def members_option(arg): - if arg is None: - return ['__all__'] - return [x.strip() for x in arg.split(',')] - - -def setup(app): - mod_options = {'members': members_option, 'undoc-members': directives.flag, - 'noindex': directives.flag, 'inherited-members': directives.flag, - 'show-inheritance': directives.flag, 'synopsis': lambda x: x, - 'platform': lambda x: x, 'deprecated': directives.flag} - cls_options = {'members': members_option, 'undoc-members': directives.flag, - 'noindex': directives.flag, 'inherited-members': directives.flag, - 'show-inheritance': directives.flag} - app.add_directive('automodule', automodule_directive, - 1, (1, 0, 1), **mod_options) - app.add_directive('autoclass', autoclass_directive, - 1, (1, 0, 1), **cls_options) - app.add_directive('autoexception', autoclass_directive, - 1, (1, 0, 1), **cls_options) - app.add_directive('autofunction', auto_directive, 1, (1, 0, 1), - noindex=directives.flag) - app.add_directive('automethod', auto_directive, 1, (1, 0, 1), - noindex=directives.flag) - app.add_directive('autoattribute', auto_directive, 1, (1, 0, 1), - noindex=directives.flag) - # deprecated: remove in some future version. 
- app.add_config_value('automodule_skip_lines', 0, True) - app.add_config_value('autoclass_content', 'class', True) - app.add_event('autodoc-process-docstring') - app.add_event('autodoc-process-signature') - app.add_event('autodoc-skip-member') diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/ext/coverage.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/ext/coverage.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,241 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.ext.coverage - ~~~~~~~~~~~~~~~~~~~ - - Check Python modules and C API for coverage. Mostly written by Josip - Dzolonga for the Google Highly Open Participation contest. - - :copyright: 2008 by Josip Dzolonga, Georg Brandl. - :license: BSD. -""" - -import re -import glob -import inspect -import cPickle as pickle -from os import path - -from sphinx.builder import Builder - - -# utility -def write_header(f, text, char='-'): - f.write(text + '\n') - f.write(char * len(text) + '\n') - -def compile_regex_list(name, exps, warnfunc): - lst = [] - for exp in exps: - try: - lst.append(re.compile(exp)) - except Exception: - warnfunc('invalid regex %r in %s' % (exp, name)) - return lst - - -class CoverageBuilder(Builder): - - name = 'coverage' - - def init(self): - self.c_sourcefiles = [] - for pattern in self.config.coverage_c_path: - pattern = path.join(self.srcdir, pattern) - self.c_sourcefiles.extend(glob.glob(pattern)) - - self.c_regexes = [] - for (name, exp) in self.config.coverage_c_regexes.items(): - try: - self.c_regexes.append((name, re.compile(exp))) - except Exception: - self.warn('invalid regex %r in coverage_c_regexes' % exp) - - self.c_ignorexps = {} - for (name, exps) in self.config.coverage_ignore_c_items.iteritems(): - self.c_ignorexps[name] = compile_regex_list('coverage_ignore_c_items', - exps, self.warn) - self.mod_ignorexps = compile_regex_list('coverage_ignore_modules', - self.config.coverage_ignore_modules, - self.warn) - self.cls_ignorexps = compile_regex_list('coverage_ignore_classes', - self.config.coverage_ignore_classes, - self.warn) - self.fun_ignorexps = compile_regex_list('coverage_ignore_functions', - self.config.coverage_ignore_functions, - self.warn) - - def get_outdated_docs(self): - return 'coverage overview' - - def write(self, *ignored): - self.py_undoc = {} - self.build_py_coverage() - self.write_py_coverage() - - self.c_undoc = {} - self.build_c_coverage() - self.write_c_coverage() - - def build_c_coverage(self): - # Fetch all the info from the header files - for filename in self.c_sourcefiles: - undoc = [] - f = open(filename, 'r') - try: - for line in f: - for key, regex in self.c_regexes: - match = regex.match(line) - if match: - name = match.groups()[0] - if name not in self.env.descrefs: - for exp in self.c_ignorexps.get(key, ()): - if exp.match(name): - break - else: - undoc.append((key, name)) - continue - finally: - f.close() - if undoc: - self.c_undoc[filename] = undoc - - def write_c_coverage(self): - output_file = path.join(self.outdir, 'c.txt') - op = open(output_file, 'w') - try: - write_header(op, 'Undocumented C API elements', '=') - op.write('\n') - - for filename, undoc in self.c_undoc.iteritems(): - write_header(op, filename) - for typ, name in undoc: - op.write(' * %-50s [%9s]\n' % (name, typ)) - op.write('\n') - finally: - op.close() - - def build_py_coverage(self): - for mod_name in self.env.modules: - ignore = False - for exp in 
self.mod_ignorexps: - if exp.match(mod_name): - ignore = True - break - if ignore: - continue - - try: - mod = __import__(mod_name, fromlist=['foo']) - except ImportError, err: - self.warn('module %s could not be imported: %s' % (mod_name, err)) - self.py_undoc[mod_name] = {'error': err} - continue - - funcs = [] - classes = {} - - for name, obj in inspect.getmembers(mod): - # diverse module attributes are ignored: - if name[0] == '_': - # begins in an underscore - continue - if not hasattr(obj, '__module__'): - # cannot be attributed to a module - continue - if obj.__module__ != mod_name: - # is not defined in this module - continue - - full_name = '%s.%s' % (mod_name, name) - - if inspect.isfunction(obj): - if full_name not in self.env.descrefs: - for exp in self.fun_ignorexps: - if exp.match(name): - break - else: - funcs.append(name) - elif inspect.isclass(obj): - for exp in self.cls_ignorexps: - if exp.match(name): - break - else: - if full_name not in self.env.descrefs: - # not documented at all - classes[name] = [] - continue - - attrs = [] - - for attr_name, attr in inspect.getmembers(obj, inspect.ismethod): - if attr_name[0] == '_': - # starts with an underscore, ignore it - continue - - full_attr_name = '%s.%s' % (full_name, attr_name) - if full_attr_name not in self.env.descrefs: - attrs.append(attr_name) - - if attrs: - # some attributes are undocumented - classes[name] = attrs - - self.py_undoc[mod_name] = {'funcs': funcs, 'classes': classes} - - def write_py_coverage(self): - output_file = path.join(self.outdir, 'python.txt') - op = open(output_file, 'w') - failed = [] - try: - write_header(op, 'Undocumented Python objects', '=') - - keys = self.py_undoc.keys() - keys.sort() - for name in keys: - undoc = self.py_undoc[name] - if 'error' in undoc: - failed.append((name, undoc['error'])) - else: - if not undoc['classes'] and not undoc['funcs']: - continue - - write_header(op, name) - if undoc['funcs']: - op.write('Functions:\n') - op.writelines(' * %s\n' % x for x in undoc['funcs']) - op.write('\n') - if undoc['classes']: - op.write('Classes:\n') - for name, methods in undoc['classes'].iteritems(): - if not methods: - op.write(' * %s\n' % name) - else: - op.write(' * %s -- missing methods:\n' % name) - op.writelines(' - %s\n' % x for x in methods) - op.write('\n') - - if failed: - write_header(op, 'Modules that failed to import') - op.writelines(' * %s -- %s\n' % x for x in failed) - finally: - op.close() - - def finish(self): - # dump the coverage data to a pickle file too - picklepath = path.join(self.outdir, 'undoc.pickle') - dumpfile = open(picklepath, 'wb') - try: - pickle.dump((self.py_undoc, self.c_undoc), dumpfile) - finally: - dumpfile.close() - - -def setup(app): - app.add_builder(CoverageBuilder) - app.add_config_value('coverage_ignore_modules', [], False) - app.add_config_value('coverage_ignore_functions', [], False) - app.add_config_value('coverage_ignore_classes', [], False) - app.add_config_value('coverage_c_path', [], False) - app.add_config_value('coverage_c_regexes', {}, False) - app.add_config_value('coverage_ignore_c_items', {}, False) diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/ext/doctest.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/ext/doctest.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,337 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.ext.doctest - ~~~~~~~~~~~~~~~~~~ - - Mimic doctest by automatically 
executing code snippets and checking - their results. - - :copyright: 2008 by Georg Brandl. - :license: BSD. -""" - -import re -import sys -import time -import codecs -import StringIO -from os import path -# circumvent relative import -doctest = __import__('doctest') - -from docutils import nodes -from docutils.parsers.rst import directives - -from sphinx.builder import Builder -from sphinx.util.console import bold - -blankline_re = re.compile(r'^\s*<BLANKLINE>', re.MULTILINE) -doctestopt_re = re.compile(r'#\s*doctest:.+$', re.MULTILINE) - -# set up the necessary directives - -def test_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - # use ordinary docutils nodes for test code: they get special attributes - # so that our builder recognizes them, and the other builders are happy. - code = '\n'.join(content) - test = None - if name == 'doctest': - if '<BLANKLINE>' in code: - # convert <BLANKLINE>s to ordinary blank lines for presentation - test = code - code = blankline_re.sub('', code) - if doctestopt_re.search(code): - if not test: - test = code - code = doctestopt_re.sub('', code) - nodetype = nodes.literal_block - if name == 'testsetup' or 'hide' in options: - nodetype = nodes.comment - if arguments: - groups = [x.strip() for x in arguments[0].split(',')] - else: - groups = ['default'] - node = nodetype(code, code, testnodetype=name, groups=groups) - node.line = lineno - if test is not None: - # only save if it differs from code - node['test'] = test - if name == 'testoutput': - # don't try to highlight output - node['language'] = 'none' - node['options'] = {} - if name in ('doctest', 'testoutput') and 'options' in options: - # parse doctest-like output comparison flags - option_strings = options['options'].replace(',', ' ').split() - for option in option_strings: - if (option[0] not in '+-' or option[1:] not in - doctest.OPTIONFLAGS_BY_NAME): - # XXX warn?
- continue - flag = doctest.OPTIONFLAGS_BY_NAME[option[1:]] - node['options'][flag] = (option[0] == '+') - return [node] - -# need to have individual functions for each directive due to different -# options they accept - -def testsetup_directive(*args): - return test_directive(*args) - -def doctest_directive(*args): - return test_directive(*args) - -def testcode_directive(*args): - return test_directive(*args) - -def testoutput_directive(*args): - return test_directive(*args) - - -parser = doctest.DocTestParser() - -# helper classes - -class TestGroup(object): - def __init__(self, name): - self.name = name - self.setup = [] - self.tests = [] - - def add_code(self, code): - if code.type == 'testsetup': - self.setup.append(code) - elif code.type == 'doctest': - self.tests.append([code]) - elif code.type == 'testcode': - self.tests.append([code, None]) - elif code.type == 'testoutput': - if self.tests and len(self.tests[-1]) == 2: - self.tests[-1][1] = code - else: - raise RuntimeError('invalid TestCode type') - - def __repr__(self): - return 'TestGroup(name=%r, setup=%r, tests=%r)' % ( - self.name, self.setup, self.tests) - - -class TestCode(object): - def __init__(self, code, type, lineno, options=None): - self.code = code - self.type = type - self.lineno = lineno - self.options = options or {} - - def __repr__(self): - return 'TestCode(%r, %r, %r, options=%r)' % ( - self.code, self.type, self.lineno, self.options) - - -class SphinxDocTestRunner(doctest.DocTestRunner): - def summarize(self, out, verbose=None): - io = StringIO.StringIO() - old_stdout = sys.stdout - sys.stdout = io - try: - res = doctest.DocTestRunner.summarize(self, verbose) - finally: - sys.stdout = old_stdout - out(io.getvalue()) - return res - -# the new builder -- use sphinx-build.py -b doctest to run - -class DocTestBuilder(Builder): - """ - Runs test snippets in the documentation. - """ - name = 'doctest' - - def init(self): - # default options - self.opt = doctest.DONT_ACCEPT_TRUE_FOR_1 | doctest.ELLIPSIS | \ - doctest.IGNORE_EXCEPTION_DETAIL - - # HACK HACK HACK - # doctest compiles its snippets with type 'single'. That is nice - # for doctest examples but unusable for multi-statement code such - # as setup code -- to be able to use doctest error reporting with - # that code nevertheless, we monkey-patch the "compile" it uses. 
- doctest.compile = self.compile - - self.type = 'single' - - self.total_failures = 0 - self.total_tries = 0 - self.setup_failures = 0 - self.setup_tries = 0 - - date = time.strftime('%Y-%m-%d %H:%M:%S') - - self.outfile = codecs.open(path.join(self.outdir, 'output.txt'), - 'w', encoding='utf-8') - self.outfile.write('''\ -Results of doctest builder run on %s -==================================%s -''' % (date, '='*len(date))) - - def _out(self, text): - self.info(text, nonl=True) - self.outfile.write(text) - - def _warn_out(self, text): - self.info(text, nonl=True) - if self.app.quiet: - self.warn(text) - self.outfile.write(text) - - def get_target_uri(self, docname, typ=None): - return '' - - def get_outdated_docs(self): - return self.env.found_docs - - def finish(self): - # write executive summary - def s(v): - return v != 1 and 's' or '' - self._out(''' -Doctest summary -=============== -%5d test%s -%5d failure%s in tests -%5d failure%s in setup code -''' % (self.total_tries, s(self.total_tries), - self.total_failures, s(self.total_failures), - self.setup_failures, s(self.setup_failures))) - self.outfile.close() - - if self.total_failures or self.setup_failures: - self.app.statuscode = 1 - - sys.path[0:0] = self.config.doctest_path - - def write(self, build_docnames, updated_docnames, method='update'): - if build_docnames is None: - build_docnames = sorted(self.env.all_docs) - - self.info(bold('running tests...')) - for docname in build_docnames: - # no need to resolve the doctree - doctree = self.env.get_doctree(docname) - self.test_doc(docname, doctree) - - def test_doc(self, docname, doctree): - groups = {} - add_to_all_groups = [] - self.setup_runner = SphinxDocTestRunner(verbose=False, - optionflags=self.opt) - self.test_runner = SphinxDocTestRunner(verbose=False, - optionflags=self.opt) - if self.config.doctest_test_doctest_blocks: - def condition(node): - return (isinstance(node, (nodes.literal_block, nodes.comment)) - and node.has_key('testnodetype')) or \ - isinstance(node, nodes.doctest_block) - else: - def condition(node): - return isinstance(node, (nodes.literal_block, nodes.comment)) \ - and node.has_key('testnodetype') - for node in doctree.traverse(condition): - source = node.has_key('test') and node['test'] or node.astext() - if not source: - self.warn('no code/output in %s block at %s:%s' % - (node.get('testnodetype', 'doctest'), - self.env.doc2path(docname), node.line)) - code = TestCode(source, type=node.get('testnodetype', 'doctest'), - lineno=node.line, options=node.get('options')) - node_groups = node.get('groups', ['default']) - if '*' in node_groups: - add_to_all_groups.append(code) - continue - for groupname in node_groups: - if groupname not in groups: - groups[groupname] = TestGroup(groupname) - groups[groupname].add_code(code) - for code in add_to_all_groups: - for group in groups.itervalues(): - group.add_code(code) - if not groups: - return - - self._out('\nDocument: %s\n----------%s\n' % (docname, '-'*len(docname))) - for group in groups.itervalues(): - self.test_group(group, self.env.doc2path(docname, base=None)) - # Separately count results from setup code - res_f, res_t = self.setup_runner.summarize(self._out, verbose=False) - self.setup_failures += res_f - self.setup_tries += res_t - if self.test_runner.tries: - res_f, res_t = self.test_runner.summarize(self._out, verbose=True) - self.total_failures += res_f - self.total_tries += res_t - - def compile(self, code, name, type, flags, dont_inherit): - return compile(code, name, self.type, flags, 
dont_inherit) - - def test_group(self, group, filename): - ns = {} - examples = [] - for setup in group.setup: - examples.append(doctest.Example(setup.code, '', lineno=setup.lineno)) - if examples: - # simulate a doctest with the setup code - setup_doctest = doctest.DocTest(examples, {}, - '%s (setup code)' % group.name, - filename, 0, None) - setup_doctest.globs = ns - old_f = self.setup_runner.failures - self.type = 'exec' # the snippet may contain multiple statements - self.setup_runner.run(setup_doctest, out=self._warn_out, - clear_globs=False) - if self.setup_runner.failures > old_f: - # don't run the group - return - for code in group.tests: - if len(code) == 1: - test = parser.get_doctest(code[0].code, {}, - group.name, filename, code[0].lineno) - if not test.examples: - continue - for example in test.examples: - # apply directive's comparison options - new_opt = code[0].options.copy() - new_opt.update(example.options) - example.options = new_opt - self.type = 'single' # ordinary doctests - else: - output = code[1] and code[1].code or '' - options = code[1] and code[1].options or {} - # disable processing as it is not needed - options[doctest.DONT_ACCEPT_BLANKLINE] = True - example = doctest.Example(code[0].code, output, - lineno=code[0].lineno, - options=options) - test = doctest.DocTest([example], {}, group.name, - filename, code[0].lineno, None) - self.type = 'exec' # multiple statements again - # DocTest.__init__ copies the globs namespace, which we don't want - test.globs = ns - # also don't clear the globs namespace after running the doctest - self.test_runner.run(test, out=self._warn_out, clear_globs=False) - - -def setup(app): - app.add_directive('testsetup', testsetup_directive, 1, (0, 1, 1)) - app.add_directive('doctest', doctest_directive, 1, (0, 1, 1), - hide=directives.flag, options=directives.unchanged) - app.add_directive('testcode', testcode_directive, 1, (0, 1, 1), - hide=directives.flag) - app.add_directive('testoutput', testoutput_directive, 1, (0, 1, 1), - hide=directives.flag, options=directives.unchanged) - app.add_builder(DocTestBuilder) - # this config value adds to sys.path - app.add_config_value('doctest_path', [], False) - app.add_config_value('doctest_test_doctest_blocks', 'default', False) diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/ext/ifconfig.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/ext/ifconfig.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.ext.ifconfig - ~~~~~~~~~~~~~~~~~~~ - - Provides the ``ifconfig`` directive that allows to write documentation - that is included depending on configuration variables. - - Usage:: - - .. ifconfig:: releaselevel in ('alpha', 'beta', 'rc') - - This stuff is only included in the built docs for unstable versions. - - The argument for ``ifconfig`` is a plain Python expression, evaluated in the - namespace of the project configuration (that is, all variables from ``conf.py`` - are available.) - - :copyright: 2008 by Georg Brandl. - :license: BSD. 
-""" - -from docutils import nodes - - -class ifconfig(nodes.Element): pass - - -def ifconfig_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - node = ifconfig() - node.line = lineno - node['expr'] = arguments[0] - state.nested_parse(content, content_offset, node) - return [node] - - -def process_ifconfig_nodes(app, doctree, docname): - ns = app.config.__dict__.copy() - ns['builder'] = app.builder.name - for node in doctree.traverse(ifconfig): - try: - res = eval(node['expr'], ns) - except Exception, err: - # handle exceptions in a clean fashion - from traceback import format_exception_only - msg = ''.join(format_exception_only(err.__class__, err)) - newnode = doctree.reporter.error('Exception occured in ' - 'ifconfig expression: \n%s' % - msg, base_node=node) - node.replace_self(newnode) - else: - if not res: - node.replace_self([]) - else: - node.replace_self(node.children) - - -def setup(app): - app.add_node(ifconfig) - app.add_directive('ifconfig', ifconfig_directive, 1, (1, 0, 1)) - app.connect('doctree-resolved', process_ifconfig_nodes) diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/ext/intersphinx.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/ext/intersphinx.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,141 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.ext.intersphinx - ~~~~~~~~~~~~~~~~~~~~~~ - - Insert links to Python objects documented in remote Sphinx documentation. - - This works as follows: - - * Each Sphinx HTML build creates a file named "objects.inv" that contains - a mapping from Python identifiers to URIs relative to the HTML set's root. - - * Projects using the Intersphinx extension can specify links to such mapping - files in the `intersphinx_mapping` config value. The mapping will then be - used to resolve otherwise missing references to Python objects into links - to the other documentation. - - * By default, the mapping file is assumed to be at the same location as the - rest of the documentation; however, the location of the mapping file can - also be specified individually, e.g. if the docs should be buildable - without Internet access. - - :copyright: 2008 by Georg Brandl. - :license: BSD. 
-""" - -import time -import urllib -import posixpath -from os import path - -from docutils import nodes - -from sphinx.builder import INVENTORY_FILENAME - - -def fetch_inventory(app, uri, inv): - """Fetch, parse and return an intersphinx inventory file.""" - invdata = {} - # both *uri* (base URI of the links to generate) and *inv* (actual - # location of the inventory file) can be local or remote URIs - localuri = uri.find('://') == -1 - try: - if inv.find('://') != -1: - f = urllib.urlopen(inv) - else: - f = open(path.join(app.srcdir, inv)) - except Exception, err: - app.warn('intersphinx inventory %r not fetchable due to ' - '%s: %s' % (inv, err.__class__, err)) - return - try: - line = f.next() - if line.rstrip() != '# Sphinx inventory version 1': - raise ValueError('unknown or unsupported inventory version') - line = f.next() - projname = line.rstrip()[11:].decode('utf-8') - line = f.next() - version = line.rstrip()[11:] - for line in f: - name, type, location = line.rstrip().split(None, 2) - if localuri: - location = path.join(uri, location) - else: - location = posixpath.join(uri, location) - invdata[name] = (type, projname, version, location) - f.close() - except Exception, err: - app.warn('intersphinx inventory %r not readable due to ' - '%s: %s' % (inv, err.__class__, err)) - else: - return invdata - - -def load_mappings(app): - """Load all intersphinx mappings into the environment.""" - now = int(time.time()) - cache_time = now - app.config.intersphinx_cache_limit * 86400 - env = app.builder.env - if not hasattr(env, 'intersphinx_cache'): - env.intersphinx_cache = {} - cache = env.intersphinx_cache - update = False - for uri, inv in app.config.intersphinx_mapping.iteritems(): - # we can safely assume that the uri<->inv mapping is not changed - # during partial rebuilds since a changed intersphinx_mapping - # setting will cause a full environment reread - if not inv: - inv = posixpath.join(uri, INVENTORY_FILENAME) - # decide whether the inventory must be read: always read local - # files; remote ones only if the cache time is expired - if '://' not in inv or uri not in cache \ - or cache[uri][0] < cache_time: - invdata = fetch_inventory(app, uri, inv) - cache[uri] = (now, invdata) - update = True - if update: - env.intersphinx_inventory = {} - for _, invdata in cache.itervalues(): - if invdata: - env.intersphinx_inventory.update(invdata) - - -def missing_reference(app, env, node, contnode): - """Attempt to resolve a missing reference via intersphinx references.""" - type = node['reftype'] - target = node['reftarget'] - if type == 'mod': - type, proj, version, uri = env.intersphinx_inventory.get(target, - ('','','','')) - if type != 'mod': - return None - target = 'module-' + target # for link anchor - else: - if target[-2:] == '()': - target = target[:-2] - target = target.rstrip(' *') - # special case: exceptions and object methods - if type == 'exc' and '.' not in target and \ - 'exceptions.' + target in env.intersphinx_inventory: - target = 'exceptions.' + target - elif type in ('func', 'meth') and '.' not in target and \ - 'object.' + target in env.intersphinx_inventory: - target = 'object.' 
+ target - if target not in env.intersphinx_inventory: - return None - type, proj, version, uri = env.intersphinx_inventory[target] - print "Intersphinx hit:", target, uri - newnode = nodes.reference('', '') - newnode['refuri'] = uri + '#' + target - newnode['reftitle'] = '(in %s v%s)' % (proj, version) - newnode['class'] = 'external-xref' - newnode.append(contnode) - return newnode - - -def setup(app): - app.add_config_value('intersphinx_mapping', {}, True) - app.add_config_value('intersphinx_cache_limit', 5, False) - app.connect('missing-reference', missing_reference) - app.connect('builder-inited', load_mappings) diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/ext/jsmath.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/ext/jsmath.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.ext.jsmath - ~~~~~~~~~~~~~~~~~ - - Set up everything for use of JSMath to display math in HTML - via JavaScript. - - :copyright: 2008 by Georg Brandl. - :license: BSD. -""" - -from docutils import nodes - -from sphinx.application import ExtensionError -from sphinx.ext.mathbase import setup as mathbase_setup - - -def html_visit_math(self, node): - self.body.append(self.starttag(node, 'span', '', CLASS='math')) - self.body.append(self.encode(node['latex']) + '') - raise nodes.SkipNode - -def html_visit_displaymath(self, node): - if node['nowrap']: - self.body.append(self.starttag(node, 'div', CLASS='math')) - self.body.append(node['latex']) - self.body.append('') - raise nodes.SkipNode - for i, part in enumerate(node['latex'].split('\n\n')): - part = self.encode(part) - if i == 0: - # necessary to e.g. set the id property correctly - if node['number']: - self.body.append('(%s)' % node['number']) - self.body.append(self.starttag(node, 'div', CLASS='math')) - else: - # but only once! - self.body.append('

<div class="math">') - if '&' in part or '\\\\' in part: - self.body.append('\\begin{split}' + part + '\\end{split}') - else: - self.body.append(part) - self.body.append('</div>
    \n') - raise nodes.SkipNode - -def builder_inited(app): - if not app.config.jsmath_path: - raise ExtensionError('jsmath_path config value must be set for the ' - 'jsmath extension to work') - app.add_javascript(app.config.jsmath_path) - - -def setup(app): - mathbase_setup(app, (html_visit_math, None), (html_visit_displaymath, None)) - app.add_config_value('jsmath_path', '', False) - app.connect('builder-inited', builder_inited) diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/ext/mathbase.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/ext/mathbase.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,139 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.ext.mathbase - ~~~~~~~~~~~~~~~~~~~ - - Set up math support in source files and LaTeX/text output. - - :copyright: 2008 by Georg Brandl. - :license: BSD. -""" - -from docutils import nodes, utils -from docutils.parsers.rst import directives - - -class math(nodes.Inline, nodes.TextElement): - pass - -class displaymath(nodes.Part, nodes.Element): - pass - -class eqref(nodes.Inline, nodes.TextElement): - pass - - -def wrap_displaymath(math, label): - parts = math.split('\n\n') - ret = [] - for i, part in enumerate(parts): - if label is not None and i == 0: - ret.append('\\begin{split}%s\\end{split}' % part + - (label and '\\label{'+label+'}' or '')) - else: - ret.append('\\begin{split}%s\\end{split}\\notag' % part) - return '\\begin{gather}\n' + '\\\\'.join(ret) + '\n\\end{gather}' - - -def math_role(role, rawtext, text, lineno, inliner, options={}, content=[]): - latex = utils.unescape(text, restore_backslashes=True) - return [math(latex=latex)], [] - -def eq_role(role, rawtext, text, lineno, inliner, options={}, content=[]): - text = utils.unescape(text) - node = eqref('(?)', '(?)', target=text) - node['docname'] = inliner.document.settings.env.docname - return [node], [] - -def math_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - latex = '\n'.join(content) - if arguments and arguments[0]: - latex = arguments[0] + '\n\n' + latex - node = displaymath() - node['latex'] = latex - node['label'] = options.get('label', None) - node['nowrap'] = 'nowrap' in options - node['docname'] = state.document.settings.env.docname - ret = [node] - if node['label']: - tnode = nodes.target('', '', ids=['equation-' + node['label']]) - state.document.note_explicit_target(tnode) - ret.insert(0, tnode) - return ret - - -def latex_visit_math(self, node): - self.body.append('$' + node['latex'] + '$') - raise nodes.SkipNode - -def latex_visit_displaymath(self, node): - if node['nowrap']: - self.body.append(node['latex']) - else: - label = node['label'] and node['docname'] + '-' + node['label'] or None - self.body.append(wrap_displaymath(node['latex'], label)) - raise nodes.SkipNode - -def latex_visit_eqref(self, node): - self.body.append('\\eqref{%s-%s}' % (node['docname'], node['target'])) - raise nodes.SkipNode - - -def text_visit_math(self, node): - self.add_text(node['latex']) - raise nodes.SkipNode - -def text_visit_displaymath(self, node): - self.new_state() - self.add_text(node['latex']) - self.end_state() - raise nodes.SkipNode - -def text_visit_eqref(self, node): - self.add_text(node['target']) - raise nodes.SkipNode - - -def html_visit_eqref(self, node): - self.body.append('' % node['target']) - -def html_depart_eqref(self, node): - self.body.append('') - - -def 
number_equations(app, doctree, docname): - num = 0 - numbers = {} - for node in doctree.traverse(displaymath): - if node['label'] is not None: - num += 1 - node['number'] = num - numbers[node['label']] = num - else: - node['number'] = None - for node in doctree.traverse(eqref): - if node['target'] not in numbers: - continue - num = '(%d)' % numbers[node['target']] - node[0] = nodes.Text(num, num) - - -def setup(app, htmlinlinevisitors, htmldisplayvisitors): - app.add_node(math, - latex=(latex_visit_math, None), - text=(text_visit_math, None), - html=htmlinlinevisitors) - app.add_node(displaymath, - latex=(latex_visit_displaymath, None), - text=(text_visit_displaymath, None), - html=htmldisplayvisitors) - app.add_node(eqref, - latex=(latex_visit_eqref, None), - text=(text_visit_eqref, None), - html=(html_visit_eqref, html_depart_eqref)) - app.add_role('math', math_role) - app.add_role('eq', eq_role) - app.add_directive('math', math_directive, 1, (0, 1, 1), - label=directives.unchanged, nowrap=directives.flag) - app.connect('doctree-resolved', number_equations) diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/ext/pngmath.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/ext/pngmath.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,224 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.ext.pngmath - ~~~~~~~~~~~~~~~~~~ - - Render math in HTML via dvipng. - - :copyright: 2008 by Georg Brandl. - :license: BSD. -""" - -import re -import shutil -import tempfile -import posixpath -from os import path, getcwd, chdir -from subprocess import Popen, PIPE -try: - from hashlib import sha1 as sha -except ImportError: - from sha import sha - -from docutils import nodes - -from sphinx.util import ensuredir -from sphinx.util.png import read_png_depth, write_png_depth -from sphinx.application import SphinxError -from sphinx.ext.mathbase import setup as mathbase_setup, wrap_displaymath - -class MathExtError(SphinxError): - category = 'Math extension error' - - -DOC_HEAD = r''' -\documentclass[12pt]{article} -\usepackage[utf8]{inputenc} -\usepackage{amsmath} -\usepackage{amsthm} -\usepackage{amssymb} -\usepackage{amsfonts} -\usepackage{bm} -\pagestyle{empty} -''' - -DOC_BODY = r''' -\begin{document} -%s -\end{document} -''' - -DOC_BODY_PREVIEW = r''' -\usepackage[active]{preview} -\begin{document} -\begin{preview} -%s -\end{preview} -\end{document} -''' - -depth_re = re.compile(r'\[\d+ depth=(-?\d+)\]') - -def render_math(self, math): - """ - Render the LaTeX math expression *math* using latex and dvipng. - - Return the filename relative to the built document and the "depth", - that is, the distance of image bottom and baseline in pixels, if the - option to use preview_latex is switched on. - - Error handling may seem strange, but follows a pattern: if LaTeX or - dvipng aren't available, only a warning is generated (since that enables - people on machines without these programs to at least build the rest - of the docs successfully). If the programs are there, however, they - may not fail since that indicates a problem in the math source. 
- """ - use_preview = self.builder.config.pngmath_use_preview - - shasum = "%s.png" % sha(math.encode('utf-8')).hexdigest() - relfn = posixpath.join(self.builder.imgpath, 'math', shasum) - outfn = path.join(self.builder.outdir, '_images', 'math', shasum) - if path.isfile(outfn): - depth = read_png_depth(outfn) - return relfn, depth - - latex = DOC_HEAD + self.builder.config.pngmath_latex_preamble - latex += (use_preview and DOC_BODY_PREVIEW or DOC_BODY) % math - if isinstance(latex, unicode): - latex = latex.encode('utf-8') - - # use only one tempdir per build -- the use of a directory is cleaner - # than using temporary files, since we can clean up everything at once - # just removing the whole directory (see cleanup_tempdir) - if not hasattr(self.builder, '_mathpng_tempdir'): - tempdir = self.builder._mathpng_tempdir = tempfile.mkdtemp() - else: - tempdir = self.builder._mathpng_tempdir - - tf = open(path.join(tempdir, 'math.tex'), 'w') - tf.write(latex) - tf.close() - - # build latex command; old versions of latex don't have the - # --output-directory option, so we have to manually chdir to the - # temp dir to run it. - ltx_args = [self.builder.config.pngmath_latex, '--interaction=nonstopmode'] - # add custom args from the config file - ltx_args.extend(self.builder.config.pngmath_latex_args) - ltx_args.append('math.tex') - - curdir = getcwd() - chdir(tempdir) - - try: - try: - p = Popen(ltx_args, stdout=PIPE, stderr=PIPE) - except OSError, err: - if err.errno != 2: # No such file or directory - raise - if not hasattr(self.builder, '_mathpng_warned_latex'): - self.builder.warn('LaTeX command %r cannot be run (needed for math ' - 'display), check the pngmath_latex setting' % - self.builder.config.pngmath_latex) - self.builder._mathpng_warned_latex = True - return relfn, None - finally: - chdir(curdir) - - stdout, stderr = p.communicate() - if p.returncode != 0: - raise MathExtError('latex exited with error:\n[stderr]\n%s\n[stdout]\n%s' - % (stderr, stdout)) - - ensuredir(path.dirname(outfn)) - # use some standard dvipng arguments - dvipng_args = [self.builder.config.pngmath_dvipng] - dvipng_args += ['-o', outfn, '-T', 'tight', '-z9'] - # add custom ones from config value - dvipng_args.extend(self.builder.config.pngmath_dvipng_args) - if use_preview: - dvipng_args.append('--depth') - # last, the input file name - dvipng_args.append(path.join(tempdir, 'math.dvi')) - try: - p = Popen(dvipng_args, stdout=PIPE, stderr=PIPE) - except OSError, err: - if err.errno != 2: # No such file or directory - raise - if not hasattr(self.builder, '_mathpng_warned_dvipng'): - self.builder.warn('dvipng command %r cannot be run (needed for math ' - 'display), check the pngmath_dvipng setting' % - self.builder.config.pngmath_dvipng) - self.builder._mathpng_warned_dvipng = True - return relfn, None - stdout, stderr = p.communicate() - if p.returncode != 0: - raise MathExtError('dvipng exited with error:\n[stderr]\n%s\n[stdout]\n%s' - % (stderr, stdout)) - depth = None - if use_preview: - for line in stdout.splitlines(): - m = depth_re.match(line) - if m: - depth = int(m.group(1)) - write_png_depth(outfn, depth) - break - - return relfn, depth - -def cleanup_tempdir(app, exc): - if exc: - return - if not hasattr(app.builder, '_mathpng_tempdir'): - return - try: - shutil.rmtree(app.builder._mathpng_tempdir) - except Exception: - pass - -def html_visit_math(self, node): - try: - fname, depth = render_math(self, '$'+node['latex']+'$') - except MathExtError, exc: - sm = nodes.system_message(str(exc), type='WARNING', 
level=2, - backrefs=[], source=node['latex']) - sm.walkabout(self) - self.builder.warn('display latex %r: ' % node['latex'] + str(exc)) - raise nodes.SkipNode - self.body.append('%s' % - (fname, self.encode(node['latex']).strip(), - depth and 'style="vertical-align: %dpx" ' % (-depth) or '')) - raise nodes.SkipNode - -def html_visit_displaymath(self, node): - if node['nowrap']: - latex = node['latex'] - else: - latex = wrap_displaymath(node['latex'], None) - try: - fname, depth = render_math(self, latex) - except MathExtError, exc: - sm = nodes.system_message(str(exc), type='WARNING', level=2, - backrefs=[], source=node['latex']) - sm.walkabout(self) - self.builder.warn('inline latex %r: ' % node['latex'] + str(exc)) - raise nodes.SkipNode - self.body.append(self.starttag(node, 'div', CLASS='math')) - self.body.append('

<p>') - if node['number']: - self.body.append('<span class="eqno">(%s)</span>' % node['number']) - self.body.append('<img src="%s" alt="%s" />\n</div>' % - (fname, self.encode(node['latex']).strip())) - self.body.append('</p>

    ') - raise nodes.SkipNode - - -def setup(app): - mathbase_setup(app, (html_visit_math, None), (html_visit_displaymath, None)) - app.add_config_value('pngmath_dvipng', 'dvipng', False) - app.add_config_value('pngmath_latex', 'latex', False) - app.add_config_value('pngmath_use_preview', False, False) - app.add_config_value('pngmath_dvipng_args', ['-gamma 1.5', '-D 110'], False) - app.add_config_value('pngmath_latex_args', [], False) - app.add_config_value('pngmath_latex_preamble', '', False) - app.connect('build-finished', cleanup_tempdir) diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/ext/refcounting.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/ext/refcounting.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.ext.refcounting - ~~~~~~~~~~~~~~~~~~~~~~ - - Supports reference count annotations for C API functions. Based on - refcount.py and anno-api.py in the old Python documentation tools. - - Usage: Set the `refcount_file` config value to the path to the reference - count data file. - - :copyright: 2008 by Georg Brandl. - :license: BSD. -""" - -from os import path -from docutils import nodes - -from sphinx import addnodes - - -# refcount annotation -class refcount(nodes.emphasis): pass - - -class RCEntry: - def __init__(self, name): - self.name = name - self.args = [] - self.result_type = '' - self.result_refs = None - - -class Refcounts(dict): - @classmethod - def fromfile(cls, filename): - d = cls() - fp = open(filename, 'r') - try: - for line in fp: - line = line.strip() - if line[:1] in ("", "#"): - # blank lines and comments - continue - parts = line.split(":", 4) - if len(parts) != 5: - raise ValueError("Wrong field count in %r" % line) - function, type, arg, refcount, comment = parts - # Get the entry, creating it if needed: - try: - entry = d[function] - except KeyError: - entry = d[function] = RCEntry(function) - if not refcount or refcount == "null": - refcount = None - else: - refcount = int(refcount) - # Update the entry with the new parameter or the result information. - if arg: - entry.args.append((arg, type, refcount)) - else: - entry.result_type = type - entry.result_refs = refcount - finally: - fp.close() - return d - - def add_refcount_annotations(self, app, doctree): - for node in doctree.traverse(addnodes.desc_content): - par = node.parent - if par['desctype'] != 'cfunction': - continue - if not par[0].has_key('names') or not par[0]['names']: - continue - entry = self.get(par[0]['names'][0]) - if not entry: - continue - elif entry.result_type not in ("PyObject*", "PyVarObject*"): - continue - rc = 'Return value: ' - if entry.result_refs is None: - rc += "Always NULL." - else: - rc += (entry.result_refs and "New" or "Borrowed") + " reference." 
- node.insert(0, refcount(rc, rc)) - - -def init_refcounts(app): - if app.config.refcount_file: - refcounts = Refcounts.fromfile( - path.join(app.srcdir, app.config.refcount_file)) - app.connect('doctree-read', refcounts.add_refcount_annotations) - - -def setup(app): - app.add_node(refcount) - app.add_config_value('refcount_file', '', True) - app.connect('builder-inited', init_refcounts) diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/ext/todo.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/ext/todo.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,123 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.ext.todo - ~~~~~~~~~~~~~~~ - - Allow todos to be inserted into your documentation. Inclusion of todos can - be switched of by a configuration variable. The todolist directive collects - all todos of your project and lists them along with a backlink to the - original location. - - :copyright: 2008 Daniel Bültmann. - :license: BSD. -""" - -from docutils import nodes - -from sphinx.util.compat import make_admonition - -class todo_node(nodes.Admonition, nodes.Element): pass -class todolist(nodes.General, nodes.Element): pass - - -def todo_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - env = state.document.settings.env - - targetid = "todo-%s" % env.index_num - env.index_num += 1 - targetnode = nodes.target('', '', ids=[targetid]) - - ad = make_admonition(todo_node, name, [_('Todo')], options, content, lineno, - content_offset, block_text, state, state_machine) - - # Attach a list of all todos to the environment, - # the todolist works with the collected todo nodes - if not hasattr(env, 'todo_all_todos'): - env.todo_all_todos = [] - env.todo_all_todos.append({ - 'docname': env.docname, - 'lineno': lineno, - 'todo': ad[0].deepcopy(), - 'target': targetnode, - }) - - return [targetnode] + ad - - -def todolist_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - # Simply insert an empty todolist node which will be replaced later - # when process_todo_nodes is called - return [todolist('')] - - -def process_todo_nodes(app, doctree, fromdocname): - if not app.config['todo_include_todos']: - for node in doctree.traverse(todo_node): - node.parent.remove(node) - - # Replace all todolist nodes with a list of the collected todos. - # Augment each todo with a backlink to the original location. 
- env = app.builder.env - - for node in doctree.traverse(todolist): - if not app.config['todo_include_todos']: - node.replace_self([]) - continue - - content = [] - - for todo_info in env.todo_all_todos: - para = nodes.paragraph() - filename = env.doc2path(todo_info['docname'], base=None) - description = ( - _('(The original entry is located in %s, line %d and can be found ') % - (filename, todo_info['lineno'])) - para += nodes.Text(description, description) - - # Create a reference - newnode = nodes.reference('', '') - innernode = nodes.emphasis(_('here'), _('here')) - newnode['refdocname'] = todo_info['docname'] - newnode['refuri'] = app.builder.get_relative_uri( - fromdocname, todo_info['docname']) - newnode['refuri'] += '#' + todo_info['target']['refid'] - newnode.append(innernode) - para += newnode - para += nodes.Text('.)', '.)') - - # Insert into the todolist - content.append(todo_info['todo']) - content.append(para) - - node.replace_self(content) - - -def purge_todos(app, env, docname): - if not hasattr(env, 'todo_all_todos'): - return - env.todo_all_todos = [todo for todo in env.todo_all_todos - if todo['docname'] != docname] - - -def visit_todo_node(self, node): - self.visit_admonition(node) - -def depart_todo_node(self, node): - self.depart_admonition(node) - -def setup(app): - app.add_config_value('todo_include_todos', False, False) - - app.add_node(todolist) - app.add_node(todo_node, - html=(visit_todo_node, depart_todo_node), - latex=(visit_todo_node, depart_todo_node), - text=(visit_todo_node, depart_todo_node)) - - app.add_directive('todo', todo_directive, 1, (0, 0, 1)) - app.add_directive('todolist', todolist_directive, 0, (0, 0, 0)) - app.connect('doctree-resolved', process_todo_nodes) - app.connect('env-purge-doc', purge_todos) - diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/highlighting.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/highlighting.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,195 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.highlighting - ~~~~~~~~~~~~~~~~~~~ - - Highlight code blocks using Pygments. - - :copyright: 2007-2008 by Georg Brandl. - :license: BSD. -""" - -import sys -import cgi -import re -import parser - -from sphinx.util.texescape import tex_hl_escape_map - -try: - import pygments - from pygments import highlight - from pygments.lexers import PythonLexer, PythonConsoleLexer, CLexer, \ - TextLexer, RstLexer - from pygments.lexers import get_lexer_by_name, guess_lexer - from pygments.formatters import HtmlFormatter, LatexFormatter - from pygments.filters import ErrorToken - from pygments.style import Style - from pygments.styles import get_style_by_name - from pygments.styles.friendly import FriendlyStyle - from pygments.token import Generic, Comment, Number -except ImportError: - pygments = None -else: - class SphinxStyle(Style): - """ - Like friendly, but a bit darker to enhance contrast on the green - background. 
- """ - - background_color = '#eeffcc' - default_style = '' - - styles = FriendlyStyle.styles - styles.update({ - Generic.Output: '#333', - Comment: 'italic #408090', - Number: '#208050', - }) - - lexers = dict( - none = TextLexer(), - python = PythonLexer(), - pycon = PythonConsoleLexer(), - # the python3 option exists as of Pygments 0.12, but it doesn't - # do any harm in previous versions - pycon3 = PythonConsoleLexer(python3=True), - rest = RstLexer(), - c = CLexer(), - ) - for _lexer in lexers.values(): - _lexer.add_filter('raiseonerror') - - -escape_hl_chars = {ord(u'@'): u'@PYGZat[]', - ord(u'['): u'@PYGZlb[]', - ord(u']'): u'@PYGZrb[]'} - -# used if Pygments is not available -_LATEX_STYLES = r''' -\newcommand\PYGZat{@} -\newcommand\PYGZlb{[} -\newcommand\PYGZrb{]} -''' - - -parsing_exceptions = (SyntaxError, UnicodeEncodeError) -if sys.version_info < (2, 5): - # Python <= 2.4 raises MemoryError when parsing an - # invalid encoding cookie - parsing_exceptions += MemoryError, - - -class PygmentsBridge(object): - def __init__(self, dest='html', stylename='sphinx'): - self.dest = dest - if not pygments: - return - if stylename == 'sphinx': - style = SphinxStyle - elif '.' in stylename: - module, stylename = stylename.rsplit('.', 1) - style = getattr(__import__(module, None, None, ['']), stylename) - else: - style = get_style_by_name(stylename) - self.hfmter = {False: HtmlFormatter(style=style), - True: HtmlFormatter(style=style, linenos=True)} - self.lfmter = {False: LatexFormatter(style=style, commandprefix='PYG'), - True: LatexFormatter(style=style, linenos=True, - commandprefix='PYG')} - - def unhighlighted(self, source): - if self.dest == 'html': - return '
<pre>' + cgi.escape(source) + '</pre>
    \n' - else: - # first, escape highlighting characters like Pygments does - source = source.translate(escape_hl_chars) - # then, escape all characters nonrepresentable in LaTeX - source = source.translate(tex_hl_escape_map) - return '\\begin{Verbatim}[commandchars=@\\[\\]]\n' + \ - source + '\\end{Verbatim}\n' - - def try_parse(self, src): - # Make sure it ends in a newline - src += '\n' - - # Replace "..." by a mark which is also a valid python expression - # (Note, the highlighter gets the original source, this is only done - # to allow "..." in code and still highlight it as Python code.) - mark = "__highlighting__ellipsis__" - src = src.replace("...", mark) - - # lines beginning with "..." are probably placeholders for suite - src = re.sub(r"(?m)^(\s*)" + mark + "(.)", r"\1"+ mark + r"# \2", src) - - # if we're using 2.5, use the with statement - if sys.version_info >= (2, 5): - src = 'from __future__ import with_statement\n' + src - - if isinstance(src, unicode): - # Non-ASCII chars will only occur in string literals - # and comments. If we wanted to give them to the parser - # correctly, we'd have to find out the correct source - # encoding. Since it may not even be given in a snippet, - # just replace all non-ASCII characters. - src = src.encode('ascii', 'replace') - - try: - parser.suite(src) - except parsing_exceptions: - return False - else: - return True - - def highlight_block(self, source, lang, linenos=False): - if not pygments: - return self.unhighlighted(source) - if lang in ('py', 'python'): - if source.startswith('>>>'): - # interactive session - lexer = lexers['pycon'] - else: - # maybe Python -- try parsing it - if self.try_parse(source): - lexer = lexers['python'] - else: - return self.unhighlighted(source) - elif lang in ('python3', 'py3') and source.startswith('>>>'): - # for py3, recognize interactive sessions, but do not try parsing... - lexer = lexers['pycon3'] - elif lang == 'guess': - try: - lexer = guess_lexer(source) - except Exception: - return self.unhighlighted(source) - else: - if lang in lexers: - lexer = lexers[lang] - else: - lexer = lexers[lang] = get_lexer_by_name(lang) - lexer.add_filter('raiseonerror') - try: - if self.dest == 'html': - return highlight(source, lexer, self.hfmter[bool(linenos)]) - else: - hlsource = highlight(source, lexer, self.lfmter[bool(linenos)]) - return hlsource.translate(tex_hl_escape_map) - except ErrorToken: - # this is most probably not the selected language, - # so let it pass unhighlighted - return self.unhighlighted(source) - - def get_stylesheet(self): - if not pygments: - if self.dest == 'latex': - return _LATEX_STYLES - # no HTML styles needed - return '' - if self.dest == 'html': - return self.hfmter[0].get_style_defs() - else: - styledefs = self.lfmter[0].get_style_defs() - # workaround for Pygments < 0.12 - if styledefs.startswith('\\newcommand\\at{@}'): - styledefs += _LATEX_STYLES - return styledefs diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/htmlhelp.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/htmlhelp.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,220 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.htmlhelp - ~~~~~~~~~~~~~~~ - - Build HTML help support files. - Adapted from the original Doc/tools/prechm.py. - - :copyright: 2007-2008 by Georg Brandl. - :license: BSD. 
-""" - -import os -import cgi -from os import path - -from docutils import nodes - -from sphinx import addnodes - -# Project file (*.hhp) template. 'outname' is the file basename (like -# the pythlp in pythlp.hhp); 'version' is the doc version number (like -# the 2.2 in Python 2.2). -# The magical numbers in the long line under [WINDOWS] set most of the -# user-visible features (visible buttons, tabs, etc). -# About 0x10384e: This defines the buttons in the help viewer. The -# following defns are taken from htmlhelp.h. Not all possibilities -# actually work, and not all those that work are available from the Help -# Workshop GUI. In particular, the Zoom/Font button works and is not -# available from the GUI. The ones we're using are marked with 'x': -# -# 0x000002 Hide/Show x -# 0x000004 Back x -# 0x000008 Forward x -# 0x000010 Stop -# 0x000020 Refresh -# 0x000040 Home x -# 0x000080 Forward -# 0x000100 Back -# 0x000200 Notes -# 0x000400 Contents -# 0x000800 Locate x -# 0x001000 Options x -# 0x002000 Print x -# 0x004000 Index -# 0x008000 Search -# 0x010000 History -# 0x020000 Favorites -# 0x040000 Jump 1 -# 0x080000 Jump 2 -# 0x100000 Zoom/Font x -# 0x200000 TOC Next -# 0x400000 TOC Prev - -project_template = '''\ -[OPTIONS] -Binary TOC=Yes -Binary Index=No -Compiled file=%(outname)s.chm -Contents file=%(outname)s.hhc -Default Window=%(outname)s -Default topic=index.html -Display compile progress=No -Full text search stop list file=%(outname)s.stp -Full-text search=Yes -Index file=%(outname)s.hhk -Language=0x409 -Title=%(title)s - -[WINDOWS] -%(outname)s="%(title)s","%(outname)s.hhc","%(outname)s.hhk",\ -"index.html","index.html",,,,,0x63520,220,0x10384e,[0,0,1024,768],,,,,,,0 - -[FILES] -''' - -contents_header = '''\ - - - - - - - - - - -
-'''
-
-contents_footer = '''\
-</UL></BODY></HTML>
    -''' - -object_sitemap = '''\ - - - - -''' - -# List of words the full text search facility shouldn't index. This -# becomes file outname.stp. Note that this list must be pretty small! -# Different versions of the MS docs claim the file has a maximum size of -# 256 or 512 bytes (including \r\n at the end of each line). -# Note that "and", "or", "not" and "near" are operators in the search -# language, so no point indexing them even if we wanted to. -stopwords = """ -a and are as at -be but by -for -if in into is it -near no not -of on or -such -that the their then there these they this to -was will with -""".split() - - -def build_hhx(builder, outdir, outname): - builder.info('dumping stopword list...') - f = open(path.join(outdir, outname+'.stp'), 'w') - try: - for word in sorted(stopwords): - print >>f, word - finally: - f.close() - - builder.info('writing project file...') - f = open(path.join(outdir, outname+'.hhp'), 'w') - try: - f.write(project_template % {'outname': outname, - 'title': builder.config.html_title, - 'version': builder.config.version, - 'project': builder.config.project}) - if not outdir.endswith(os.sep): - outdir += os.sep - olen = len(outdir) - for root, dirs, files in os.walk(outdir): - staticdir = (root == path.join(outdir, '_static')) - for fn in files: - if (staticdir and not fn.endswith('.js')) or fn.endswith('.html'): - print >>f, path.join(root, fn)[olen:].replace(os.sep, '\\') - finally: - f.close() - - builder.info('writing TOC file...') - f = open(path.join(outdir, outname+'.hhc'), 'w') - try: - f.write(contents_header) - # special books - f.write('
<LI> ' + object_sitemap % (builder.config.html_short_title,
-                                            'index.html'))
-        if builder.config.html_use_modindex:
-            f.write('<LI> ' + object_sitemap % (_('Global Module Index'),
-                                                'modindex.html'))
-        # the TOC
-        tocdoc = builder.env.get_and_resolve_doctree(builder.config.master_doc, builder,
-                                                     prune_toctrees=False)
-        def write_toc(node, ullevel=0):
-            if isinstance(node, nodes.list_item):
-                f.write('<LI> ')
-                for subnode in node:
-                    write_toc(subnode, ullevel)
-            elif isinstance(node, nodes.reference):
-                link = node['refuri']
-                title = cgi.escape(node.astext()).replace('"','&quot;')
-                item = object_sitemap % (title, link)
-                f.write(item.encode('ascii', 'xmlcharrefreplace'))
-            elif isinstance(node, nodes.bullet_list):
-                if ullevel != 0:
-                    f.write('<UL>\n')
-                for subnode in node:
-                    write_toc(subnode, ullevel+1)
-                if ullevel != 0:
-                    f.write('</UL>\n')
-            elif isinstance(node, addnodes.compact_paragraph):
-                for subnode in node:
-                    write_toc(subnode, ullevel)
-        istoctree = lambda node: isinstance(node, addnodes.compact_paragraph) and \
-                    node.has_key('toctree')
-        for node in tocdoc.traverse(istoctree):
-            write_toc(node)
-        f.write(contents_footer)
-    finally:
-        f.close()
-
-    builder.info('writing index file...')
-    index = builder.env.create_index(builder)
-    f = open(path.join(outdir, outname+'.hhk'), 'w')
-    try:
-        f.write('<UL>\n')
-        def write_index(title, refs, subitems):
-            def write_param(name, value):
-                item = '    <param name="%s" value="%s">\n' % (name, value)
-                f.write(item.encode('ascii', 'xmlcharrefreplace'))
-            title = cgi.escape(title)
-            f.write('<LI> <OBJECT type="text/sitemap">\n')
-            write_param('Keyword', title)
-            if len(refs) == 0:
-                write_param('See Also', title)
-            elif len(refs) == 1:
-                write_param('Local', refs[0])
-            else:
-                for i, ref in enumerate(refs):
-                    write_param('Name', '[%d] %s' % (i, ref)) # XXX: better title?
-                    write_param('Local', ref)
-            f.write('</OBJECT>\n')
-            if subitems:
-                f.write('<UL> ')
-                for subitem in subitems:
-                    write_index(subitem[0], subitem[1], [])
-                f.write('</UL>')
-        for (key, group) in index:
-            for title, (refs, subitems) in group:
-                write_index(title, refs, subitems)
-        f.write('</UL>
    \n') - finally: - f.close() diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/htmlwriter.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/htmlwriter.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,457 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.htmlwriter - ~~~~~~~~~~~~~~~~~ - - docutils writers handling Sphinx' custom nodes. - - :copyright: 2007-2008 by Georg Brandl. - :license: BSD. -""" - -import sys -import posixpath -import os - -from docutils import nodes -from docutils.writers.html4css1 import Writer, HTMLTranslator as BaseTranslator - -from sphinx.locale import admonitionlabels, versionlabels -from sphinx.highlighting import PygmentsBridge -from sphinx.util.smartypants import sphinx_smarty_pants - -try: - import Image # check for the Python Imaging Library -except ImportError: - Image = None - -class HTMLWriter(Writer): - def __init__(self, builder): - Writer.__init__(self) - self.builder = builder - - def translate(self): - # sadly, this is mostly copied from parent class - self.visitor = visitor = self.builder.translator_class(self.builder, - self.document) - self.document.walkabout(visitor) - self.output = visitor.astext() - for attr in ('head_prefix', 'stylesheet', 'head', 'body_prefix', - 'body_pre_docinfo', 'docinfo', 'body', 'fragment', - 'body_suffix', 'meta', 'title', 'subtitle', 'header', - 'footer', 'html_prolog', 'html_head', 'html_title', - 'html_subtitle', 'html_body', ): - setattr(self, attr, getattr(visitor, attr, None)) - self.clean_meta = ''.join(visitor.meta[2:]) - - -class HTMLTranslator(BaseTranslator): - """ - Our custom HTML translator. - """ - - def __init__(self, builder, *args, **kwds): - BaseTranslator.__init__(self, *args, **kwds) - self.highlighter = PygmentsBridge('html', builder.config.pygments_style) - self.no_smarty = 0 - self.builder = builder - self.highlightlang = builder.config.highlight_language - self.highlightlinenothreshold = sys.maxint - self.protect_literal_text = 0 - - def visit_desc(self, node): - self.body.append(self.starttag(node, 'dl', CLASS=node['desctype'])) - def depart_desc(self, node): - self.body.append('\n\n') - - def visit_desc_signature(self, node): - # the id is set automatically - self.body.append(self.starttag(node, 'dt')) - # anchor for per-desc interactive data - if node.parent['desctype'] != 'describe' and node['ids'] and node['first']: - self.body.append('' % node['ids'][0]) - if node.parent['desctype'] in ('class', 'exception'): - self.body.append('%s ' % node.parent['desctype']) - def depart_desc_signature(self, node): - if node['ids'] and self.builder.add_definition_links: - self.body.append(u'\u00B6' % - _('Permalink to this definition')) - self.body.append('\n') - - def visit_desc_addname(self, node): - self.body.append(self.starttag(node, 'tt', '', CLASS='descclassname')) - def depart_desc_addname(self, node): - self.body.append('') - - def visit_desc_type(self, node): - pass - def depart_desc_type(self, node): - pass - - def visit_desc_name(self, node): - self.body.append(self.starttag(node, 'tt', '', CLASS='descname')) - def depart_desc_name(self, node): - self.body.append('') - - def visit_desc_parameterlist(self, node): - self.body.append('(') - self.first_param = 1 - def depart_desc_parameterlist(self, node): - self.body.append(')') - - def visit_desc_parameter(self, node): - if not self.first_param: - self.body.append(', ') - else: - self.first_param = 0 - if not 
node.hasattr('noemph'): - self.body.append('') - def depart_desc_parameter(self, node): - if not node.hasattr('noemph'): - self.body.append('') - - def visit_desc_optional(self, node): - self.body.append('[') - def depart_desc_optional(self, node): - self.body.append(']') - - def visit_desc_annotation(self, node): - self.body.append(self.starttag(node, 'em', CLASS='property')) - def depart_desc_annotation(self, node): - self.body.append('') - - def visit_desc_content(self, node): - self.body.append(self.starttag(node, 'dd', '')) - def depart_desc_content(self, node): - self.body.append('') - - def visit_refcount(self, node): - self.body.append(self.starttag(node, 'em', '', CLASS='refcount')) - def depart_refcount(self, node): - self.body.append('') - - def visit_versionmodified(self, node): - self.body.append(self.starttag(node, 'p')) - text = versionlabels[node['type']] % node['version'] - if len(node): - text += ': ' - else: - text += '.' - self.body.append('%s' % text) - def depart_versionmodified(self, node): - self.body.append('
</p>
    \n') - - # overwritten - def visit_reference(self, node): - BaseTranslator.visit_reference(self, node) - if node.hasattr('reftitle'): - # ugly hack to add a title attribute - starttag = self.body[-1] - if not starttag.startswith(' tag - self.section_level += 1 - self.body.append(self.starttag(node, 'div', CLASS='section')) - - def visit_title(self, node): - # don't move the id attribute inside the tag - BaseTranslator.visit_title(self, node, move_ids=0) - - # overwritten - def visit_literal_block(self, node): - if node.rawsource != node.astext(): - # most probably a parsed-literal block -- don't highlight - return BaseTranslator.visit_literal_block(self, node) - lang = self.highlightlang - linenos = node.rawsource.count('\n') >= self.highlightlinenothreshold - 1 - if node.has_key('language'): - # code-block directives - lang = node['language'] - if node.has_key('linenos'): - linenos = node['linenos'] - highlighted = self.highlighter.highlight_block(node.rawsource, lang, linenos) - starttag = self.starttag(node, 'div', suffix='', CLASS='highlight-%s' % lang) - self.body.append(starttag + highlighted + '\n') - raise nodes.SkipNode - - def visit_doctest_block(self, node): - self.visit_literal_block(node) - - # overwritten - def visit_literal(self, node): - if len(node.children) == 1 and \ - node.children[0] in ('None', 'True', 'False'): - node['classes'].append('xref') - self.body.append(self.starttag(node, 'tt', '', CLASS='docutils literal')) - self.protect_literal_text += 1 - def depart_literal(self, node): - self.protect_literal_text -= 1 - self.body.append('') - - def visit_productionlist(self, node): - self.body.append(self.starttag(node, 'pre')) - names = [] - for production in node: - names.append(production['tokenname']) - maxlen = max(len(name) for name in names) - for production in node: - if production['tokenname']: - lastname = production['tokenname'].ljust(maxlen) - self.body.append(self.starttag(production, 'strong', '')) - self.body.append(lastname + ' ::= ') - else: - self.body.append('%s ' % (' '*len(lastname))) - production.walkabout(self) - self.body.append('\n') - self.body.append('\n') - raise nodes.SkipNode - def depart_productionlist(self, node): - pass - - def visit_production(self, node): - pass - def depart_production(self, node): - pass - - def visit_centered(self, node): - self.body.append(self.starttag(node, 'p', CLASS="centered") + '') - def depart_centered(self, node): - self.body.append('
</strong></p>
    ') - - def visit_compact_paragraph(self, node): - pass - def depart_compact_paragraph(self, node): - pass - - def visit_highlightlang(self, node): - self.highlightlang = node['lang'] - self.highlightlinenothreshold = node['linenothreshold'] - def depart_highlightlang(self, node): - pass - - # overwritten - def visit_image(self, node): - olduri = node['uri'] - # rewrite the URI if the environment knows about it - if olduri in self.builder.images: - node['uri'] = posixpath.join(self.builder.imgpath, - self.builder.images[olduri]) - - if node.has_key('scale'): - if Image and not (node.has_key('width') - and node.has_key('height')): - try: - im = Image.open(os.path.join(self.builder.srcdir, - olduri)) - except (IOError, # Source image can't be found or opened - UnicodeError): # PIL doesn't like Unicode paths. - print olduri - pass - else: - if not node.has_key('width'): - node['width'] = str(im.size[0]) - if not node.has_key('height'): - node['height'] = str(im.size[1]) - del im - BaseTranslator.visit_image(self, node) - - def visit_toctree(self, node): - # this only happens when formatting a toc from env.tocs -- in this - # case we don't want to include the subtree - raise nodes.SkipNode - - def visit_index(self, node): - raise nodes.SkipNode - - def visit_tabular_col_spec(self, node): - raise nodes.SkipNode - - def visit_glossary(self, node): - pass - def depart_glossary(self, node): - pass - - def visit_acks(self, node): - pass - def depart_acks(self, node): - pass - - def visit_module(self, node): - pass - def depart_module(self, node): - pass - - def bulk_text_processor(self, text): - return text - - # overwritten - def visit_Text(self, node): - text = node.astext() - encoded = self.encode(text) - if self.protect_literal_text: - # moved here from base class's visit_literal to support - # more formatting in literal nodes - for token in self.words_and_spaces.findall(encoded): - if token.strip(): - # protect literal text from line wrapping - self.body.append('%s' % token) - elif token in ' \n': - # allow breaks at whitespace - self.body.append(token) - else: - # protect runs of multiple spaces; the last one can wrap - self.body.append(' ' * (len(token)-1) + ' ') - else: - if self.in_mailto and self.settings.cloak_email_addresses: - encoded = self.cloak_email(encoded) - else: - encoded = self.bulk_text_processor(encoded) - self.body.append(encoded) - - # these are all for docutils 0.5 compatibility - - def visit_note(self, node): - self.visit_admonition(node, 'note') - def depart_note(self, node): - self.depart_admonition(node) - - def visit_warning(self, node): - self.visit_admonition(node, 'warning') - def depart_warning(self, node): - self.depart_admonition(node) - - def visit_attention(self, node): - self.visit_admonition(node, 'attention') - - def depart_attention(self, node): - self.depart_admonition() - - def visit_caution(self, node): - self.visit_admonition(node, 'caution') - def depart_caution(self, node): - self.depart_admonition() - - def visit_danger(self, node): - self.visit_admonition(node, 'danger') - def depart_danger(self, node): - self.depart_admonition() - - def visit_error(self, node): - self.visit_admonition(node, 'error') - def depart_error(self, node): - self.depart_admonition() - - def visit_hint(self, node): - self.visit_admonition(node, 'hint') - def depart_hint(self, node): - self.depart_admonition() - - def visit_important(self, node): - self.visit_admonition(node, 'important') - def depart_important(self, node): - self.depart_admonition() - - def 
visit_tip(self, node): - self.visit_admonition(node, 'tip') - def depart_tip(self, node): - self.depart_admonition() - - # these are only handled specially in the SmartyPantsHTMLTranslator - def visit_literal_emphasis(self, node): - return self.visit_emphasis(node) - def depart_literal_emphasis(self, node): - return self.depart_emphasis(node) - - def depart_title(self, node): - close_tag = self.context[-1] - if self.builder.add_header_links and \ - (close_tag.startswith('\u00B6
    ' % - _('Permalink to this headline')) - BaseTranslator.depart_title(self, node) - - def unknown_visit(self, node): - raise NotImplementedError('Unknown node: ' + node.__class__.__name__) - - -class SmartyPantsHTMLTranslator(HTMLTranslator): - """ - Handle ordinary text via smartypants, converting quotes and dashes - to the correct entities. - """ - - def __init__(self, *args, **kwds): - self.no_smarty = 0 - HTMLTranslator.__init__(self, *args, **kwds) - - def visit_literal(self, node): - self.no_smarty += 1 - try: - # this raises SkipNode - HTMLTranslator.visit_literal(self, node) - finally: - self.no_smarty -= 1 - - def visit_literal_emphasis(self, node): - self.no_smarty += 1 - self.visit_emphasis(node) - - def depart_literal_emphasis(self, node): - self.depart_emphasis(node) - self.no_smarty -= 1 - - def visit_desc_signature(self, node): - self.no_smarty += 1 - HTMLTranslator.visit_desc_signature(self, node) - - def depart_desc_signature(self, node): - self.no_smarty -= 1 - HTMLTranslator.depart_desc_signature(self, node) - - def visit_productionlist(self, node): - self.no_smarty += 1 - try: - HTMLTranslator.visit_productionlist(self, node) - finally: - self.no_smarty -= 1 - - def visit_option(self, node): - self.no_smarty += 1 - HTMLTranslator.visit_option(self, node) - def depart_option(self, node): - self.no_smarty -= 1 - HTMLTranslator.depart_option(self, node) - - def bulk_text_processor(self, text): - if self.no_smarty <= 0: - return sphinx_smarty_pants(text) - return text diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/latexwriter.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/latexwriter.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1192 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.latexwriter - ~~~~~~~~~~~~~~~~~~ - - Custom docutils writer for LaTeX. - - Much of this code is adapted from Dave Kuhlman's "docpy" writer from his - docutils sandbox. - - :copyright: 2007-2008 by Georg Brandl, Dave Kuhlman. - :license: BSD. -""" - -import re -import sys -from os import path - -from docutils import nodes, writers -from docutils.writers.latex2e import Babel - -from sphinx import addnodes -from sphinx import highlighting -from sphinx.locale import admonitionlabels, versionlabels -from sphinx.util import ustrftime -from sphinx.util.texescape import tex_escape_map -from sphinx.util.smartypants import educateQuotesLatex - -HEADER = r'''%% Generated by Sphinx. 
-\documentclass[%(papersize)s,%(pointsize)s%(classoptions)s]{%(docclass)s} -%(inputenc)s -%(fontenc)s -%(babel)s -%(fontpkg)s -%(fncychap)s -\usepackage{sphinx} -%(preamble)s - -\title{%(title)s} -\date{%(date)s} -\release{%(release)s} -\author{%(author)s} -\newcommand{\sphinxlogo}{%(logo)s} -\renewcommand{\releasename}{%(releasename)s} -%(makeindex)s -%(makemodindex)s -''' - -BEGIN_DOC = r''' -\begin{document} -%(shorthandoff)s -%(maketitle)s -%(tableofcontents)s -''' - -FOOTER = r''' -%(footer)s -\renewcommand{\indexname}{%(modindexname)s} -%(printmodindex)s -\renewcommand{\indexname}{%(indexname)s} -%(printindex)s -\end{document} -''' - - -class LaTeXWriter(writers.Writer): - - supported = ('sphinxlatex',) - - settings_spec = ('LaTeX writer options', '', ( - ('Document name', ['--docname'], {'default': ''}), - ('Document class', ['--docclass'], {'default': 'manual'}), - ('Author', ['--author'], {'default': ''}), - )) - settings_defaults = {} - - output = None - - def __init__(self, builder): - writers.Writer.__init__(self) - self.builder = builder - - def translate(self): - visitor = LaTeXTranslator(self.document, self.builder) - self.document.walkabout(visitor) - self.output = visitor.astext() - - -# Helper classes - -class ExtBabel(Babel): - def get_shorthandoff(self): - shortlang = self.language.split('_')[0] - if shortlang in ('de', 'sl', 'pt', 'es', 'nl', 'pl'): - return '\\shorthandoff{"}' - return '' - - _ISO639_TO_BABEL = Babel._ISO639_TO_BABEL.copy() - _ISO639_TO_BABEL['sl'] = 'slovene' - - -class Table(object): - def __init__(self): - self.col = 0 - self.colcount = 0 - self.colspec = None - self.had_head = False - self.has_verbatim = False - self.caption = None - - -class Desc(object): - def __init__(self, node): - self.env = LaTeXTranslator.desc_map.get(node['desctype'], 'describe') - self.type = self.cls = self.name = self.params = self.annotation = '' - self.count = 0 - - -class LaTeXTranslator(nodes.NodeVisitor): - sectionnames = ["part", "chapter", "section", "subsection", - "subsubsection", "paragraph", "subparagraph"] - - ignore_missing_images = False - - default_elements = { - 'docclass': 'manual', - 'papersize': 'letterpaper', - 'pointsize': '10pt', - 'classoptions': '', - 'inputenc': '\\usepackage[utf8]{inputenc}', - 'fontenc': '\\usepackage[T1]{fontenc}', - 'babel': '\\usepackage{babel}', - 'fontpkg': '\\usepackage{times}', - 'fncychap': '\\usepackage[Bjarne]{fncychap}', - 'preamble': '', - 'title': '', - 'date': '', - 'release': '', - 'author': '', - 'logo': '', - 'releasename': 'Release', - 'makeindex': '\\makeindex', - 'makemodindex': '\\makemodindex', - 'shorthandoff': '', - 'maketitle': '\\maketitle', - 'tableofcontents': '\\tableofcontents', - 'footer': '', - 'printmodindex': '\\printmodindex', - 'printindex': '\\printindex', - } - - def __init__(self, document, builder): - nodes.NodeVisitor.__init__(self, document) - self.builder = builder - self.body = [] - - # sort out some elements - papersize = builder.config.latex_paper_size + 'paper' - if papersize == 'paper': # e.g. 
command line "-D latex_paper_size=" - papersize = 'letterpaper' - - self.elements = self.default_elements.copy() - self.elements.update({ - 'docclass': document.settings.docclass, - 'papersize': papersize, - 'pointsize': builder.config.latex_font_size, - # if empty, the title is set to the first section title - 'title': document.settings.title, - 'date': ustrftime(builder.config.today_fmt or _('%B %d, %Y')), - 'release': builder.config.release, - 'author': document.settings.author, - 'releasename': _('Release'), - 'preamble': builder.config.latex_preamble, - 'modindexname': _('Module Index'), - 'indexname': _('Index'), - }) - if builder.config.latex_logo: - self.elements['logo'] = '\\includegraphics{%s}\\par' % \ - path.basename(builder.config.latex_logo) - if builder.config.language: - babel = ExtBabel(builder.config.language) - lang = babel.get_language() - if lang: - self.elements['classoptions'] += ',' + babel.get_language() - else: - self.builder.warn('no Babel option known for language %r' % - builder.config.language) - self.elements['shorthandoff'] = babel.get_shorthandoff() - self.elements['fncychap'] = '\\usepackage[Sonny]{fncychap}' - else: - self.elements['classoptions'] += ',english' - if not builder.config.latex_use_modindex: - self.elements['makemodindex'] = '' - self.elements['printmodindex'] = '' - # allow the user to override them all - self.elements.update(builder.config.latex_elements) - - self.highlighter = highlighting.PygmentsBridge( - 'latex', builder.config.pygments_style) - self.context = [] - self.descstack = [] - self.bibitems = [] - self.table = None - self.next_table_colspec = None - self.highlightlang = builder.config.highlight_language - self.highlightlinenothreshold = sys.maxint - self.written_ids = set() - self.footnotestack = [] - if self.elements['docclass'] == 'manual': - if builder.config.latex_use_parts: - self.top_sectionlevel = 0 - else: - self.top_sectionlevel = 1 - else: - self.top_sectionlevel = 2 - self.next_section_target = None - # flags - self.verbatim = None - self.in_title = 0 - self.in_production_list = 0 - self.first_document = 1 - self.this_is_the_title = 1 - self.literal_whitespace = 0 - self.no_contractions = 0 - - def astext(self): - return (HEADER % self.elements + self.highlighter.get_stylesheet() + - u''.join(self.body) + FOOTER % self.elements) - - def visit_document(self, node): - self.footnotestack.append(self.collect_footnotes(node)) - if self.first_document == 1: - # the first document is all the regular content ... - self.body.append(BEGIN_DOC % self.elements) - self.first_document = 0 - elif self.first_document == 0: - # ... 
and all others are the appendices - self.body.append('\n\\appendix\n') - self.first_document = -1 - # "- 1" because the level is increased before the title is visited - self.sectionlevel = self.top_sectionlevel - 1 - def depart_document(self, node): - if self.bibitems: - widest_label = "" - for bi in self.bibitems: - if len(widest_label) < len(bi[0]): - widest_label = bi[0] - self.body.append('\n\\begin{thebibliography}{%s}\n' % widest_label) - for bi in self.bibitems: - # cite_key: underscores must not be escaped - cite_key = bi[0].replace(r"\_", "_") - self.body.append('\\bibitem[%s]{%s}{%s}\n' % (bi[0], cite_key, bi[1])) - self.body.append('\\end{thebibliography}\n') - self.bibitems = [] - - def visit_start_of_file(self, node): - # This marks the begin of a new file; therefore the current module and - # class must be reset - self.body.append('\n\\resetcurrentobjects\n') - # and also, new footnotes - self.footnotestack.append(self.collect_footnotes(node)) - - def collect_footnotes(self, node): - fnotes = {} - def footnotes_under(n): - if isinstance(n, nodes.footnote): - yield n - else: - for c in n.children: - if isinstance(c, addnodes.start_of_file): - continue - for k in footnotes_under(c): - yield k - for fn in footnotes_under(node): - num = fn.children[0].astext().strip() - fnotes[num] = fn - fn.parent.remove(fn) - return fnotes - - def depart_start_of_file(self, node): - self.footnotestack.pop() - - def visit_highlightlang(self, node): - self.highlightlang = node['lang'] - self.highlightlinenothreshold = node['linenothreshold'] - raise nodes.SkipNode - - def visit_section(self, node): - if not self.this_is_the_title: - self.sectionlevel += 1 - self.body.append('\n\n') - if self.next_section_target: - self.body.append(r'\hypertarget{%s}{}' % self.next_section_target) - self.next_section_target = None - #if node.get('ids'): - # for id in node['ids']: - # if id not in self.written_ids: - # self.body.append(r'\hypertarget{%s}{}' % id) - # self.written_ids.add(id) - def depart_section(self, node): - self.sectionlevel = max(self.sectionlevel - 1, self.top_sectionlevel - 1) - - def visit_problematic(self, node): - self.body.append(r'{\color{red}\bfseries{}') - def depart_problematic(self, node): - self.body.append('}') - - def visit_topic(self, node): - self.body.append('\\setbox0\\vbox{\n' - '\\begin{minipage}{0.95\\textwidth}\n') - def depart_topic(self, node): - self.body.append('\\end{minipage}}\n' - '\\begin{center}\\setlength{\\fboxsep}{5pt}' - '\\shadowbox{\\box0}\\end{center}\n') - visit_sidebar = visit_topic - depart_sidebar = depart_topic - - def visit_glossary(self, node): - pass - def depart_glossary(self, node): - pass - - def visit_productionlist(self, node): - self.body.append('\n\n\\begin{productionlist}\n') - self.in_production_list = 1 - def depart_productionlist(self, node): - self.body.append('\\end{productionlist}\n\n') - self.in_production_list = 0 - - def visit_production(self, node): - if node['tokenname']: - self.body.append('\\production{%s}{' % self.encode(node['tokenname'])) - else: - self.body.append('\\productioncont{') - def depart_production(self, node): - self.body.append('}\n') - - def visit_transition(self, node): - self.body.append('\n\n\\bigskip\\hrule{}\\bigskip\n\n') - def depart_transition(self, node): - pass - - def visit_title(self, node): - parent = node.parent - if isinstance(parent, addnodes.seealso): - # the environment already handles this - raise nodes.SkipNode - elif self.this_is_the_title: - if len(node.children) != 1 and not 
isinstance(node.children[0], nodes.Text): - self.builder.warn('document title is not a single Text node') - if not self.elements['title']: - # text needs to be escaped since it is inserted into - # the output literally - self.elements['title'] = node.astext().translate(tex_escape_map) - self.this_is_the_title = 0 - raise nodes.SkipNode - elif isinstance(parent, nodes.section): - try: - self.body.append(r'\%s{' % self.sectionnames[self.sectionlevel]) - except IndexError: - from sphinx.application import SphinxError - raise SphinxError('too many nesting section levels for LaTeX, ' - 'at heading: %s' % node.astext()) - self.context.append('}\n') - elif isinstance(parent, (nodes.topic, nodes.sidebar)): - self.body.append(r'\textbf{') - self.context.append('}\n\n\medskip\n\n') - elif isinstance(parent, nodes.Admonition): - self.body.append('{') - self.context.append('}\n') - elif isinstance(parent, nodes.table): - self.table.caption = self.encode(node.astext()) - raise nodes.SkipNode - else: - self.builder.warn('encountered title node not in section, topic, ' - 'table, admonition or sidebar') - self.body.append('\\textbf{') - self.context.append('}\n') - self.in_title = 1 - def depart_title(self, node): - self.in_title = 0 - self.body.append(self.context.pop()) - - def visit_subtitle(self, node): - if isinstance(node.parent, nodes.sidebar): - self.body.append('~\\\\\n\\textbf{') - self.context.append('}\n\\smallskip\n') - else: - self.context.append('') - def depart_subtitle(self, node): - self.body.append(self.context.pop()) - - desc_map = { - 'function' : 'funcdesc', - 'class': 'classdesc', - 'method': 'methoddesc', - 'staticmethod': 'staticmethoddesc', - 'exception': 'excdesc', - 'data': 'datadesc', - 'attribute': 'memberdesc', - 'opcode': 'opcodedesc', - - 'cfunction': 'cfuncdesc', - 'cmember': 'cmemberdesc', - 'cmacro': 'csimplemacrodesc', - 'ctype': 'ctypedesc', - 'cvar': 'cvardesc', - - 'describe': 'describe', - # and all others are 'describe' too - } - - def visit_desc(self, node): - self.descstack.append(Desc(node)) - def depart_desc(self, node): - d = self.descstack.pop() - self.body.append("\\end{%s}\n" % d.env) - - def visit_desc_signature(self, node): - d = self.descstack[-1] - # reset these for every signature - d.type = d.cls = d.name = d.params = '' - def depart_desc_signature(self, node): - d = self.descstack[-1] - d.cls = d.cls.rstrip('.') - if node.parent['desctype'] != 'describe' and node['ids']: - hyper = '\\hypertarget{%s}{}' % node['ids'][0] - else: - hyper = '' - if d.count == 0: - t1 = "\n\n%s\\begin{%s}" % (hyper, d.env) - else: - t1 = "\n%s\\%sline" % (hyper, d.env[:-4]) - d.count += 1 - if d.env in ('funcdesc', 'classdesc', 'excclassdesc'): - t2 = "{%s}{%s}" % (d.name, d.params) - elif d.env in ('datadesc', 'excdesc', 'csimplemacrodesc'): - t2 = "{%s}" % (d.name) - elif d.env in ('methoddesc', 'staticmethoddesc'): - if d.cls: - t2 = "[%s]{%s}{%s}" % (d.cls, d.name, d.params) - else: - t2 = "{%s}{%s}" % (d.name, d.params) - elif d.env == 'memberdesc': - if d.cls: - t2 = "[%s]{%s}" % (d.cls, d.name) - else: - t2 = "{%s}" % d.name - elif d.env == 'cfuncdesc': - if d.cls: - # C++ class names - d.name = '%s::%s' % (d.cls, d.name) - t2 = "{%s}{%s}{%s}" % (d.type, d.name, d.params) - elif d.env == 'cmemberdesc': - try: - type, container = d.type.rsplit(' ', 1) - container = container.rstrip('.') - except ValueError: - container = '' - type = d.type - t2 = "{%s}{%s}{%s}" % (container, type, d.name) - elif d.env == 'cvardesc': - t2 = "{%s}{%s}" % (d.type, d.name) - elif d.env 
== 'ctypedesc': - t2 = "{%s}" % (d.name) - elif d.env == 'opcodedesc': - t2 = "{%s}{%s}" % (d.name, d.params) - elif d.env == 'describe': - t2 = "{%s}" % d.name - self.body.append(t1 + t2) - - def visit_desc_type(self, node): - d = self.descstack[-1] - if d.env == 'describe': - d.name += self.encode(node.astext()) - else: - self.descstack[-1].type = self.encode(node.astext().strip()) - raise nodes.SkipNode - - def visit_desc_name(self, node): - d = self.descstack[-1] - if d.env == 'describe': - d.name += self.encode(node.astext()) - else: - self.descstack[-1].name = self.encode(node.astext().strip()) - raise nodes.SkipNode - - def visit_desc_addname(self, node): - d = self.descstack[-1] - if d.env == 'describe': - d.name += self.encode(node.astext()) - else: - self.descstack[-1].cls = self.encode(node.astext().strip()) - raise nodes.SkipNode - - def visit_desc_parameterlist(self, node): - d = self.descstack[-1] - if d.env == 'describe': - d.name += self.encode(node.astext()) - else: - self.descstack[-1].params = self.encode(node.astext().strip()) - raise nodes.SkipNode - - def visit_desc_annotation(self, node): - d = self.descstack[-1] - if d.env == 'describe': - d.name += self.encode(node.astext()) - else: - self.descstack[-1].annotation = self.encode(node.astext().strip()) - raise nodes.SkipNode - - def visit_refcount(self, node): - self.body.append("\\emph{") - def depart_refcount(self, node): - self.body.append("}\\\\") - - def visit_desc_content(self, node): - if node.children and not isinstance(node.children[0], nodes.paragraph): - # avoid empty desc environment which causes a formatting bug - self.body.append('~') - def depart_desc_content(self, node): - pass - - def visit_seealso(self, node): - self.body.append("\n\n\\strong{%s:}\n\n" % admonitionlabels['seealso']) - def depart_seealso(self, node): - self.body.append("\n\n") - - def visit_rubric(self, node): - if len(node.children) == 1 and node.children[0].astext() == 'Footnotes': - raise nodes.SkipNode - self.body.append('\\paragraph{') - self.context.append('}\n') - def depart_rubric(self, node): - self.body.append(self.context.pop()) - - def visit_footnote(self, node): - pass - def depart_footnote(self, node): - pass - - def visit_label(self, node): - if isinstance(node.parent, nodes.citation): - self.bibitems[-1][0] = node.astext() - raise nodes.SkipNode - - def visit_tabular_col_spec(self, node): - self.next_table_colspec = node['spec'] - raise nodes.SkipNode - - def visit_table(self, node): - if self.table: - raise NotImplementedError('Nested tables are not supported.') - self.table = Table() - self.tablebody = [] - # Redirect body output until table is finished. 
- self._body = self.body - self.body = self.tablebody - def depart_table(self, node): - self.body = self._body - if self.table.caption is not None: - self.body.append('\n\\begin{threeparttable}\n' - '\\caption{%s}\n' % self.table.caption) - if self.table.has_verbatim: - self.body.append('\n\\begin{tabular}') - else: - self.body.append('\n\\begin{tabulary}{\\textwidth}') - if self.table.colspec: - self.body.append(self.table.colspec) - else: - if self.table.has_verbatim: - colwidth = 0.95 / self.table.colcount - colspec = ('p{%.3f\\textwidth}|' % colwidth) * self.table.colcount - self.body.append('{|' + colspec + '}\n') - else: - self.body.append('{|' + ('L|' * self.table.colcount) + '}\n') - self.body.extend(self.tablebody) - if self.table.has_verbatim: - self.body.append('\\end{tabular}\n\n') - else: - self.body.append('\\end{tabulary}\n\n') - if self.table.caption is not None: - self.body.append('\\end{threeparttable}\n\n') - self.table = None - self.tablebody = None - - def visit_colspec(self, node): - self.table.colcount += 1 - def depart_colspec(self, node): - pass - - def visit_tgroup(self, node): - pass - def depart_tgroup(self, node): - pass - - def visit_thead(self, node): - if self.next_table_colspec: - self.table.colspec = '{%s}\n' % self.next_table_colspec - self.next_table_colspec = None - self.body.append('\\hline\n') - self.table.had_head = True - def depart_thead(self, node): - self.body.append('\\hline\n') - - def visit_tbody(self, node): - if not self.table.had_head: - self.visit_thead(node) - def depart_tbody(self, node): - self.body.append('\\hline\n') - - def visit_row(self, node): - self.table.col = 0 - def depart_row(self, node): - self.body.append('\\\\\n') - - def visit_entry(self, node): - if node.has_key('morerows') or node.has_key('morecols'): - raise NotImplementedError('Column or row spanning cells are ' - 'not implemented.') - if self.table.col > 0: - self.body.append(' & ') - self.table.col += 1 - if isinstance(node.parent.parent, nodes.thead): - self.body.append('\\textbf{') - self.context.append('}') - else: - self.context.append('') - def depart_entry(self, node): - self.body.append(self.context.pop()) # header - - def visit_acks(self, node): - # this is a list in the source, but should be rendered as a - # comma-separated list here - self.body.append('\n\n') - self.body.append(', '.join(n.astext() for n in node.children[0].children) + '.') - self.body.append('\n\n') - raise nodes.SkipNode - - def visit_bullet_list(self, node): - self.body.append('\\begin{itemize}\n' ) - def depart_bullet_list(self, node): - self.body.append('\\end{itemize}\n' ) - - def visit_enumerated_list(self, node): - self.body.append('\\begin{enumerate}\n' ) - def depart_enumerated_list(self, node): - self.body.append('\\end{enumerate}\n' ) - - def visit_list_item(self, node): - # Append "{}" in case the next character is "[", which would break - # LaTeX's list environment (no numbering and the "[" is not printed). 
- self.body.append(r'\item {} ') - def depart_list_item(self, node): - self.body.append('\n') - - def visit_definition_list(self, node): - self.body.append('\\begin{description}\n') - def depart_definition_list(self, node): - self.body.append('\\end{description}\n') - - def visit_definition_list_item(self, node): - pass - def depart_definition_list_item(self, node): - pass - - def visit_term(self, node): - ctx = ']' - if node.has_key('ids') and node['ids']: - ctx += '\\hypertarget{%s}{}' % node['ids'][0] - self.body.append('\\item[') - self.context.append(ctx) - def depart_term(self, node): - self.body.append(self.context.pop()) - - def visit_classifier(self, node): - self.body.append('{[}') - def depart_classifier(self, node): - self.body.append('{]}') - - def visit_definition(self, node): - pass - def depart_definition(self, node): - self.body.append('\n') - - def visit_field_list(self, node): - self.body.append('\\begin{quote}\\begin{description}\n') - def depart_field_list(self, node): - self.body.append('\\end{description}\\end{quote}\n') - - def visit_field(self, node): - pass - def depart_field(self, node): - pass - - visit_field_name = visit_term - depart_field_name = depart_term - - visit_field_body = visit_definition - depart_field_body = depart_definition - - def visit_paragraph(self, node): - self.body.append('\n') - def depart_paragraph(self, node): - self.body.append('\n') - - def visit_centered(self, node): - self.body.append('\n\\begin{centering}') - def depart_centered(self, node): - self.body.append('\n\\end{centering}') - - def visit_module(self, node): - modname = node['modname'] - self.body.append('\n\\declaremodule[%s]{}{%s}' % (modname.replace('_', ''), - self.encode(modname))) - self.body.append('\n\\modulesynopsis{%s}' % self.encode(node['synopsis'])) - if node.has_key('platform'): - self.body.append('\\platform{%s}' % self.encode(node['platform'])) - def depart_module(self, node): - pass - - def latex_image_length(self, width_str): - match = re.match('(\d*\.?\d*)\s*(\S*)', width_str) - if not match: - # fallback - return width_str - res = width_str - amount, unit = match.groups()[:2] - if not unit or unit == "px": - # pixels: let LaTeX alone - return None - elif unit == "%": - res = "%.3f\\linewidth" % (float(amount) / 100.0) - return res - - def visit_image(self, node): - attrs = node.attributes - pre = [] # in reverse order - post = [] - include_graphics_options = [] - inline = isinstance(node.parent, nodes.TextElement) - if attrs.has_key('scale'): - # Could also be done with ``scale`` option to - # ``\includegraphics``; doing it this way for consistency. - pre.append('\\scalebox{%f}{' % (attrs['scale'] / 100.0,)) - post.append('}') - if attrs.has_key('width'): - w = self.latex_image_length(attrs['width']) - if w: - include_graphics_options.append('width=%s' % w) - if attrs.has_key('height'): - h = self.latex_image_length(attrs['height']) - if h: - include_graphics_options.append('height=%s' % h) - if attrs.has_key('align'): - align_prepost = { - # By default latex aligns the top of an image. - (1, 'top'): ('', ''), - (1, 'middle'): ('\\raisebox{-0.5\\height}{', '}'), - (1, 'bottom'): ('\\raisebox{-\\height}{', '}'), - (0, 'center'): ('{\\hfill', '\\hfill}'), - # These 2 don't exactly do the right thing. The image should - # be floated alongside the paragraph. 
See - # http://www.w3.org/TR/html4/struct/objects.html#adef-align-IMG - (0, 'left'): ('{', '\\hfill}'), - (0, 'right'): ('{\\hfill', '}'),} - try: - pre.append(align_prepost[inline, attrs['align']][0]) - post.append(align_prepost[inline, attrs['align']][1]) - except KeyError: - pass # XXX complain here? - if not inline: - pre.append('\n') - post.append('\n') - pre.reverse() - if node['uri'] in self.builder.images: - uri = self.builder.images[node['uri']] - else: - # missing image! - if self.ignore_missing_images: - return - uri = node['uri'] - if uri.find('://') != -1: - # ignore remote images - return - self.body.extend(pre) - options = '' - if include_graphics_options: - options = '[%s]' % ','.join(include_graphics_options) - self.body.append('\\includegraphics%s{%s}' % (options, uri)) - self.body.extend(post) - def depart_image(self, node): - pass - - def visit_figure(self, node): - if (not node.attributes.has_key('align') or - node.attributes['align'] == 'center'): - # centering does not add vertical space like center. - align = '\n\\centering' - align_end = '' - else: - # TODO non vertical space for other alignments. - align = '\\begin{flush%s}' % node.attributes['align'] - align_end = '\\end{flush%s}' % node.attributes['align'] - self.body.append('\\begin{figure}[htbp]%s\n' % align) - self.context.append('%s\\end{figure}\n' % align_end) - def depart_figure(self, node): - self.body.append(self.context.pop()) - - def visit_caption(self, node): - self.body.append('\\caption{') - def depart_caption(self, node): - self.body.append('}') - - def visit_legend(self, node): - self.body.append('{\\small ') - def depart_legend(self, node): - self.body.append('}') - - def visit_admonition(self, node): - self.body.append('\n\\begin{notice}{note}') - def depart_admonition(self, node): - self.body.append('\\end{notice}\n') - - def _make_visit_admonition(name): - def visit_admonition(self, node): - self.body.append('\n\\begin{notice}{%s}{%s:}' % - (name, admonitionlabels[name])) - return visit_admonition - def _depart_named_admonition(self, node): - self.body.append('\\end{notice}\n') - - visit_attention = _make_visit_admonition('attention') - depart_attention = _depart_named_admonition - visit_caution = _make_visit_admonition('caution') - depart_caution = _depart_named_admonition - visit_danger = _make_visit_admonition('danger') - depart_danger = _depart_named_admonition - visit_error = _make_visit_admonition('error') - depart_error = _depart_named_admonition - visit_hint = _make_visit_admonition('hint') - depart_hint = _depart_named_admonition - visit_important = _make_visit_admonition('important') - depart_important = _depart_named_admonition - visit_note = _make_visit_admonition('note') - depart_note = _depart_named_admonition - visit_tip = _make_visit_admonition('tip') - depart_tip = _depart_named_admonition - visit_warning = _make_visit_admonition('warning') - depart_warning = _depart_named_admonition - - def visit_versionmodified(self, node): - intro = versionlabels[node['type']] % node['version'] - if node.children: - intro += ': ' - else: - intro += '.' 
- self.body.append(intro) - def depart_versionmodified(self, node): - pass - - def visit_target(self, node): - def add_target(id): - # indexing uses standard LaTeX index markup, so the targets - # will be generated differently - if not id.startswith('index-'): - self.body.append(r'\hypertarget{%s}{}' % id) - - if node.has_key('refid') and node['refid'] not in self.written_ids: - parindex = node.parent.index(node) - try: - next = node.parent[parindex+1] - if isinstance(next, nodes.section): - self.next_section_target = node['refid'] - return - except IndexError: - pass - add_target(node['refid']) - self.written_ids.add(node['refid']) - def depart_target(self, node): - pass - - def visit_attribution(self, node): - self.body.append('\n\\begin{flushright}\n') - self.body.append('---') - def depart_attribution(self, node): - self.body.append('\n\\end{flushright}\n') - - def visit_index(self, node, scre=re.compile(r';\s*')): - entries = node['entries'] - for type, string, tid, _ in entries: - if type == 'single': - self.body.append(r'\index{%s}' % scre.sub('!', self.encode(string))) - elif type == 'pair': - parts = tuple(self.encode(x.strip()) for x in string.split(';', 1)) - try: - self.body.append(r'\indexii{%s}{%s}' % parts) - except TypeError: - self.builder.warn('invalid pair index entry %r' % string) - elif type == 'triple': - parts = tuple(self.encode(x.strip()) for x in string.split(';', 2)) - try: - self.body.append(r'\indexiii{%s}{%s}{%s}' % parts) - except TypeError: - self.builder.warn('invalid triple index entry %r' % string) - else: - self.builder.warn('unknown index entry type %s found' % type) - raise nodes.SkipNode - - def visit_raw(self, node): - if 'latex' in node.get('format', '').split(): - self.body.append(node.astext()) - raise nodes.SkipNode - - def visit_reference(self, node): - uri = node.get('refuri', '') - if self.in_title or not uri: - self.context.append('') - elif uri.startswith('mailto:') or uri.startswith('http:') or \ - uri.startswith('https:') or uri.startswith('ftp:'): - self.body.append('\\href{%s}{' % self.encode(uri)) - self.context.append('}') - elif uri.startswith('#'): - self.body.append('\\hyperlink{%s}{' % uri[1:]) - self.context.append('}') - elif uri.startswith('@token'): - if self.in_production_list: - self.body.append('\\token{') - else: - self.body.append('\\grammartoken{') - self.context.append('}') - else: - self.builder.warn('unusable reference target found: %s' % uri) - self.context.append('') - def depart_reference(self, node): - self.body.append(self.context.pop()) - - def visit_pending_xref(self, node): - pass - def depart_pending_xref(self, node): - pass - - def visit_emphasis(self, node): - self.body.append(r'\emph{') - def depart_emphasis(self, node): - self.body.append('}') - - def visit_literal_emphasis(self, node): - self.body.append(r'\emph{\texttt{') - self.no_contractions += 1 - def depart_literal_emphasis(self, node): - self.body.append('}}') - self.no_contractions -= 1 - - def visit_strong(self, node): - self.body.append(r'\textbf{') - def depart_strong(self, node): - self.body.append('}') - - def visit_title_reference(self, node): - self.body.append(r'\emph{') - def depart_title_reference(self, node): - self.body.append('}') - - def visit_citation(self, node): - # TODO maybe use cite bibitems - self.bibitems.append(['', '']) - self.context.append(len(self.body)) - def depart_citation(self, node): - size = self.context.pop() - text = ''.join(self.body[size:]) - del self.body[size:] - self.bibitems[-1][1] = text - - def 
visit_citation_reference(self, node): - citeid = node.astext() - self.body.append('\\cite{%s}' % citeid) - raise nodes.SkipNode - - def visit_literal(self, node): - content = self.encode(node.astext().strip()) - if self.in_title: - self.body.append(r'\texttt{%s}' % content) - elif node.has_key('role') and node['role'] == 'samp': - self.body.append(r'\samp{%s}' % content) - else: - self.body.append(r'\code{%s}' % content) - raise nodes.SkipNode - - def visit_footnote_reference(self, node): - num = node.astext().strip() - try: - fn = self.footnotestack[-1][num] - except (KeyError, IndexError): - raise nodes.SkipNode - self.body.append('\\footnote{') - fn.walkabout(self) - raise nodes.SkipChildren - def depart_footnote_reference(self, node): - self.body.append('}') - - def visit_literal_block(self, node): - self.verbatim = '' - def depart_literal_block(self, node): - code = self.verbatim.rstrip('\n') - lang = self.highlightlang - linenos = code.count('\n') >= self.highlightlinenothreshold - 1 - if node.has_key('language'): - # code-block directives - lang = node['language'] - if node.has_key('linenos'): - linenos = node['linenos'] - hlcode = self.highlighter.highlight_block(code, lang, linenos) - # workaround for Unicode issue - hlcode = hlcode.replace(u'€', u'@texteuro[]') - # must use original Verbatim environment and "tabular" environment - if self.table: - hlcode = hlcode.replace('\\begin{Verbatim}', - '\\begin{OriginalVerbatim}') - self.table.has_verbatim = True - # get consistent trailer - hlcode = hlcode.rstrip()[:-14] # strip \end{Verbatim} - hlcode = hlcode.rstrip() + '\n' - self.body.append('\n' + hlcode + '\\end{%sVerbatim}\n' % - (self.table and 'Original' or '')) - self.verbatim = None - visit_doctest_block = visit_literal_block - depart_doctest_block = depart_literal_block - - def visit_line_block(self, node): - """line-block: - * whitespace (including linebreaks) is significant - * inline markup is supported. - * serif typeface - """ - self.body.append('{\\raggedright{}') - self.literal_whitespace = 1 - def depart_line_block(self, node): - self.literal_whitespace = 0 - # remove the last \\ - del self.body[-1] - self.body.append('}\n') - - def visit_line(self, node): - self._line_start = len(self.body) - def depart_line(self, node): - if self._line_start == len(self.body): - # no output in this line -- add a nonbreaking space, else the - # \\ command will give an error - self.body.append('~') - if self.table is not None: - self.body.append('\\newline\n') - else: - self.body.append('\\\\\n') - - def visit_block_quote(self, node): - # If the block quote contains a single object and that object - # is a list, then generate a list not a block quote. - # This lets us indent lists. - done = 0 - if len(node.children) == 1: - child = node.children[0] - if isinstance(child, nodes.bullet_list) or \ - isinstance(child, nodes.enumerated_list): - done = 1 - if not done: - self.body.append('\\begin{quote}\n') - def depart_block_quote(self, node): - done = 0 - if len(node.children) == 1: - child = node.children[0] - if isinstance(child, nodes.bullet_list) or \ - isinstance(child, nodes.enumerated_list): - done = 1 - if not done: - self.body.append('\\end{quote}\n') - - # option node handling copied from docutils' latex writer - - def visit_option(self, node): - if self.context[-1]: - # this is not the first option - self.body.append(', ') - def depart_option(self, node): - # flag that the first option is done. 
- self.context[-1] += 1 - - def visit_option_argument(self, node): - """The delimiter betweeen an option and its argument.""" - self.body.append(node.get('delimiter', ' ')) - def depart_option_argument(self, node): - pass - - def visit_option_group(self, node): - self.body.append('\\item [') - # flag for first option - self.context.append(0) - def depart_option_group(self, node): - self.context.pop() # the flag - self.body.append('] ') - - def visit_option_list(self, node): - self.body.append('\\begin{optionlist}{3cm}\n') - def depart_option_list(self, node): - self.body.append('\\end{optionlist}\n') - - def visit_option_list_item(self, node): - pass - def depart_option_list_item(self, node): - pass - - def visit_option_string(self, node): - ostring = node.astext() - self.body.append(self.encode(ostring.replace('--', u'-{-}'))) - raise nodes.SkipNode - - def visit_description(self, node): - self.body.append( ' ' ) - def depart_description(self, node): - pass - - def visit_superscript(self, node): - self.body.append('$^{\\text{') - def depart_superscript(self, node): - self.body.append('}}$') - - def visit_subscript(self, node): - self.body.append('$_{\\text{') - def depart_subscript(self, node): - self.body.append('}}$') - - def visit_substitution_definition(self, node): - raise nodes.SkipNode - - def visit_substitution_reference(self, node): - raise nodes.SkipNode - - def visit_generated(self, node): - pass - def depart_generated(self, node): - pass - - def visit_compound(self, node): - pass - def depart_compound(self, node): - pass - - def visit_container(self, node): - pass - def depart_container(self, node): - pass - - def visit_decoration(self, node): - pass - def depart_decoration(self, node): - pass - - # text handling - - def encode(self, text): - text = unicode(text).translate(tex_escape_map) - if self.literal_whitespace: - # Insert a blank before the newline, to avoid - # ! LaTeX Error: There's no line here to end. - text = text.replace(u'\n', u'~\\\\\n').replace(u' ', u'~') - if self.no_contractions: - text = text.replace('--', u'-{-}') - return text - - def visit_Text(self, node): - if self.verbatim is not None: - self.verbatim += node.astext() - else: - text = self.encode(node.astext()) - self.body.append(educateQuotesLatex(text)) - def depart_Text(self, node): - pass - - def visit_comment(self, node): - raise nodes.SkipNode - - def visit_meta(self, node): - # only valid for HTML - raise nodes.SkipNode - - def visit_system_message(self, node): - pass - def depart_system_message(self, node): - self.body.append('\n') - - def unknown_visit(self, node): - raise NotImplementedError('Unknown node: ' + node.__class__.__name__) diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/linkcheck.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/linkcheck.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,130 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.linkcheck - ~~~~~~~~~~~~~~~~ - - The CheckExternalLinksBuilder class. - - :copyright: 2008 by Georg Brandl, Thomas Lamb. - :license: BSD. 
-""" - -import socket -from os import path -from urllib2 import build_opener, HTTPError - -from docutils import nodes - -from sphinx.builder import Builder -from sphinx.util.console import purple, red, darkgreen - -# create an opener that will simulate a browser user-agent -opener = build_opener() -opener.addheaders = [('User-agent', 'Mozilla/5.0')] - - -class CheckExternalLinksBuilder(Builder): - """ - Checks for broken external links. - """ - name = 'linkcheck' - - def init(self): - self.good = set() - self.broken = {} - self.redirected = {} - # set a timeout for non-responding servers - socket.setdefaulttimeout(5.0) - # create output file - open(path.join(self.outdir, 'output.txt'), 'w').close() - - def get_target_uri(self, docname, typ=None): - return '' - - def get_outdated_docs(self): - return self.env.found_docs - - def prepare_writing(self, docnames): - return - - def write_doc(self, docname, doctree): - self.info() - for node in doctree.traverse(nodes.reference): - try: - self.check(node, docname) - except KeyError: - continue - - def check(self, node, docname): - uri = node['refuri'] - - if '#' in uri: - uri = uri.split('#')[0] - - if uri in self.good: - return - - lineno = None - while lineno is None and node: - node = node.parent - lineno = node.line - - if uri[0:5] == 'http:' or uri[0:6] == 'https:': - self.info(uri, nonl=1) - - if uri in self.broken: - (r, s) = self.broken[uri] - elif uri in self.redirected: - (r, s) = self.redirected[uri] - else: - (r, s) = self.resolve(uri) - - if r == 0: - self.info(' - ' + darkgreen('working')) - self.good.add(uri) - elif r == 2: - self.info(' - ' + red('broken: ') + s) - self.write_entry('broken', docname, lineno, uri + ': ' + s) - self.broken[uri] = (r, s) - if self.app.quiet: - self.warn('%s:%s: broken link: %s' % (docname, lineno, uri)) - else: - self.info(' - ' + purple('redirected') + ' to ' + s) - self.write_entry('redirected', docname, lineno, uri + ' to ' + s) - self.redirected[uri] = (r, s) - elif len(uri) == 0 or uri[0:7] == 'mailto:' or uri[0:4] == 'ftp:': - return - else: - self.warn(uri + ' - ' + red('malformed!')) - self.write_entry('malformed', docname, lineno, uri) - if self.app.quiet: - self.warn('%s:%s: malformed link: %s' % (docname, lineno, uri)) - self.app.statuscode = 1 - - if self.broken: - self.app.statuscode = 1 - - def write_entry(self, what, docname, line, uri): - output = open(path.join(self.outdir, 'output.txt'), 'a') - output.write("%s:%s: [%s] %s\n" % (self.env.doc2path(docname, None), - line, what, uri)) - output.close() - - def resolve(self, uri): - try: - f = opener.open(uri) - f.close() - except HTTPError, err: - #if err.code == 403 and uri.startswith('http://en.wikipedia.org/'): - # # Wikipedia blocks requests from urllib User-Agent - # return (0, 0) - return (2, str(err)) - except Exception, err: - return (2, str(err)) - if f.url.rstrip('/') == uri.rstrip('/'): - return (0, 0) - else: - return (1, f.url) - - def finish(self): - return diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/__init__.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/__init__.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,48 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.locale - ~~~~~~~~~~~~~ - - Locale utilities. - - :copyright: 2008 by Georg Brandl. - :license: BSD. 
-""" - -_ = lambda x: x - -admonitionlabels = { - 'attention': _('Attention'), - 'caution': _('Caution'), - 'danger': _('Danger'), - 'error': _('Error'), - 'hint': _('Hint'), - 'important': _('Important'), - 'note': _('Note'), - 'seealso': _('See Also'), - 'tip': _('Tip'), - 'warning': _('Warning'), -} - -versionlabels = { - 'versionadded': _('New in version %s'), - 'versionchanged': _('Changed in version %s'), - 'deprecated': _('Deprecated since version %s'), -} - -pairindextypes = { - 'module': _('module'), - 'keyword': _('keyword'), - 'operator': _('operator'), - 'object': _('object'), - 'exception': _('exception'), - 'statement': _('statement'), - 'builtin': _('built-in function'), -} - -del _ - -def init(): - for dct in (admonitionlabels, versionlabels, pairindextypes): - for key in dct: - dct[key] = _(dct[key]) diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/cs/LC_MESSAGES/sphinx.js --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/cs/LC_MESSAGES/sphinx.js Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -Documentation.addTranslations({"locale": "cs", "plural_expr": "(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2)", "messages": {"module, in ": "modul", "Preparing search...": "", "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories.": "", "Search finished, found %s page(s) matching the search query.": "", ", in ": "", "Permalink to this headline": "Trval\u00fd odkaz na tento nadpis", "Searching": "hledej", "Permalink to this definition": "Trval\u00fd odkaz na tuto definici", "Hide Search Matches": "", "Search Results": "V\u00fdsledky hled\u00e1n\u00ed"}}); \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/cs/LC_MESSAGES/sphinx.mo Binary file buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/cs/LC_MESSAGES/sphinx.mo has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/cs/LC_MESSAGES/sphinx.po --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/cs/LC_MESSAGES/sphinx.po Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,604 +0,0 @@ -# Czech translations for Sphinx. -# Copyright (C) 2008 ORGANIZATION -# This file is distributed under the same license as the Sphinx project. -# FIRST AUTHOR , 2008. -# -msgid "" -msgstr "" -"Project-Id-Version: Sphinx 0.5\n" -"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2008-08-10 11:43+0000\n" -"PO-Revision-Date: 2008-11-27 18:40+0100\n" -"Last-Translator: Pavel Kosina \n" -"Language-Team: Pavel Kosina \n" -"Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && " -"n%10<=4 && (n%100<10 || n%100>=20) ? 
1 : 2)\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=utf-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 0.9.4\n" - -#: sphinx/builder.py:408 -#, python-format -msgid "%b %d, %Y" -msgstr "%d.%m.%Y" - -#: sphinx/builder.py:427 sphinx/templates/defindex.html:21 -msgid "General Index" -msgstr "Rejstřík indexů" - -#: sphinx/builder.py:427 -msgid "index" -msgstr "index" - -#: sphinx/builder.py:429 sphinx/htmlhelp.py:156 -#: sphinx/templates/defindex.html:19 sphinx/templates/modindex.html:2 -#: sphinx/templates/modindex.html:13 -msgid "Global Module Index" -msgstr "Rejstřík modulů" - -#: sphinx/builder.py:429 -msgid "modules" -msgstr "moduly" - -#: sphinx/builder.py:466 -msgid "next" -msgstr "další" - -#: sphinx/builder.py:473 -msgid "previous" -msgstr "pÅ™edchozí" - -#: sphinx/builder.py:1054 -msgid " (in " -msgstr "" - -#: sphinx/builder.py:1129 -msgid "Builtins" -msgstr "VestavÄ›né funkce" - -#: sphinx/builder.py:1131 -msgid "Module level" -msgstr "Úroveň modulů" - -#: sphinx/environment.py:102 sphinx/latexwriter.py:169 -#, python-format -msgid "%B %d, %Y" -msgstr "%d.%m.%Y" - -#: sphinx/environment.py:291 sphinx/latexwriter.py:175 -#: sphinx/templates/genindex-single.html:2 -#: sphinx/templates/genindex-split.html:2 -#: sphinx/templates/genindex-split.html:5 sphinx/templates/genindex.html:2 -#: sphinx/templates/genindex.html:5 sphinx/templates/genindex.html:48 -#: sphinx/templates/layout.html:130 -msgid "Index" -msgstr "Index" - -#: sphinx/environment.py:292 sphinx/latexwriter.py:174 -#, fuzzy -msgid "Module Index" -msgstr "Rejstřík modulů" - -#: sphinx/environment.py:293 sphinx/templates/defindex.html:16 -#, fuzzy -msgid "Search Page" -msgstr "Vyhledávací stránka" - -#: sphinx/htmlwriter.py:79 sphinx/static/doctools.js:145 -msgid "Permalink to this definition" -msgstr "Trvalý odkaz na tuto definici" - -#: sphinx/htmlwriter.py:399 sphinx/static/doctools.js:139 -msgid "Permalink to this headline" -msgstr "Trvalý odkaz na tento nadpis" - -#: sphinx/latexwriter.py:172 -msgid "Release" -msgstr "Vydání" - -#: sphinx/roles.py:53 sphinx/directives/desc.py:537 -#, python-format -msgid "environment variable; %s" -msgstr "prommÄ›ná prostÅ™edí, %s" - -#: sphinx/roles.py:60 -#, python-format -msgid "Python Enhancement Proposals!PEP %s" -msgstr "Python Enhancement Proposals!PEP %s" - -#: sphinx/textwriter.py:166 -#, python-format -msgid "Platform: %s" -msgstr "Platforma: %s" - -#: sphinx/textwriter.py:422 -msgid "[image]" -msgstr "[obrázek]" - -#: sphinx/directives/desc.py:25 -#, python-format -msgid "%s() (built-in function)" -msgstr "%s() (vestavÄ›ná funkce)" - -#: sphinx/directives/desc.py:26 sphinx/directives/desc.py:42 -#: sphinx/directives/desc.py:54 -#, python-format -msgid "%s() (in module %s)" -msgstr "%s() (v modulu %s)" - -#: sphinx/directives/desc.py:29 -#, python-format -msgid "%s (built-in variable)" -msgstr "%s() (vestavÄ›ná promÄ›nná)" - -#: sphinx/directives/desc.py:30 sphinx/directives/desc.py:66 -#, python-format -msgid "%s (in module %s)" -msgstr "%s() (v modulu %s)" - -#: sphinx/directives/desc.py:33 -#, fuzzy, python-format -msgid "%s (built-in class)" -msgstr "%s() (vestavÄ›ná promÄ›nná)" - -#: sphinx/directives/desc.py:34 -#, python-format -msgid "%s (class in %s)" -msgstr "%s() (třída v %s)" - -#: sphinx/directives/desc.py:46 -#, python-format -msgid "%s() (%s.%s method)" -msgstr "%s() (metoda %s.%s)" - -#: sphinx/directives/desc.py:48 -#, python-format -msgid "%s() (%s method)" -msgstr "%s() (metoda %s)" - -#: sphinx/directives/desc.py:58 -#, 
python-format -msgid "%s() (%s.%s static method)" -msgstr "%s() (statická metoda %s.%s)" - -#: sphinx/directives/desc.py:60 -#, python-format -msgid "%s() (%s static method)" -msgstr "%s() (statická metoda %s)" - -#: sphinx/directives/desc.py:70 -#, python-format -msgid "%s (%s.%s attribute)" -msgstr "%s() (atribut %s.%s)" - -#: sphinx/directives/desc.py:72 -#, python-format -msgid "%s (%s attribute)" -msgstr "%s() (atribut %s)" - -#: sphinx/directives/desc.py:74 -#, python-format -msgid "%s (C function)" -msgstr "%s (C funkce)" - -#: sphinx/directives/desc.py:76 -#, python-format -msgid "%s (C member)" -msgstr "%s (Älen C)" - -#: sphinx/directives/desc.py:78 -#, python-format -msgid "%s (C macro)" -msgstr "%s (C makro)" - -#: sphinx/directives/desc.py:80 -#, python-format -msgid "%s (C type)" -msgstr "%s (C typ)" - -#: sphinx/directives/desc.py:82 -#, python-format -msgid "%s (C variable)" -msgstr "%s (C promÄ›nná)" - -#: sphinx/directives/desc.py:100 -msgid "Raises" -msgstr "Vyvolá" - -#: sphinx/directives/desc.py:104 -msgid "Variable" -msgstr "PromÄ›nná" - -#: sphinx/directives/desc.py:107 -msgid "Returns" -msgstr "Vrací" - -#: sphinx/directives/desc.py:116 -msgid "Return type" -msgstr "Typ navrácené hodnoty" - -#: sphinx/directives/desc.py:143 -msgid "Parameters" -msgstr "Parametry" - -#: sphinx/directives/desc.py:423 -#, fuzzy, python-format -msgid "%scommand line option; %s" -msgstr "%sparametry příkazového řádku; %s" - -#: sphinx/directives/other.py:101 -msgid "Platforms: " -msgstr "Platformy: " - -#: sphinx/directives/other.py:106 -#, python-format -msgid "%s (module)" -msgstr "%s (module)" - -#: sphinx/directives/other.py:146 -msgid "Section author: " -msgstr "Autor sekce: " - -#: sphinx/directives/other.py:148 -msgid "Module author: " -msgstr "Autor modulu: " - -#: sphinx/directives/other.py:150 -msgid "Author: " -msgstr "Autor: " - -#: sphinx/directives/other.py:246 -msgid "See also" -msgstr "Viz také" - -#: sphinx/ext/todo.py:31 -msgid "Todo" -msgstr "" - -#: sphinx/ext/todo.py:75 -#, python-format -msgid "(The original entry is located in %s, line %d and can be found " -msgstr "" - -#: sphinx/ext/todo.py:81 -msgid "here" -msgstr "" - -#: sphinx/locale/__init__.py:15 -msgid "Attention" -msgstr "Výstraha" - -#: sphinx/locale/__init__.py:16 -msgid "Caution" -msgstr "UpozornÄ›ní" - -#: sphinx/locale/__init__.py:17 -msgid "Danger" -msgstr "NebezpeÄí" - -#: sphinx/locale/__init__.py:18 -msgid "Error" -msgstr "Chyba" - -#: sphinx/locale/__init__.py:19 -msgid "Hint" -msgstr "Rada" - -#: sphinx/locale/__init__.py:20 -msgid "Important" -msgstr "Důležité" - -#: sphinx/locale/__init__.py:21 -msgid "Note" -msgstr "Poznámka" - -#: sphinx/locale/__init__.py:22 -msgid "See Also" -msgstr "Viz také" - -#: sphinx/locale/__init__.py:23 -msgid "Tip" -msgstr "Tip" - -#: sphinx/locale/__init__.py:24 -msgid "Warning" -msgstr "Varování" - -#: sphinx/locale/__init__.py:28 -#, python-format -msgid "New in version %s" -msgstr "Nové ve verzi %s" - -#: sphinx/locale/__init__.py:29 -#, python-format -msgid "Changed in version %s" -msgstr "ZmÄ›nÄ›no ve verzi %s" - -#: sphinx/locale/__init__.py:30 -#, python-format -msgid "Deprecated since version %s" -msgstr "Zastaralé od verze %s" - -#: sphinx/locale/__init__.py:34 -msgid "module" -msgstr "modul" - -#: sphinx/locale/__init__.py:35 -msgid "keyword" -msgstr "klíÄové slovo" - -#: sphinx/locale/__init__.py:36 -msgid "operator" -msgstr "operátor" - -#: sphinx/locale/__init__.py:37 -msgid "object" -msgstr "objekt" - -#: sphinx/locale/__init__.py:38 -msgid 
"exception" -msgstr "výjimka" - -#: sphinx/locale/__init__.py:39 -msgid "statement" -msgstr "příkaz" - -#: sphinx/locale/__init__.py:40 -msgid "built-in function" -msgstr "vestavÄ›ná funkce" - -#: sphinx/static/doctools.js:174 -msgid "Hide Search Matches" -msgstr "" - -#: sphinx/static/searchtools.js:274 -#, fuzzy -msgid "Searching" -msgstr "hledej" - -#: sphinx/static/searchtools.js:279 -msgid "Preparing search..." -msgstr "" - -#: sphinx/static/searchtools.js:338 -#, fuzzy -msgid "module, in " -msgstr "modul" - -#: sphinx/static/searchtools.js:347 -msgid ", in " -msgstr "" - -#: sphinx/static/searchtools.js:447 sphinx/templates/search.html:18 -msgid "Search Results" -msgstr "Výsledky hledání" - -#: sphinx/static/searchtools.js:449 -msgid "" -"Your search did not match any documents. Please make sure that all words " -"are spelled correctly and that you've selected enough categories." -msgstr "" - -#: sphinx/static/searchtools.js:451 -#, python-format -msgid "Search finished, found %s page(s) matching the search query." -msgstr "" - -#: sphinx/templates/defindex.html:2 -msgid "Overview" -msgstr "PÅ™ehled" - -#: sphinx/templates/defindex.html:11 -msgid "Indices and tables:" -msgstr "Rejstříky a tabulky:" - -#: sphinx/templates/defindex.html:14 -msgid "Complete Table of Contents" -msgstr "Celkový obsah" - -#: sphinx/templates/defindex.html:15 -msgid "lists all sections and subsections" -msgstr "seznam vÅ¡ech sekcí a podsekcí" - -#: sphinx/templates/defindex.html:17 -msgid "search this documentation" -msgstr "prohledej tuto dokumentaci" - -#: sphinx/templates/defindex.html:20 -msgid "quick access to all modules" -msgstr "rychlý přístup ke vÅ¡em modulům" - -#: sphinx/templates/defindex.html:22 -msgid "all functions, classes, terms" -msgstr "vÅ¡echny funkce, třídy, termíny" - -#: sphinx/templates/genindex-single.html:5 -#, python-format -msgid "Index – %(key)s" -msgstr "Index – %(key)s" - -#: sphinx/templates/genindex-single.html:44 -#: sphinx/templates/genindex-split.html:14 -#: sphinx/templates/genindex-split.html:27 sphinx/templates/genindex.html:54 -msgid "Full index on one page" -msgstr "Plný index na jedné stránce" - -#: sphinx/templates/genindex-split.html:7 -msgid "Index pages by letter" -msgstr "Index podle písmene" - -#: sphinx/templates/genindex-split.html:15 -msgid "can be huge" -msgstr "může být obrovský" - -#: sphinx/templates/layout.html:9 -msgid "Navigation" -msgstr "Navigace" - -#: sphinx/templates/layout.html:40 -msgid "Table Of Contents" -msgstr "Obsah" - -#: sphinx/templates/layout.html:46 -msgid "Previous topic" -msgstr "PÅ™echozí téma" - -#: sphinx/templates/layout.html:47 -msgid "previous chapter" -msgstr "pÅ™edchozí kapitola" - -#: sphinx/templates/layout.html:50 -msgid "Next topic" -msgstr "Další téma" - -#: sphinx/templates/layout.html:51 -msgid "next chapter" -msgstr "další kapitola" - -#: sphinx/templates/layout.html:55 -msgid "This Page" -msgstr "Tato stránka" - -#: sphinx/templates/layout.html:59 -msgid "Suggest Change" -msgstr "Návrh zmÄ›nu" - -#: sphinx/templates/layout.html:60 sphinx/templates/layout.html:62 -msgid "Show Source" -msgstr "Ukázat zdroj" - -#: sphinx/templates/layout.html:71 -msgid "Quick search" -msgstr "Rychlé vyhledávání" - -#: sphinx/templates/layout.html:71 -msgid "Keyword search" -msgstr "Hledání dle klíÄe" - -#: sphinx/templates/layout.html:73 -msgid "Go" -msgstr "hledej" - -#: sphinx/templates/layout.html:78 -msgid "Enter a module, class or function name." -msgstr "Zadej jméno modulu, třídy nebo funkce." 
- -#: sphinx/templates/layout.html:119 -#, python-format -msgid "Search within %(docstitle)s" -msgstr "Hledání uvnitÅ™ %(docstitle)s" - -#: sphinx/templates/layout.html:128 -msgid "About these documents" -msgstr "O tÄ›chto dokumentech" - -#: sphinx/templates/layout.html:131 sphinx/templates/search.html:2 -#: sphinx/templates/search.html:5 -msgid "Search" -msgstr "Hledání" - -#: sphinx/templates/layout.html:133 -msgid "Copyright" -msgstr "VeÅ¡kerá práva vyhrazena" - -#: sphinx/templates/layout.html:178 -#, python-format -msgid "© Copyright %(copyright)s." -msgstr "© Copyright %(copyright)s." - -#: sphinx/templates/layout.html:180 -#, python-format -msgid "© Copyright %(copyright)s." -msgstr "© Copyright %(copyright)s." - -#: sphinx/templates/layout.html:183 -#, python-format -msgid "Last updated on %(last_updated)s." -msgstr "Naposledy aktualizováno dne %(last_updated)s." - -#: sphinx/templates/layout.html:186 -#, python-format -msgid "" -"Created using Sphinx " -"%(sphinx_version)s." -msgstr "" -"VytvoÅ™eno pomocí Sphinx " -"%(sphinx_version)s." - -#: sphinx/templates/modindex.html:15 -msgid "Most popular modules:" -msgstr "NejpopulárnÄ›jší moduly:" - -#: sphinx/templates/modindex.html:24 -msgid "Show modules only available on these platforms" -msgstr "Zobrazit moduly dostupné na této platformÄ›" - -#: sphinx/templates/modindex.html:56 -msgid "Deprecated" -msgstr "Zastaralé" - -#: sphinx/templates/opensearch.xml:4 -#, python-format -msgid "Search %(docstitle)s" -msgstr "Prohledat %(docstitle)s" - -#: sphinx/templates/page.html:8 -msgid "" -"Note: You requested an out-of-date URL from this server." -" We've tried to redirect you to the new location of this page, but it may" -" not be the right one." -msgstr "" -"Poznámka: Stránka, kterou hledáte, " -"neexistuje.
    Snažili jsme se najít nové umístÄ›ní této stránky, ale " -"nepovedlo se." - -#: sphinx/templates/search.html:7 -#, fuzzy -msgid "" -"From here you can search these documents. Enter your search\n" -" words into the box below and click \"search\". Note that the search\n" -" function will automatically search for all of the words. Pages\n" -" containing fewer words won't appear in the result list." -msgstr "" -"Toto je vyhledávací stránka. Zadejte klíÄová slova do pole níže a " -"kliknÄ›te na \"hledej\". \n" -"Prohledávání funkcí hledá automaticky vÅ¡echna slova. Stránky obsahující" -" slov ménÄ›, nebudou nalezeny." - -#: sphinx/templates/search.html:14 -msgid "search" -msgstr "hledej" - -#: sphinx/templates/search.html:20 -msgid "Your search did not match any results." -msgstr "Nic jsme nenaÅ¡li." - -#: sphinx/templates/changes/frameset.html:5 -#: sphinx/templates/changes/versionchanges.html:12 -#, python-format -msgid "Changes in Version %(version)s — %(docstitle)s" -msgstr "ZmÄ›ny ve verzi %(version)s — %(docstitle)s" - -#: sphinx/templates/changes/rstsource.html:5 -#, python-format -msgid "%(filename)s — %(docstitle)s" -msgstr "%(filename)s — %(docstitle)s" - -#: sphinx/templates/changes/versionchanges.html:17 -#, python-format -msgid "Automatically generated list of changes in version %(version)s" -msgstr "Automaticky generovaný seznam zmÄ›n ve verzi %(version)s" - -#: sphinx/templates/changes/versionchanges.html:18 -msgid "Library changes" -msgstr "ZmÄ›ny v knihovnách" - -#: sphinx/templates/changes/versionchanges.html:23 -msgid "C API changes" -msgstr "ZmÄ›ny API" - -#: sphinx/templates/changes/versionchanges.html:25 -msgid "Other changes" -msgstr "Ostatní zmÄ›ny" - diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/de/LC_MESSAGES/sphinx.js --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/de/LC_MESSAGES/sphinx.js Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -Documentation.addTranslations({"locale": "de", "plural_expr": "(n != 1)", "messages": {"module, in ": "Modul, in ", "Preparing search...": "Suche wird vorbereitet...", "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories.": "Es wurden keine Dokumente gefunden. 
Haben Sie alle Suchworte richtig geschrieben und gen\u00fcgend Kategorien ausgew\u00e4hlt?", "Search finished, found %s page(s) matching the search query.": "Suche beendet, %s Seite(n) mit Ergebnissen wurden gefunden.", ", in ": "", "Permalink to this headline": "Permalink zu dieser \u00dcberschrift", "Searching": "Suchen...", "Permalink to this definition": "Permalink zu dieser Definition", "Hide Search Matches": "Suchergebnisse ausblenden", "Search Results": "Suchergebnisse"}}); \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/de/LC_MESSAGES/sphinx.mo Binary file buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/de/LC_MESSAGES/sphinx.mo has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/de/LC_MESSAGES/sphinx.po --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/de/LC_MESSAGES/sphinx.po Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,601 +0,0 @@ -# German translations for Sphinx. -# Copyright (C) 2008 Translators. -# This file is distributed under the same license as the Sphinx project. -# -msgid "" -msgstr "" -"Project-Id-Version: PROJECT VERSION\n" -"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2008-08-07 21:40+0200\n" -"PO-Revision-Date: 2008-11-27 18:40+0100\n" -"Last-Translator: Horst Gutmann \n" -"Language-Team: de \n" -"Plural-Forms: nplurals=2; plural=(n != 1)\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=utf-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 0.9.4\n" - -#: sphinx/builder.py:408 -#, python-format -msgid "%b %d, %Y" -msgstr "%d. %m. %Y" - -#: sphinx/builder.py:427 sphinx/templates/defindex.html:21 -msgid "General Index" -msgstr "Allgemeiner Index" - -#: sphinx/builder.py:427 -msgid "index" -msgstr "Index" - -#: sphinx/builder.py:429 sphinx/htmlhelp.py:156 -#: sphinx/templates/defindex.html:19 sphinx/templates/modindex.html:2 -#: sphinx/templates/modindex.html:13 -msgid "Global Module Index" -msgstr "Globaler Modulindex" - -#: sphinx/builder.py:429 -msgid "modules" -msgstr "Module" - -#: sphinx/builder.py:466 -msgid "next" -msgstr "weiter" - -#: sphinx/builder.py:473 -msgid "previous" -msgstr "zurück" - -#: sphinx/builder.py:1054 -msgid " (in " -msgstr "" - -#: sphinx/builder.py:1129 -msgid "Builtins" -msgstr "Builtins" - -#: sphinx/builder.py:1131 -msgid "Module level" -msgstr "Modulebene" - -#: sphinx/environment.py:102 sphinx/latexwriter.py:169 -#, python-format -msgid "%B %d, %Y" -msgstr "%d. %m. 
%Y" - -#: sphinx/environment.py:291 sphinx/latexwriter.py:175 -#: sphinx/templates/genindex-single.html:2 -#: sphinx/templates/genindex-split.html:2 -#: sphinx/templates/genindex-split.html:5 sphinx/templates/genindex.html:2 -#: sphinx/templates/genindex.html:5 sphinx/templates/genindex.html:48 -#: sphinx/templates/layout.html:130 -msgid "Index" -msgstr "Stichwortverzeichnis" - -#: sphinx/environment.py:292 sphinx/latexwriter.py:174 -msgid "Module Index" -msgstr "Modulindex" - -#: sphinx/environment.py:293 sphinx/templates/defindex.html:16 -msgid "Search Page" -msgstr "Suche" - -#: sphinx/htmlwriter.py:79 sphinx/static/doctools.js:145 -msgid "Permalink to this definition" -msgstr "Permalink zu dieser Definition" - -#: sphinx/htmlwriter.py:399 sphinx/static/doctools.js:139 -msgid "Permalink to this headline" -msgstr "Permalink zu dieser Ãœberschrift" - -#: sphinx/latexwriter.py:172 -msgid "Release" -msgstr "Release" - -#: sphinx/roles.py:53 sphinx/directives/desc.py:537 -#, python-format -msgid "environment variable; %s" -msgstr "Umgebungsvariable; %s" - -#: sphinx/roles.py:60 -#, python-format -msgid "Python Enhancement Proposals!PEP %s" -msgstr "Python Enhancement Proposals!PEP %s" - -#: sphinx/textwriter.py:166 -#, python-format -msgid "Platform: %s" -msgstr "Plattform: %s" - -#: sphinx/textwriter.py:422 -msgid "[image]" -msgstr "[Bild]" - -#: sphinx/directives/desc.py:25 -#, python-format -msgid "%s() (built-in function)" -msgstr "%s() (eingebaute Funktion)" - -#: sphinx/directives/desc.py:26 sphinx/directives/desc.py:42 -#: sphinx/directives/desc.py:54 -#, python-format -msgid "%s() (in module %s)" -msgstr "%s() (in Modul %s)" - -#: sphinx/directives/desc.py:29 -#, python-format -msgid "%s (built-in variable)" -msgstr "%s (eingebaute Variable)" - -#: sphinx/directives/desc.py:30 sphinx/directives/desc.py:66 -#, python-format -msgid "%s (in module %s)" -msgstr "%s (in Modul %s)" - -#: sphinx/directives/desc.py:33 -#, python-format -msgid "%s (built-in class)" -msgstr "%s (eingebaute Klasse)" - -#: sphinx/directives/desc.py:34 -#, python-format -msgid "%s (class in %s)" -msgstr "%s (Klasse in %s)" - -#: sphinx/directives/desc.py:46 -#, python-format -msgid "%s() (%s.%s method)" -msgstr "%s() (Methode von %s.%s)" - -#: sphinx/directives/desc.py:48 -#, python-format -msgid "%s() (%s method)" -msgstr "%s() (Methode von %s)" - -#: sphinx/directives/desc.py:58 -#, python-format -msgid "%s() (%s.%s static method)" -msgstr "%s() (statische Methode von %s.%s)" - -#: sphinx/directives/desc.py:60 -#, python-format -msgid "%s() (%s static method)" -msgstr "%s() (statische Methode von %s)" - -#: sphinx/directives/desc.py:70 -#, python-format -msgid "%s (%s.%s attribute)" -msgstr "%s (Attribut von %s.%s)" - -#: sphinx/directives/desc.py:72 -#, python-format -msgid "%s (%s attribute)" -msgstr "%s (Attribut von %s)" - -#: sphinx/directives/desc.py:74 -#, python-format -msgid "%s (C function)" -msgstr "%s (C-Funktion)" - -#: sphinx/directives/desc.py:76 -#, python-format -msgid "%s (C member)" -msgstr "%s (C-Member)" - -#: sphinx/directives/desc.py:78 -#, python-format -msgid "%s (C macro)" -msgstr "%s (C-Makro)" - -#: sphinx/directives/desc.py:80 -#, python-format -msgid "%s (C type)" -msgstr "%s (C-Typ)" - -#: sphinx/directives/desc.py:82 -#, python-format -msgid "%s (C variable)" -msgstr "%s (C-Variable)" - -#: sphinx/directives/desc.py:100 -msgid "Raises" -msgstr "Verursacht:" - -#: sphinx/directives/desc.py:104 -msgid "Variable" -msgstr "Variable" - -#: sphinx/directives/desc.py:107 -msgid 
"Returns" -msgstr "Rückgabe" - -#: sphinx/directives/desc.py:116 -msgid "Return type" -msgstr "Rückgabetyp" - -#: sphinx/directives/desc.py:143 -msgid "Parameters" -msgstr "Parameter" - -#: sphinx/directives/desc.py:423 -#, python-format -msgid "%scommand line option; %s" -msgstr "%sKommandozeilenoption; %s" - -#: sphinx/directives/other.py:101 -msgid "Platforms: " -msgstr "Plattformen: " - -#: sphinx/directives/other.py:106 -#, python-format -msgid "%s (module)" -msgstr "%s (Modul)" - -#: sphinx/directives/other.py:146 -msgid "Section author: " -msgstr "Autor des Abschnitts: " - -#: sphinx/directives/other.py:148 -msgid "Module author: " -msgstr "Autor des Moduls: " - -#: sphinx/directives/other.py:150 -msgid "Author: " -msgstr "Autor: " - -#: sphinx/directives/other.py:246 -msgid "See also" -msgstr "Siehe auch" - -#: sphinx/ext/todo.py:31 -msgid "Todo" -msgstr "" - -#: sphinx/ext/todo.py:75 -#, python-format -msgid "(The original entry is located in %s, line %d and can be found " -msgstr "" - -#: sphinx/ext/todo.py:81 -msgid "here" -msgstr "" - -#: sphinx/locale/__init__.py:15 -msgid "Attention" -msgstr "Achtung" - -#: sphinx/locale/__init__.py:16 -msgid "Caution" -msgstr "Vorsicht" - -#: sphinx/locale/__init__.py:17 -msgid "Danger" -msgstr "Gefahr" - -#: sphinx/locale/__init__.py:18 -msgid "Error" -msgstr "Fehler" - -#: sphinx/locale/__init__.py:19 -msgid "Hint" -msgstr "Hinweis" - -#: sphinx/locale/__init__.py:20 -msgid "Important" -msgstr "Wichtig" - -#: sphinx/locale/__init__.py:21 -msgid "Note" -msgstr "Bemerkung" - -#: sphinx/locale/__init__.py:22 -msgid "See Also" -msgstr "Siehe auch" - -#: sphinx/locale/__init__.py:23 -msgid "Tip" -msgstr "Tipp" - -#: sphinx/locale/__init__.py:24 -msgid "Warning" -msgstr "Warnung" - -#: sphinx/locale/__init__.py:28 -#, python-format -msgid "New in version %s" -msgstr "Neu in Version %s" - -#: sphinx/locale/__init__.py:29 -#, python-format -msgid "Changed in version %s" -msgstr "Geändert in Version %s" - -#: sphinx/locale/__init__.py:30 -#, python-format -msgid "Deprecated since version %s" -msgstr "Veraltet ab Version %s" - -#: sphinx/locale/__init__.py:34 -msgid "module" -msgstr "Module" - -#: sphinx/locale/__init__.py:35 -msgid "keyword" -msgstr "Schlüsselwort" - -#: sphinx/locale/__init__.py:36 -msgid "operator" -msgstr "Operator" - -#: sphinx/locale/__init__.py:37 -msgid "object" -msgstr "Objekt" - -#: sphinx/locale/__init__.py:38 -msgid "exception" -msgstr "Exception" - -#: sphinx/locale/__init__.py:39 -msgid "statement" -msgstr "Statement" - -#: sphinx/locale/__init__.py:40 -msgid "built-in function" -msgstr "eingebaute Funktion" - -#: sphinx/static/doctools.js:174 -msgid "Hide Search Matches" -msgstr "Suchergebnisse ausblenden" - -#: sphinx/static/searchtools.js:274 -msgid "Searching" -msgstr "Suchen..." - -#: sphinx/static/searchtools.js:279 -msgid "Preparing search..." -msgstr "Suche wird vorbereitet..." - -#: sphinx/static/searchtools.js:338 -msgid "module, in " -msgstr "Modul, in " - -#: sphinx/static/searchtools.js:347 -msgid ", in " -msgstr "" - -#: sphinx/static/searchtools.js:447 sphinx/templates/search.html:18 -msgid "Search Results" -msgstr "Suchergebnisse" - -#: sphinx/static/searchtools.js:449 -msgid "" -"Your search did not match any documents. Please make sure that all words " -"are spelled correctly and that you've selected enough categories." -msgstr "" -"Es wurden keine Dokumente gefunden. Haben Sie alle Suchworte richtig " -"geschrieben und genügend Kategorien ausgewählt?" 
- -#: sphinx/static/searchtools.js:451 -#, python-format -msgid "Search finished, found %s page(s) matching the search query." -msgstr "Suche beendet, %s Seite(n) mit Ergebnissen wurden gefunden." - -#: sphinx/templates/defindex.html:2 -msgid "Overview" -msgstr "Ãœbersicht" - -#: sphinx/templates/defindex.html:11 -msgid "Indices and tables:" -msgstr "Indizes und Tabellen:" - -#: sphinx/templates/defindex.html:14 -msgid "Complete Table of Contents" -msgstr "Vollständiges Inhaltsverzeichnis" - -#: sphinx/templates/defindex.html:15 -msgid "lists all sections and subsections" -msgstr "Liste aller Kapitel und Unterkapitel" - -#: sphinx/templates/defindex.html:17 -msgid "search this documentation" -msgstr "Durchsuche diese Dokumentation" - -#: sphinx/templates/defindex.html:20 -msgid "quick access to all modules" -msgstr "Schneller Zugriff auf alle Module" - -#: sphinx/templates/defindex.html:22 -msgid "all functions, classes, terms" -msgstr "Alle Funktionen, Klassen, Begriffe" - -#: sphinx/templates/genindex-single.html:5 -#, python-format -msgid "Index – %(key)s" -msgstr "Stichwortverzeichnis – %(key)s" - -#: sphinx/templates/genindex-single.html:44 -#: sphinx/templates/genindex-split.html:14 -#: sphinx/templates/genindex-split.html:27 sphinx/templates/genindex.html:54 -msgid "Full index on one page" -msgstr "Gesamtes Stichwortverzeichnis auf einer Seite" - -#: sphinx/templates/genindex-split.html:7 -msgid "Index pages by letter" -msgstr "Stichwortverzeichnis nach Anfangsbuchstabe" - -#: sphinx/templates/genindex-split.html:15 -msgid "can be huge" -msgstr "kann groß sein" - -#: sphinx/templates/layout.html:9 -msgid "Navigation" -msgstr "Navigation" - -#: sphinx/templates/layout.html:40 -msgid "Table Of Contents" -msgstr "Inhalt" - -#: sphinx/templates/layout.html:46 -msgid "Previous topic" -msgstr "Vorheriges Thema" - -#: sphinx/templates/layout.html:47 -msgid "previous chapter" -msgstr "vorheriges Kapitel" - -#: sphinx/templates/layout.html:50 -msgid "Next topic" -msgstr "Nächstes Thema" - -#: sphinx/templates/layout.html:51 -msgid "next chapter" -msgstr "nächstes Kapitel" - -#: sphinx/templates/layout.html:55 -msgid "This Page" -msgstr "Diese Seite" - -#: sphinx/templates/layout.html:59 -msgid "Suggest Change" -msgstr "Änderung vorschlagen" - -#: sphinx/templates/layout.html:60 sphinx/templates/layout.html:62 -msgid "Show Source" -msgstr "Quelltext anzeigen" - -#: sphinx/templates/layout.html:71 -msgid "Quick search" -msgstr "Schnellsuche" - -#: sphinx/templates/layout.html:71 -msgid "Keyword search" -msgstr "Stichwortsuche" - -#: sphinx/templates/layout.html:73 -msgid "Go" -msgstr "Los" - -#: sphinx/templates/layout.html:78 -msgid "Enter a module, class or function name." -msgstr "Gib einen Modul-, Klassen- oder Funktionsnamen an." - -#: sphinx/templates/layout.html:119 -#, python-format -msgid "Search within %(docstitle)s" -msgstr "Suche in %(docstitle)s" - -#: sphinx/templates/layout.html:128 -msgid "About these documents" -msgstr "Ãœber diese Dokumentation" - -#: sphinx/templates/layout.html:131 sphinx/templates/search.html:2 -#: sphinx/templates/search.html:5 -msgid "Search" -msgstr "Suche" - -#: sphinx/templates/layout.html:133 -msgid "Copyright" -msgstr "Copyright" - -#: sphinx/templates/layout.html:178 -#, python-format -msgid "© Copyright %(copyright)s." -msgstr "© Copyright %(copyright)s." - -#: sphinx/templates/layout.html:180 -#, python-format -msgid "© Copyright %(copyright)s." -msgstr "© Copyright %(copyright)s." 
- -#: sphinx/templates/layout.html:183 -#, python-format -msgid "Last updated on %(last_updated)s." -msgstr "Zuletzt aktualisiert am %(last_updated)s." - -#: sphinx/templates/layout.html:186 -#, python-format -msgid "" -"Created using Sphinx " -"%(sphinx_version)s." -msgstr "" -"Mit Sphinx %(sphinx_version)s " -"erstellt." - -#: sphinx/templates/modindex.html:15 -msgid "Most popular modules:" -msgstr "Beliebteste Module:" - -#: sphinx/templates/modindex.html:24 -msgid "Show modules only available on these platforms" -msgstr "Zeige nur Module, die auf diesen Plattformen verfügbar sind" - -#: sphinx/templates/modindex.html:56 -msgid "Deprecated" -msgstr "Veraltet" - -#: sphinx/templates/opensearch.xml:4 -#, python-format -msgid "Search %(docstitle)s" -msgstr "Suche in %(docstitle)s" - -#: sphinx/templates/page.html:8 -msgid "" -"Note: You requested an out-of-date URL from this server." -" We've tried to redirect you to the new location of this page, but it may" -" not be the right one." -msgstr "" -"Anmerkung: Du hast eine nicht länger gültige URL von " -"diesem Server angefragt. Wir haben versucht dich auf die neue Adresse " -"dieser Seite umzuleiten, aber dies muss nicht die richtige Seite sein." - -#: sphinx/templates/search.html:7 -#, fuzzy -msgid "" -"From here you can search these documents. Enter your search\n" -" words into the box below and click \"search\". Note that the search\n" -" function will automatically search for all of the words. Pages\n" -" containing fewer words won't appear in the result list." -msgstr "" -"Von hier aus kannst du die Dokumentation durchsuchen. Gib deine " -"Suchbegriffe in das untenstehende Feld ein und klicke auf \"suchen\". " -"Bitte beachte, dass die Suchfunktion automatisch nach all diesen Worten " -"suchen wird. Seiten, die nicht alle Worte enthalten, werden nicht in der " -"Ergebnisliste erscheinen." - -#: sphinx/templates/search.html:14 -msgid "search" -msgstr "suchen" - -#: sphinx/templates/search.html:20 -msgid "Your search did not match any results." -msgstr "Deine Suche ergab leider keine Treffer." 
- -#: sphinx/templates/changes/frameset.html:5 -#: sphinx/templates/changes/versionchanges.html:12 -#, python-format -msgid "Changes in Version %(version)s — %(docstitle)s" -msgstr "Änderungen in Version %(version)s — %(docstitle)s" - -#: sphinx/templates/changes/rstsource.html:5 -#, python-format -msgid "%(filename)s — %(docstitle)s" -msgstr "%(filename)s — %(docstitle)s" - -#: sphinx/templates/changes/versionchanges.html:17 -#, python-format -msgid "Automatically generated list of changes in version %(version)s" -msgstr "Automatisch generierte Liste der Änderungen in Version %(version)s" - -#: sphinx/templates/changes/versionchanges.html:18 -msgid "Library changes" -msgstr "Bibliotheksänderungen" - -#: sphinx/templates/changes/versionchanges.html:23 -msgid "C API changes" -msgstr "C API-Änderungen" - -#: sphinx/templates/changes/versionchanges.html:25 -msgid "Other changes" -msgstr "Andere Änderungen" - diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/es/LC_MESSAGES/sphinx.js --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/es/LC_MESSAGES/sphinx.js Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -Documentation.addTranslations({"locale": "es", "plural_expr": "(n != 1)", "messages": {"module, in ": "m\u00f3dulo", "Preparing search...": "Preparando la b\u00fasqueda", "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories.": "La b\u00fasqueda no dio ning\u00fan resultado. Por favor aseg\u00farese que escribi\u00f3 todas las palabras correctamente y que ha seleccionado suficientes categor\u00edas", "Search finished, found %s page(s) matching the search query.": "B\u00fasqueda finalizada, se han encontrado %s p\u00e1gina(s) que concuerdan con su consulta", ", in ": "", "Permalink to this headline": "Enlazar permanentemente con este t\u00edtulo", "Searching": "Buscando", "Permalink to this definition": "Enlazar permanentemente con esta definici\u00f3n", "Hide Search Matches": "Coincidencias de la b\u00fasqueda", "Search Results": "Resultados de la b\u00fasqueda"}}); \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/es/LC_MESSAGES/sphinx.mo Binary file buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/es/LC_MESSAGES/sphinx.mo has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/es/LC_MESSAGES/sphinx.po --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/es/LC_MESSAGES/sphinx.po Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,611 +0,0 @@ -# Spanish translations for Sphinx. -# Copyright (C) 2008 ORGANIZATION -# This file is distributed under the same license as the Sphinx project. -# FIRST AUTHOR , 2008. 
-# -msgid "" -msgstr "" -"Project-Id-Version: Sphinx 0.5\n" -"Report-Msgid-Bugs-To: guillem@torroja.dmt.upm.es\n" -"POT-Creation-Date: 2008-09-11 23:58+0200\n" -"PO-Revision-Date: 2008-11-27 18:40+0100\n" -"Last-Translator: Guillem Borrell \n" -"Language-Team: es \n" -"Plural-Forms: nplurals=2; plural=(n != 1)\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=utf-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 0.9.4\n" - -#: sphinx/builder.py:408 -#, python-format -msgid "%b %d, %Y" -msgstr "%d %b, %Y" - -#: sphinx/builder.py:427 sphinx/templates/defindex.html:21 -msgid "General Index" -msgstr "Ãndice General" - -#: sphinx/builder.py:427 -msgid "index" -msgstr "índice" - -#: sphinx/builder.py:429 sphinx/htmlhelp.py:156 -#: sphinx/templates/defindex.html:19 sphinx/templates/modindex.html:2 -#: sphinx/templates/modindex.html:13 -msgid "Global Module Index" -msgstr "Ãndice Global de Módulos" - -#: sphinx/builder.py:429 -msgid "modules" -msgstr "módulos" - -#: sphinx/builder.py:466 -msgid "next" -msgstr "siguiente" - -#: sphinx/builder.py:473 -msgid "previous" -msgstr "anterior" - -#: sphinx/builder.py:1054 -msgid " (in " -msgstr "" - -#: sphinx/builder.py:1129 -#, fuzzy -msgid "Builtins" -msgstr "Funciones de base" - -#: sphinx/builder.py:1131 -#, fuzzy -msgid "Module level" -msgstr "Módulos" - -#: sphinx/environment.py:102 sphinx/latexwriter.py:169 -#, fuzzy, python-format -msgid "%B %d, %Y" -msgstr "%d de %B de %Y" - -#: sphinx/environment.py:291 sphinx/latexwriter.py:175 -#: sphinx/templates/genindex-single.html:2 -#: sphinx/templates/genindex-split.html:2 -#: sphinx/templates/genindex-split.html:5 sphinx/templates/genindex.html:2 -#: sphinx/templates/genindex.html:5 sphinx/templates/genindex.html:48 -#: sphinx/templates/layout.html:130 -msgid "Index" -msgstr "Ãndice" - -#: sphinx/environment.py:292 sphinx/latexwriter.py:174 -msgid "Module Index" -msgstr "Ãndice de Módulos" - -#: sphinx/environment.py:293 sphinx/templates/defindex.html:16 -msgid "Search Page" -msgstr "Página de Búsqueda" - -#: sphinx/htmlwriter.py:79 sphinx/static/doctools.js:145 -msgid "Permalink to this definition" -msgstr "Enlazar permanentemente con esta definición" - -#: sphinx/htmlwriter.py:399 sphinx/static/doctools.js:139 -msgid "Permalink to this headline" -msgstr "Enlazar permanentemente con este título" - -#: sphinx/latexwriter.py:172 -#, fuzzy -msgid "Release" -msgstr "Versión" - -#: sphinx/roles.py:53 sphinx/directives/desc.py:537 -#, python-format -msgid "environment variable; %s" -msgstr "variables de entorno; %s" - -#: sphinx/roles.py:60 -#, python-format -msgid "Python Enhancement Proposals!PEP %s" -msgstr "Python Enhancement Proposals!PEP %s" - -#: sphinx/textwriter.py:166 -#, python-format -msgid "Platform: %s" -msgstr "Plataforma: %s" - -#: sphinx/textwriter.py:422 -msgid "[image]" -msgstr "[imagen]" - -#: sphinx/directives/desc.py:25 -#, fuzzy, python-format -msgid "%s() (built-in function)" -msgstr "%s() (función de base)" - -#: sphinx/directives/desc.py:26 sphinx/directives/desc.py:42 -#: sphinx/directives/desc.py:54 -#, python-format -msgid "%s() (in module %s)" -msgstr "%s() (en el módulo %s)" - -#: sphinx/directives/desc.py:29 -#, fuzzy, python-format -msgid "%s (built-in variable)" -msgstr "%s (variable de base)" - -#: sphinx/directives/desc.py:30 sphinx/directives/desc.py:66 -#, python-format -msgid "%s (in module %s)" -msgstr "%s (en el módulo %s)" - -#: sphinx/directives/desc.py:33 -#, fuzzy, python-format -msgid "%s (built-in class)" -msgstr "%s (variable de base)" 
- -#: sphinx/directives/desc.py:34 -#, python-format -msgid "%s (class in %s)" -msgstr "%s (clase en %s)" - -#: sphinx/directives/desc.py:46 -#, python-format -msgid "%s() (%s.%s method)" -msgstr "%s() (%s.%s método)" - -#: sphinx/directives/desc.py:48 -#, python-format -msgid "%s() (%s method)" -msgstr "%s() (%s método)" - -#: sphinx/directives/desc.py:58 -#, python-format -msgid "%s() (%s.%s static method)" -msgstr "%s() (%s.%s método estático)" - -#: sphinx/directives/desc.py:60 -#, python-format -msgid "%s() (%s static method)" -msgstr "%s() (%s método estático)" - -#: sphinx/directives/desc.py:70 -#, python-format -msgid "%s (%s.%s attribute)" -msgstr "%s (%s.%s atributo)" - -#: sphinx/directives/desc.py:72 -#, python-format -msgid "%s (%s attribute)" -msgstr "%s (%s atributo)" - -#: sphinx/directives/desc.py:74 -#, python-format -msgid "%s (C function)" -msgstr "%s (función C)" - -#: sphinx/directives/desc.py:76 -#, python-format -msgid "%s (C member)" -msgstr "%s (miembro C)" - -#: sphinx/directives/desc.py:78 -#, python-format -msgid "%s (C macro)" -msgstr "%s (macro C)" - -#: sphinx/directives/desc.py:80 -#, python-format -msgid "%s (C type)" -msgstr "%s (tipo C)" - -#: sphinx/directives/desc.py:82 -#, python-format -msgid "%s (C variable)" -msgstr "%s (variable C)" - -#: sphinx/directives/desc.py:100 -msgid "Raises" -msgstr "Muestra" - -#: sphinx/directives/desc.py:104 -msgid "Variable" -msgstr "Variable" - -#: sphinx/directives/desc.py:107 -msgid "Returns" -msgstr "Devuelve" - -#: sphinx/directives/desc.py:116 -#, fuzzy -msgid "Return type" -msgstr "Tipo del argumento devuelto" - -#: sphinx/directives/desc.py:143 -msgid "Parameters" -msgstr "Parámetros" - -#: sphinx/directives/desc.py:423 -#, fuzzy, python-format -msgid "%scommand line option; %s" -msgstr "%sOpciones en línea de comandos; %s" - -#: sphinx/directives/other.py:101 -msgid "Platforms: " -msgstr "Plataformas:" - -#: sphinx/directives/other.py:106 -#, python-format -msgid "%s (module)" -msgstr "%s (módulo)" - -#: sphinx/directives/other.py:146 -msgid "Section author: " -msgstr "Autor de la sección" - -#: sphinx/directives/other.py:148 -msgid "Module author: " -msgstr "Autor del módulo" - -#: sphinx/directives/other.py:150 -msgid "Author: " -msgstr "Autor:" - -#: sphinx/directives/other.py:246 -msgid "See also" -msgstr "Ver también" - -#: sphinx/ext/todo.py:31 -msgid "Todo" -msgstr "" - -#: sphinx/ext/todo.py:75 -#, python-format -msgid "(The original entry is located in %s, line %d and can be found " -msgstr "" - -#: sphinx/ext/todo.py:81 -msgid "here" -msgstr "" - -#: sphinx/locale/__init__.py:15 -msgid "Attention" -msgstr "Atención" - -#: sphinx/locale/__init__.py:16 -msgid "Caution" -msgstr "Prudencia" - -#: sphinx/locale/__init__.py:17 -msgid "Danger" -msgstr "Peligro" - -#: sphinx/locale/__init__.py:18 -msgid "Error" -msgstr "Error" - -#: sphinx/locale/__init__.py:19 -msgid "Hint" -msgstr "Consejo" - -#: sphinx/locale/__init__.py:20 -msgid "Important" -msgstr "Importante" - -#: sphinx/locale/__init__.py:21 -msgid "Note" -msgstr "Nota" - -#: sphinx/locale/__init__.py:22 -msgid "See Also" -msgstr "Ver También" - -#: sphinx/locale/__init__.py:23 -msgid "Tip" -msgstr "Truco" - -#: sphinx/locale/__init__.py:24 -msgid "Warning" -msgstr "Advertencia" - -#: sphinx/locale/__init__.py:28 -#, python-format -msgid "New in version %s" -msgstr "Nuevo en la versión %s" - -#: sphinx/locale/__init__.py:29 -#, python-format -msgid "Changed in version %s" -msgstr "Distinto en la versión %s" - -#: sphinx/locale/__init__.py:30 -#, 
python-format -msgid "Deprecated since version %s" -msgstr "Obsoleto desde la versión %s" - -#: sphinx/locale/__init__.py:34 -msgid "module" -msgstr "módulo" - -#: sphinx/locale/__init__.py:35 -msgid "keyword" -msgstr "palabra clave" - -#: sphinx/locale/__init__.py:36 -msgid "operator" -msgstr "operador" - -#: sphinx/locale/__init__.py:37 -msgid "object" -msgstr "objeto" - -#: sphinx/locale/__init__.py:38 -msgid "exception" -msgstr "excepción" - -#: sphinx/locale/__init__.py:39 -msgid "statement" -msgstr "sentencia" - -#: sphinx/locale/__init__.py:40 -#, fuzzy -msgid "built-in function" -msgstr "función de base" - -#: sphinx/static/doctools.js:174 -#, fuzzy -msgid "Hide Search Matches" -msgstr "Coincidencias de la búsqueda" - -#: sphinx/static/searchtools.js:274 -msgid "Searching" -msgstr "Buscando" - -#: sphinx/static/searchtools.js:279 -msgid "Preparing search..." -msgstr "Preparando la búsqueda" - -#: sphinx/static/searchtools.js:338 -#, fuzzy -msgid "module, in " -msgstr "módulo" - -#: sphinx/static/searchtools.js:347 -msgid ", in " -msgstr "" - -#: sphinx/static/searchtools.js:447 sphinx/templates/search.html:18 -msgid "Search Results" -msgstr "Resultados de la búsqueda" - -#: sphinx/static/searchtools.js:449 -msgid "" -"Your search did not match any documents. Please make sure that all words " -"are spelled correctly and that you've selected enough categories." -msgstr "" -"La búsqueda no dio ningún resultado. Por favor asegúrese que escribió " -"todas las palabras correctamente y que ha seleccionado suficientes " -"categorías" - -#: sphinx/static/searchtools.js:451 -#, python-format -msgid "Search finished, found %s page(s) matching the search query." -msgstr "" -"Búsqueda finalizada, se han encontrado %s página(s) que concuerdan con su" -" consulta" - -#: sphinx/templates/defindex.html:2 -msgid "Overview" -msgstr "Resumen" - -#: sphinx/templates/defindex.html:11 -msgid "Indices and tables:" -msgstr "Ãndices y tablas:" - -#: sphinx/templates/defindex.html:14 -msgid "Complete Table of Contents" -msgstr "Ãndice de contenidos completo" - -#: sphinx/templates/defindex.html:15 -msgid "lists all sections and subsections" -msgstr "Muestra todas las secciones" - -#: sphinx/templates/defindex.html:17 -msgid "search this documentation" -msgstr "buscar en esta documentación" - -#: sphinx/templates/defindex.html:20 -msgid "quick access to all modules" -msgstr "acceso rápido a todos los módulos" - -#: sphinx/templates/defindex.html:22 -msgid "all functions, classes, terms" -msgstr "todas las funciones, clases, términos" - -#: sphinx/templates/genindex-single.html:5 -#, python-format -msgid "Index – %(key)s" -msgstr "Ãndice – %(key)s" - -#: sphinx/templates/genindex-single.html:44 -#: sphinx/templates/genindex-split.html:14 -#: sphinx/templates/genindex-split.html:27 sphinx/templates/genindex.html:54 -msgid "Full index on one page" -msgstr "Ãndice completo en una página" - -#: sphinx/templates/genindex-split.html:7 -msgid "Index pages by letter" -msgstr "Ãndice alfabético" - -#: sphinx/templates/genindex-split.html:15 -msgid "can be huge" -msgstr "puede ser muy grande" - -#: sphinx/templates/layout.html:9 -msgid "Navigation" -msgstr "Navegación" - -#: sphinx/templates/layout.html:40 -msgid "Table Of Contents" -msgstr "Contenidos" - -#: sphinx/templates/layout.html:46 -msgid "Previous topic" -msgstr "Tema anterior" - -#: sphinx/templates/layout.html:47 -msgid "previous chapter" -msgstr "Capítulo anterior" - -#: sphinx/templates/layout.html:50 -msgid "Next topic" -msgstr "Próximo tema" - -#: 
sphinx/templates/layout.html:51 -msgid "next chapter" -msgstr "Próximo capítulo" - -#: sphinx/templates/layout.html:55 -msgid "This Page" -msgstr "Esta página" - -#: sphinx/templates/layout.html:59 -msgid "Suggest Change" -msgstr "Sugerir una modificación" - -#: sphinx/templates/layout.html:60 sphinx/templates/layout.html:62 -msgid "Show Source" -msgstr "Enseñar el código" - -#: sphinx/templates/layout.html:71 -msgid "Quick search" -msgstr "Búsqueda rápida" - -#: sphinx/templates/layout.html:71 -msgid "Keyword search" -msgstr "Búsqueda por palabras clave" - -#: sphinx/templates/layout.html:73 -msgid "Go" -msgstr "Ir a" - -#: sphinx/templates/layout.html:78 -msgid "Enter a module, class or function name." -msgstr "Introducir en nombre de un módulo, clase o función" - -#: sphinx/templates/layout.html:119 -#, python-format -msgid "Search within %(docstitle)s" -msgstr "Buscar en %(docstitle)s" - -#: sphinx/templates/layout.html:128 -msgid "About these documents" -msgstr "Sobre este documento" - -#: sphinx/templates/layout.html:131 sphinx/templates/search.html:2 -#: sphinx/templates/search.html:5 -msgid "Search" -msgstr "Búsqueda" - -#: sphinx/templates/layout.html:133 -msgid "Copyright" -msgstr "Copyright" - -#: sphinx/templates/layout.html:178 -#, python-format -msgid "© Copyright %(copyright)s." -msgstr "© Copyright %(copyright)s." - -#: sphinx/templates/layout.html:180 -#, python-format -msgid "© Copyright %(copyright)s." -msgstr "© Copyright %(copyright)s." - -#: sphinx/templates/layout.html:183 -#, python-format -msgid "Last updated on %(last_updated)s." -msgstr "Actualizado por última vez en %(last_updated)s." - -#: sphinx/templates/layout.html:186 -#, python-format -msgid "" -"Created using Sphinx " -"%(sphinx_version)s." -msgstr "" -"Creado con Sphinx " -"%(sphinx_version)s." - -#: sphinx/templates/modindex.html:15 -msgid "Most popular modules:" -msgstr "Módulos más comunes:" - -#: sphinx/templates/modindex.html:24 -msgid "Show modules only available on these platforms" -msgstr "Mostrar sólo los módulos disponibles en estas plataformas" - -#: sphinx/templates/modindex.html:56 -msgid "Deprecated" -msgstr "Obsoleto" - -#: sphinx/templates/opensearch.xml:4 -#, python-format -msgid "Search %(docstitle)s" -msgstr "Buscar en %(docstitle)s" - -#: sphinx/templates/page.html:8 -msgid "" -"Note: You requested an out-of-date URL from this server." -" We've tried to redirect you to the new location of this page, but it may" -" not be the right one." -msgstr "" -"Nota: Has solicitado una dirección desactualizada a este" -" servidor. Hemos intentado redirigirte a la nueva dirección de la misma " -"página pero puede no ser la correcta." - -#: sphinx/templates/search.html:7 -#, fuzzy -msgid "" -"From here you can search these documents. Enter your search\n" -" words into the box below and click \"search\". Note that the search\n" -" function will automatically search for all of the words. Pages\n" -" containing fewer words won't appear in the result list." -msgstr "" -"Este es el diálogo de búsqueda. Introduce los términos en el diálogo " -"siguiente y pulsa \"buscar\". El asistente buscará automáticamente todas" -" las palabras. Las páginas que contengan menos palabras no aparecerán en" -" la lista de resultados." - -#: sphinx/templates/search.html:14 -msgid "search" -msgstr "buscar" - -#: sphinx/templates/search.html:20 -msgid "Your search did not match any results." 
-msgstr "Tu consulta no obtuvo ningún resultado" - -#: sphinx/templates/changes/frameset.html:5 -#: sphinx/templates/changes/versionchanges.html:12 -#, python-format -msgid "Changes in Version %(version)s — %(docstitle)s" -msgstr "Cambios en la versión %(version)s — %(docstitle)s" - -#: sphinx/templates/changes/rstsource.html:5 -#, python-format -msgid "%(filename)s — %(docstitle)s" -msgstr "%(filename)s — %(docstitle)s" - -#: sphinx/templates/changes/versionchanges.html:17 -#, python-format -msgid "Automatically generated list of changes in version %(version)s" -msgstr "Lista de cambios generada automáticamente en la versión %(version)s" - -#: sphinx/templates/changes/versionchanges.html:18 -msgid "Library changes" -msgstr "Cambios en la biblioteca" - -#: sphinx/templates/changes/versionchanges.html:23 -msgid "C API changes" -msgstr "Cambios en la API C" - -#: sphinx/templates/changes/versionchanges.html:25 -msgid "Other changes" -msgstr "Otros cambios" - diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/fr/LC_MESSAGES/sphinx.js --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/fr/LC_MESSAGES/sphinx.js Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -Documentation.addTranslations({"locale": "fr", "plural_expr": "(n > 1)", "messages": {"module, in ": "module, dans", "Preparing search...": "Pr\u00e9paration de la recherche...", "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories.": "Votre recherche ne correspond \u00e0 aucun document. V\u00e9rifiez l'orthographe des termes de recherche et que vous avez s\u00e9lectionn\u00e9 suffisamment de cat\u00e9gories.", "Search finished, found %s page(s) matching the search query.": "La recherche est termin\u00e9e, %s page(s) correspond(ent) \u00e0 la requ\u00eate.", ", in ": ", dans", "Permalink to this headline": "Lien permanent vers ce titre", "Searching": "En cours de recherche", "Permalink to this definition": "Lien permanent vers cette d\u00e9finition", "Hide Search Matches": "Cacher les r\u00e9sultats de la recherche", "Search Results": "R\u00e9sultats de la recherche"}}); \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/fr/LC_MESSAGES/sphinx.mo Binary file buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/fr/LC_MESSAGES/sphinx.mo has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/fr/LC_MESSAGES/sphinx.po --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/fr/LC_MESSAGES/sphinx.po Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,606 +0,0 @@ -# French translations for Sphinx. -# Copyright (C) 2008 ORGANIZATION -# This file is distributed under the same license as the Sphinx project. -# David Larlet , 2008. -# Sebastien Douche , 2008. 
-# -msgid "" -msgstr "" -"Project-Id-Version: Sphinx 0.5\n" -"Report-Msgid-Bugs-To: larlet@gmail.com\n" -"POT-Creation-Date: 2008-08-08 12:39+0000\n" -"PO-Revision-Date: 2008-11-27 18:40+0100\n" -"Last-Translator: Sébastien Douche \n" -"Language-Team: French Translation Team \n" -"Plural-Forms: nplurals=2; plural=(n > 1)\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=utf-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 0.9.4\n" - -#: sphinx/builder.py:408 -#, python-format -msgid "%b %d, %Y" -msgstr "%d %b %Y" - -#: sphinx/builder.py:427 sphinx/templates/defindex.html:21 -msgid "General Index" -msgstr "Index général" - -#: sphinx/builder.py:427 -msgid "index" -msgstr "index" - -#: sphinx/builder.py:429 sphinx/htmlhelp.py:156 -#: sphinx/templates/defindex.html:19 sphinx/templates/modindex.html:2 -#: sphinx/templates/modindex.html:13 -msgid "Global Module Index" -msgstr "Index général des modules" - -#: sphinx/builder.py:429 -msgid "modules" -msgstr "modules" - -#: sphinx/builder.py:466 -msgid "next" -msgstr "suivant" - -#: sphinx/builder.py:473 -msgid "previous" -msgstr "précédent" - -#: sphinx/builder.py:1054 -msgid " (in " -msgstr "(dans" - -#: sphinx/builder.py:1129 -msgid "Builtins" -msgstr "Fonctions de base" - -#: sphinx/builder.py:1131 -msgid "Module level" -msgstr "Module" - -#: sphinx/environment.py:102 sphinx/latexwriter.py:169 -#, python-format -msgid "%B %d, %Y" -msgstr "%d %B %Y" - -#: sphinx/environment.py:291 sphinx/latexwriter.py:175 -#: sphinx/templates/genindex-single.html:2 -#: sphinx/templates/genindex-split.html:2 -#: sphinx/templates/genindex-split.html:5 sphinx/templates/genindex.html:2 -#: sphinx/templates/genindex.html:5 sphinx/templates/genindex.html:48 -#: sphinx/templates/layout.html:130 -msgid "Index" -msgstr "Index" - -#: sphinx/environment.py:292 sphinx/latexwriter.py:174 -msgid "Module Index" -msgstr "Index du module" - -#: sphinx/environment.py:293 sphinx/templates/defindex.html:16 -msgid "Search Page" -msgstr "Page de recherche" - -#: sphinx/htmlwriter.py:79 sphinx/static/doctools.js:145 -msgid "Permalink to this definition" -msgstr "Lien permanent vers cette définition" - -#: sphinx/htmlwriter.py:399 sphinx/static/doctools.js:139 -msgid "Permalink to this headline" -msgstr "Lien permanent vers ce titre" - -#: sphinx/latexwriter.py:172 -msgid "Release" -msgstr "Version" - -#: sphinx/roles.py:53 sphinx/directives/desc.py:537 -#, python-format -msgid "environment variable; %s" -msgstr "variable d'environnement; %s" - -#: sphinx/roles.py:60 -#, python-format -msgid "Python Enhancement Proposals!PEP %s" -msgstr "Python Enhancement Proposals!PEP %s" - -#: sphinx/textwriter.py:166 -#, python-format -msgid "Platform: %s" -msgstr "Plateforme : %s" - -#: sphinx/textwriter.py:422 -msgid "[image]" -msgstr "[image]" - -#: sphinx/directives/desc.py:25 -#, python-format -msgid "%s() (built-in function)" -msgstr "%s() (fonction de base)" - -#: sphinx/directives/desc.py:26 sphinx/directives/desc.py:42 -#: sphinx/directives/desc.py:54 -#, python-format -msgid "%s() (in module %s)" -msgstr "%s() (dans le module %s)" - -#: sphinx/directives/desc.py:29 -#, python-format -msgid "%s (built-in variable)" -msgstr "%s (variable de base)" - -#: sphinx/directives/desc.py:30 sphinx/directives/desc.py:66 -#, python-format -msgid "%s (in module %s)" -msgstr "%s (dans le module %s)" - -#: sphinx/directives/desc.py:33 -#, python-format -msgid "%s (built-in class)" -msgstr "%s (classe de base)" - -#: sphinx/directives/desc.py:34 -#, python-format -msgid "%s 
(class in %s)" -msgstr "%s (classe dans %s)" - -#: sphinx/directives/desc.py:46 -#, python-format -msgid "%s() (%s.%s method)" -msgstr "%s() (méthode %s.%s)" - -#: sphinx/directives/desc.py:48 -#, python-format -msgid "%s() (%s method)" -msgstr "%s() (méthode %s)" - -#: sphinx/directives/desc.py:58 -#, python-format -msgid "%s() (%s.%s static method)" -msgstr "%s() (méthode statique %s.%s)" - -#: sphinx/directives/desc.py:60 -#, python-format -msgid "%s() (%s static method)" -msgstr "%s() (méthode statique %s)" - -#: sphinx/directives/desc.py:70 -#, python-format -msgid "%s (%s.%s attribute)" -msgstr "%s (attribut %s.%s)" - -#: sphinx/directives/desc.py:72 -#, python-format -msgid "%s (%s attribute)" -msgstr "%s (attribut %s)" - -#: sphinx/directives/desc.py:74 -#, python-format -msgid "%s (C function)" -msgstr "%s (fonction C)" - -#: sphinx/directives/desc.py:76 -#, python-format -msgid "%s (C member)" -msgstr "%s (membre C)" - -#: sphinx/directives/desc.py:78 -#, python-format -msgid "%s (C macro)" -msgstr "%s (macro C)" - -#: sphinx/directives/desc.py:80 -#, python-format -msgid "%s (C type)" -msgstr "%s (type C)" - -#: sphinx/directives/desc.py:82 -#, python-format -msgid "%s (C variable)" -msgstr "%s (variable C)" - -#: sphinx/directives/desc.py:100 -msgid "Raises" -msgstr "Lève" - -#: sphinx/directives/desc.py:104 -msgid "Variable" -msgstr "Variable" - -#: sphinx/directives/desc.py:107 -msgid "Returns" -msgstr "Retourne" - -#: sphinx/directives/desc.py:116 -msgid "Return type" -msgstr "Type retourné" - -#: sphinx/directives/desc.py:143 -msgid "Parameters" -msgstr "Paramètres" - -#: sphinx/directives/desc.py:423 -#, python-format -msgid "%scommand line option; %s" -msgstr "%soption de ligne de commande; %s" - -#: sphinx/directives/other.py:101 -msgid "Platforms: " -msgstr "Plateformes : " - -#: sphinx/directives/other.py:106 -#, python-format -msgid "%s (module)" -msgstr "%s (module)" - -#: sphinx/directives/other.py:146 -msgid "Section author: " -msgstr "Auteur de la section : " - -#: sphinx/directives/other.py:148 -msgid "Module author: " -msgstr "Auteur du module : " - -#: sphinx/directives/other.py:150 -msgid "Author: " -msgstr "Auteur : " - -#: sphinx/directives/other.py:246 -msgid "See also" -msgstr "Voir aussi" - -#: sphinx/ext/todo.py:31 -msgid "Todo" -msgstr "A faire" - -#: sphinx/ext/todo.py:75 -#, python-format -msgid "(The original entry is located in %s, line %d and can be found " -msgstr "(L'entrée orginale se trouve dans %s, à la ligne %d et peut être trouvé" - -#: sphinx/ext/todo.py:81 -msgid "here" -msgstr "ici" - -#: sphinx/locale/__init__.py:15 -msgid "Attention" -msgstr "Attention" - -#: sphinx/locale/__init__.py:16 -msgid "Caution" -msgstr "Prudence" - -#: sphinx/locale/__init__.py:17 -msgid "Danger" -msgstr "Danger" - -#: sphinx/locale/__init__.py:18 -msgid "Error" -msgstr "Erreur" - -#: sphinx/locale/__init__.py:19 -msgid "Hint" -msgstr "Indice" - -#: sphinx/locale/__init__.py:20 -msgid "Important" -msgstr "Important" - -#: sphinx/locale/__init__.py:21 -msgid "Note" -msgstr "Note" - -#: sphinx/locale/__init__.py:22 -msgid "See Also" -msgstr "Voir aussi" - -#: sphinx/locale/__init__.py:23 -msgid "Tip" -msgstr "Astuce" - -#: sphinx/locale/__init__.py:24 -msgid "Warning" -msgstr "Warning" - -#: sphinx/locale/__init__.py:28 -#, python-format -msgid "New in version %s" -msgstr "Introduit dans la version %s" - -#: sphinx/locale/__init__.py:29 -#, python-format -msgid "Changed in version %s" -msgstr "Modifié dans la version %s" - -#: sphinx/locale/__init__.py:30 -#, 
python-format -msgid "Deprecated since version %s" -msgstr "Obsolète depuis la version %s" - -#: sphinx/locale/__init__.py:34 -msgid "module" -msgstr "module" - -#: sphinx/locale/__init__.py:35 -msgid "keyword" -msgstr "mot-clé" - -#: sphinx/locale/__init__.py:36 -msgid "operator" -msgstr "opérateur" - -#: sphinx/locale/__init__.py:37 -msgid "object" -msgstr "objet" - -#: sphinx/locale/__init__.py:38 -msgid "exception" -msgstr "exception" - -#: sphinx/locale/__init__.py:39 -msgid "statement" -msgstr "état" - -#: sphinx/locale/__init__.py:40 -msgid "built-in function" -msgstr "fonction de base" - -#: sphinx/static/doctools.js:174 -msgid "Hide Search Matches" -msgstr "Cacher les résultats de la recherche" - -#: sphinx/static/searchtools.js:274 -msgid "Searching" -msgstr "En cours de recherche" - -#: sphinx/static/searchtools.js:279 -msgid "Preparing search..." -msgstr "Préparation de la recherche..." - -#: sphinx/static/searchtools.js:338 -msgid "module, in " -msgstr "module, dans" - -#: sphinx/static/searchtools.js:347 -msgid ", in " -msgstr ", dans" - -#: sphinx/static/searchtools.js:447 sphinx/templates/search.html:18 -msgid "Search Results" -msgstr "Résultats de la recherche" - -#: sphinx/static/searchtools.js:449 -msgid "" -"Your search did not match any documents. Please make sure that all words " -"are spelled correctly and that you've selected enough categories." -msgstr "" -"Votre recherche ne correspond à aucun document. Vérifiez l'orthographe " -"des termes de recherche et que vous avez sélectionné suffisamment de " -"catégories." - -#: sphinx/static/searchtools.js:451 -#, python-format -msgid "Search finished, found %s page(s) matching the search query." -msgstr "La recherche est terminée, %s page(s) correspond(ent) à la requête." - -#: sphinx/templates/defindex.html:2 -msgid "Overview" -msgstr "Résumé" - -#: sphinx/templates/defindex.html:11 -msgid "Indices and tables:" -msgstr "Indices et tables :" - -#: sphinx/templates/defindex.html:14 -msgid "Complete Table of Contents" -msgstr "Table des matières complète" - -#: sphinx/templates/defindex.html:15 -msgid "lists all sections and subsections" -msgstr "lister l'ensemble des sections et sous-sections" - -#: sphinx/templates/defindex.html:17 -msgid "search this documentation" -msgstr "rechercher dans cette documentation" - -#: sphinx/templates/defindex.html:20 -msgid "quick access to all modules" -msgstr "accès rapide à l'ensemble des modules" - -#: sphinx/templates/defindex.html:22 -msgid "all functions, classes, terms" -msgstr "toutes les fonctions, classes, termes" - -#: sphinx/templates/genindex-single.html:5 -#, python-format -msgid "Index – %(key)s" -msgstr "Index – %(key)s" - -#: sphinx/templates/genindex-single.html:44 -#: sphinx/templates/genindex-split.html:14 -#: sphinx/templates/genindex-split.html:27 sphinx/templates/genindex.html:54 -msgid "Full index on one page" -msgstr "Index complet sur une seule page" - -#: sphinx/templates/genindex-split.html:7 -msgid "Index pages by letter" -msgstr "Indexer les pages par lettre" - -#: sphinx/templates/genindex-split.html:15 -msgid "can be huge" -msgstr "peut être énorme" - -#: sphinx/templates/layout.html:9 -msgid "Navigation" -msgstr "Navigation" - -#: sphinx/templates/layout.html:40 -msgid "Table Of Contents" -msgstr "Table des matières" - -#: sphinx/templates/layout.html:46 -msgid "Previous topic" -msgstr "Sujet précédent" - -#: sphinx/templates/layout.html:47 -msgid "previous chapter" -msgstr "Chapitre précédent" - -#: sphinx/templates/layout.html:50 -msgid "Next topic" 
-msgstr "Sujet suivant" - -#: sphinx/templates/layout.html:51 -msgid "next chapter" -msgstr "Chapitre suivant" - -#: sphinx/templates/layout.html:55 -msgid "This Page" -msgstr "Cette page" - -#: sphinx/templates/layout.html:59 -msgid "Suggest Change" -msgstr "Suggérer une modification" - -#: sphinx/templates/layout.html:60 sphinx/templates/layout.html:62 -msgid "Show Source" -msgstr "Montrer la source" - -#: sphinx/templates/layout.html:71 -msgid "Quick search" -msgstr "Recherche rapide" - -#: sphinx/templates/layout.html:71 -msgid "Keyword search" -msgstr "Recherche par mot-clé" - -#: sphinx/templates/layout.html:73 -msgid "Go" -msgstr "Go" - -#: sphinx/templates/layout.html:78 -msgid "Enter a module, class or function name." -msgstr "Saisissez un nom de module, classe ou fonction." - -#: sphinx/templates/layout.html:119 -#, python-format -msgid "Search within %(docstitle)s" -msgstr "Recherchez dans %(docstitle)s" - -#: sphinx/templates/layout.html:128 -msgid "About these documents" -msgstr "À propos de ces documents" - -#: sphinx/templates/layout.html:131 sphinx/templates/search.html:2 -#: sphinx/templates/search.html:5 -msgid "Search" -msgstr "Recherche" - -#: sphinx/templates/layout.html:133 -msgid "Copyright" -msgstr "Copyright" - -#: sphinx/templates/layout.html:178 -#, python-format -msgid "© Copyright %(copyright)s." -msgstr "© Copyright %(copyright)s." - -#: sphinx/templates/layout.html:180 -#, python-format -msgid "© Copyright %(copyright)s." -msgstr "© Copyright %(copyright)s." - -#: sphinx/templates/layout.html:183 -#, python-format -msgid "Last updated on %(last_updated)s." -msgstr "Mis à jour le %(last_updated)s." - -#: sphinx/templates/layout.html:186 -#, python-format -msgid "" -"Created using Sphinx " -"%(sphinx_version)s." -msgstr "" -"Créé avec Sphinx " -"%(sphinx_version)s." - -#: sphinx/templates/modindex.html:15 -msgid "Most popular modules:" -msgstr "Modules les plus utilisés :" - -#: sphinx/templates/modindex.html:24 -msgid "Show modules only available on these platforms" -msgstr "N'afficher que les modules disponibles sur ces plateformes" - -#: sphinx/templates/modindex.html:56 -msgid "Deprecated" -msgstr "Obsolète" - -#: sphinx/templates/opensearch.xml:4 -#, python-format -msgid "Search %(docstitle)s" -msgstr "Rechercher %(docstitle)s" - -#: sphinx/templates/page.html:8 -msgid "" -"Note: You requested an out-of-date URL from this server." -" We've tried to redirect you to the new location of this page, but it may" -" not be the right one." -msgstr "" -"Note : Vous tentez d'accéder à une ancienne URL de ce " -"serveur. Nous avons essayé de vous rediriger vers la nouvelle adresse de " -"cette page, mais ce n'est peut-être pas la bonne." - -#: sphinx/templates/search.html:7 -#, fuzzy -msgid "" -"From here you can search these documents. Enter your search\n" -" words into the box below and click \"search\". Note that the search\n" -" function will automatically search for all of the words. Pages\n" -" containing fewer words won't appear in the result list." -msgstr "" -"Vous pouvez effectuer une recherche au sein des documents. Saisissez les " -"termes\n" -" de votre recherche dans le champs ci-dessous et cliquez sur " -"\"rechercher\". Notez que la fonctionnalité de recherche\n" -" va automatique chercher pour tous les mots. Les pages\n" -" contenant moins de mots n'apparaîtront pas dans la liste des " -"résultats." 
- -#: sphinx/templates/search.html:14 -msgid "search" -msgstr "rechercher" - -#: sphinx/templates/search.html:20 -msgid "Your search did not match any results." -msgstr "Votre recherche n'a retourné aucun résultat" - -#: sphinx/templates/changes/frameset.html:5 -#: sphinx/templates/changes/versionchanges.html:12 -#, python-format -msgid "Changes in Version %(version)s — %(docstitle)s" -msgstr "Modifications dans la version %(version)s — %(docstitle)s" - -#: sphinx/templates/changes/rstsource.html:5 -#, python-format -msgid "%(filename)s — %(docstitle)s" -msgstr "%(filename)s — %(docstitle)s" - -#: sphinx/templates/changes/versionchanges.html:17 -#, python-format -msgid "Automatically generated list of changes in version %(version)s" -msgstr "Liste auto-générée des modifications dans la version %(version)s" - -#: sphinx/templates/changes/versionchanges.html:18 -msgid "Library changes" -msgstr "Modifications de la bibliothèque" - -#: sphinx/templates/changes/versionchanges.html:23 -msgid "C API changes" -msgstr "Modifications de l'API C" - -#: sphinx/templates/changes/versionchanges.html:25 -msgid "Other changes" -msgstr "Autres modifications" - diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/ja/LC_MESSAGES/sphinx.js --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/ja/LC_MESSAGES/sphinx.js Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -Documentation.addTranslations({"locale": "ja", "plural_expr": "0", "messages": {"module, in ": "\u30e2\u30b8\u30e5\u30fc\u30eb", "Preparing search...": "\u691c\u7d22\u306e\u6e96\u5099\u4e2d...", "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories.": "\u691c\u7d22\u6761\u4ef6\u306b\u4e00\u81f4\u3059\u308b\u30c9\u30ad\u30e5\u30e1\u30f3\u30c8\u306f\u3042\u308a\u307e\u305b\u3093\u3067\u3057\u305f\u3002\u691c\u7d22\u3057\u305f\u3044\u8a00\u8449\u3092\u6b63\u3057\u3044\u3064\u3065\u308a\u3067\u5165\u529b\u3057\u3066\u3044\u308b\u304b\u78ba\u8a8d\u3057\u3066\u304f\u3060\u3055\u3044\u3002\u307e\u305f\u3001\u6b63\u3057\u3044\u30ab\u30c6\u30b4\u30ea\u306e\u691c\u7d22\u3092\u884c\u3063\u3066\u3044\u308b\u304b\u78ba\u8a8d\u3057\u3066\u304f\u3060\u3055\u3044\u3002", "Search finished, found %s page(s) matching the search query.": "\u691c\u7d22\u304c\u7d42\u4e86\u3057\u3001\u6761\u4ef6\u306b\u4e00\u81f4\u3059\u308b\u30da\u30fc\u30b8\u304c %s \u500b\u307f\u3064\u304b\u308a\u307e\u3057\u305f\u3002", ", in ": "", "Permalink to this headline": "\u3053\u306e\u30d8\u30c3\u30c9\u30e9\u30a4\u30f3\u3078\u306e\u30d1\u30fc\u30de\u30ea\u30f3\u30af", "Searching": "\u691c\u7d22\u4e2d", "Permalink to this definition": "\u3053\u306e\u5b9a\u7fa9\u3078\u306e\u30d1\u30fc\u30de\u30ea\u30f3\u30af", "Hide Search Matches": "\u691c\u7d22\u7d50\u679c\u3092\u96a0\u3059", "Search Results": "\u691c\u7d22\u7d50\u679c"}}); \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/ja/LC_MESSAGES/sphinx.mo Binary file buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/ja/LC_MESSAGES/sphinx.mo has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/ja/LC_MESSAGES/sphinx.po --- 
a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/ja/LC_MESSAGES/sphinx.po Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,595 +0,0 @@ -# Japanese translations for Sphinx. -# Copyright (C) 2008 ORGANIZATION -# This file is distributed under the same license as the Sphinx project. -# Yasushi Masuda , 2008. -# -msgid "" -msgstr "" -"Project-Id-Version: Sphinx 0.5\n" -"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2008-09-11 23:58+0200\n" -"PO-Revision-Date: 2008-11-27 18:40+0100\n" -"Last-Translator: Yasushi MASUDA \n" -"Language-Team: ja \n" -"Plural-Forms: nplurals=1; plural=0\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=utf-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 0.9.4\n" - -#: sphinx/builder.py:408 -#, python-format -msgid "%b %d, %Y" -msgstr "%Y å¹´ %m 月 %d æ—¥" - -#: sphinx/builder.py:427 sphinx/templates/defindex.html:21 -msgid "General Index" -msgstr "ç·åˆç´¢å¼•" - -#: sphinx/builder.py:427 -msgid "index" -msgstr "索引" - -#: sphinx/builder.py:429 sphinx/htmlhelp.py:156 -#: sphinx/templates/defindex.html:19 sphinx/templates/modindex.html:2 -#: sphinx/templates/modindex.html:13 -msgid "Global Module Index" -msgstr "モジュールç·ç´¢å¼•" - -#: sphinx/builder.py:429 -msgid "modules" -msgstr "モジュール" - -#: sphinx/builder.py:466 -msgid "next" -msgstr "次ã¸" - -#: sphinx/builder.py:473 -msgid "previous" -msgstr "å‰ã¸" - -#: sphinx/builder.py:1054 -msgid " (in " -msgstr "" - -#: sphinx/builder.py:1129 -msgid "Builtins" -msgstr "組ã¿è¾¼ã¿" - -#: sphinx/builder.py:1131 -msgid "Module level" -msgstr "モジュールレベル" - -#: sphinx/environment.py:102 sphinx/latexwriter.py:169 -#, python-format -msgid "%B %d, %Y" -msgstr "%Y å¹´ %m 月 %d æ—¥" - -#: sphinx/environment.py:291 sphinx/latexwriter.py:175 -#: sphinx/templates/genindex-single.html:2 -#: sphinx/templates/genindex-split.html:2 -#: sphinx/templates/genindex-split.html:5 sphinx/templates/genindex.html:2 -#: sphinx/templates/genindex.html:5 sphinx/templates/genindex.html:48 -#: sphinx/templates/layout.html:130 -msgid "Index" -msgstr "索引" - -#: sphinx/environment.py:292 sphinx/latexwriter.py:174 -msgid "Module Index" -msgstr "モジュール索引" - -#: sphinx/environment.py:293 sphinx/templates/defindex.html:16 -msgid "Search Page" -msgstr "検索ページ" - -#: sphinx/htmlwriter.py:79 sphinx/static/doctools.js:145 -msgid "Permalink to this definition" -msgstr "ã“ã®å®šç¾©ã¸ã®ãƒ‘ーマリンク" - -#: sphinx/htmlwriter.py:399 sphinx/static/doctools.js:139 -msgid "Permalink to this headline" -msgstr "ã“ã®ãƒ˜ãƒƒãƒ‰ãƒ©ã‚¤ãƒ³ã¸ã®ãƒ‘ーマリンク" - -#: sphinx/latexwriter.py:172 -msgid "Release" -msgstr "リリース" - -#: sphinx/roles.py:53 sphinx/directives/desc.py:537 -#, python-format -msgid "environment variable; %s" -msgstr "環境変数; %s" - -#: sphinx/roles.py:60 -#, python-format -msgid "Python Enhancement Proposals!PEP %s" -msgstr "Python Enhancement Proposals!PEP %s" - -#: sphinx/textwriter.py:166 -#, python-format -msgid "Platform: %s" -msgstr "プラットフォーム: %s" - -#: sphinx/textwriter.py:422 -msgid "[image]" -msgstr "[ç”»åƒ]" - -#: sphinx/directives/desc.py:25 -#, python-format -msgid "%s() (built-in function)" -msgstr "%s() (組ã¿è¾¼ã¿é–¢æ•°)" - -#: sphinx/directives/desc.py:26 sphinx/directives/desc.py:42 -#: sphinx/directives/desc.py:54 -#, python-format -msgid "%s() (in module %s)" -msgstr "%s() (%s モジュール)" - -#: sphinx/directives/desc.py:29 -#, python-format -msgid "%s (built-in variable)" -msgstr "%s (組ã¿è¾¼ã¿å¤‰æ•°)" - -#: sphinx/directives/desc.py:30 sphinx/directives/desc.py:66 -#, 
python-format -msgid "%s (in module %s)" -msgstr "%s (%s モジュール)" - -#: sphinx/directives/desc.py:33 -#, fuzzy, python-format -msgid "%s (built-in class)" -msgstr "%s (組ã¿è¾¼ã¿å¤‰æ•°)" - -#: sphinx/directives/desc.py:34 -#, python-format -msgid "%s (class in %s)" -msgstr "%s (%s ã®ã‚¯ãƒ©ã‚¹)" - -#: sphinx/directives/desc.py:46 -#, python-format -msgid "%s() (%s.%s method)" -msgstr "%s() (%s.%s ã®ãƒ¡ã‚½ãƒƒãƒ‰)" - -#: sphinx/directives/desc.py:48 -#, python-format -msgid "%s() (%s method)" -msgstr "%s() (%s ã®ãƒ¡ã‚½ãƒƒãƒ‰)" - -#: sphinx/directives/desc.py:58 -#, python-format -msgid "%s() (%s.%s static method)" -msgstr "%s() (%s.%s ã®é™çš„メソッド)" - -#: sphinx/directives/desc.py:60 -#, python-format -msgid "%s() (%s static method)" -msgstr "%s() (%s ã®é™çš„メソッド)" - -#: sphinx/directives/desc.py:70 -#, python-format -msgid "%s (%s.%s attribute)" -msgstr "%s (%s.%s ã®å±žæ€§)" - -#: sphinx/directives/desc.py:72 -#, python-format -msgid "%s (%s attribute)" -msgstr "%s (%s ã®å±žæ€§)" - -#: sphinx/directives/desc.py:74 -#, python-format -msgid "%s (C function)" -msgstr "%s (C ã®é–¢æ•°)" - -#: sphinx/directives/desc.py:76 -#, python-format -msgid "%s (C member)" -msgstr "%s (C ã®ãƒ¡ãƒ³ãƒå¤‰æ•°)" - -#: sphinx/directives/desc.py:78 -#, python-format -msgid "%s (C macro)" -msgstr "%s (C ã®ãƒžã‚¯ãƒ­)" - -#: sphinx/directives/desc.py:80 -#, python-format -msgid "%s (C type)" -msgstr "%s (C ã®ãƒ‡ãƒ¼ã‚¿åž‹)" - -#: sphinx/directives/desc.py:82 -#, python-format -msgid "%s (C variable)" -msgstr "%s (C ã®å¤‰æ•°)" - -#: sphinx/directives/desc.py:100 -msgid "Raises" -msgstr "例外" - -#: sphinx/directives/desc.py:104 -msgid "Variable" -msgstr "変数" - -#: sphinx/directives/desc.py:107 -msgid "Returns" -msgstr "戻り値" - -#: sphinx/directives/desc.py:116 -msgid "Return type" -msgstr "戻り値ã®åž‹" - -#: sphinx/directives/desc.py:143 -msgid "Parameters" -msgstr "パラメタ" - -#: sphinx/directives/desc.py:423 -#, fuzzy, python-format -msgid "%scommand line option; %s" -msgstr "%sコマンドラインオプション; %s" - -#: sphinx/directives/other.py:101 -msgid "Platforms: " -msgstr "プラットフォーム: " - -#: sphinx/directives/other.py:106 -#, python-format -msgid "%s (module)" -msgstr "%s (モジュール)" - -#: sphinx/directives/other.py:146 -msgid "Section author: " -msgstr "ã“ã®ç¯€ã®ä½œè€…: " - -#: sphinx/directives/other.py:148 -msgid "Module author: " -msgstr "モジュールã®ä½œè€…: " - -#: sphinx/directives/other.py:150 -msgid "Author: " -msgstr "作者: " - -#: sphinx/directives/other.py:246 -msgid "See also" -msgstr "å‚考" - -#: sphinx/ext/todo.py:31 -msgid "Todo" -msgstr "" - -#: sphinx/ext/todo.py:75 -#, python-format -msgid "(The original entry is located in %s, line %d and can be found " -msgstr "" - -#: sphinx/ext/todo.py:81 -msgid "here" -msgstr "" - -#: sphinx/locale/__init__.py:15 -msgid "Attention" -msgstr "注æ„" - -#: sphinx/locale/__init__.py:16 -msgid "Caution" -msgstr "ã”用心" - -#: sphinx/locale/__init__.py:17 -msgid "Danger" -msgstr "å±é™º" - -#: sphinx/locale/__init__.py:18 -msgid "Error" -msgstr "エラー" - -#: sphinx/locale/__init__.py:19 -msgid "Hint" -msgstr "ヒント" - -#: sphinx/locale/__init__.py:20 -msgid "Important" -msgstr "é‡è¦" - -#: sphinx/locale/__init__.py:21 -msgid "Note" -msgstr "ノート" - -#: sphinx/locale/__init__.py:22 -msgid "See Also" -msgstr "å‚考" - -#: sphinx/locale/__init__.py:23 -msgid "Tip" -msgstr "ã¡ãªã¿ã«" - -#: sphinx/locale/__init__.py:24 -msgid "Warning" -msgstr "警告" - -#: sphinx/locale/__init__.py:28 -#, python-format -msgid "New in version %s" -msgstr "ãƒãƒ¼ã‚¸ãƒ§ãƒ³ %s ã§è¿½åŠ " - -#: sphinx/locale/__init__.py:29 -#, python-format -msgid 
"Changed in version %s" -msgstr "ãƒãƒ¼ã‚¸ãƒ§ãƒ³ %s ã§å¤‰æ›´" - -#: sphinx/locale/__init__.py:30 -#, python-format -msgid "Deprecated since version %s" -msgstr "ãƒãƒ¼ã‚¸ãƒ§ãƒ³ %s ã§æ’¤å»ƒ" - -#: sphinx/locale/__init__.py:34 -msgid "module" -msgstr "モジュール" - -#: sphinx/locale/__init__.py:35 -msgid "keyword" -msgstr "キーワード" - -#: sphinx/locale/__init__.py:36 -msgid "operator" -msgstr "演算å­" - -#: sphinx/locale/__init__.py:37 -msgid "object" -msgstr "オブジェクト" - -#: sphinx/locale/__init__.py:38 -msgid "exception" -msgstr "例外" - -#: sphinx/locale/__init__.py:39 -msgid "statement" -msgstr "æ–‡" - -#: sphinx/locale/__init__.py:40 -msgid "built-in function" -msgstr "組ã¿è¾¼ã¿é–¢æ•°" - -#: sphinx/static/doctools.js:174 -msgid "Hide Search Matches" -msgstr "検索çµæžœã‚’éš ã™" - -#: sphinx/static/searchtools.js:274 -msgid "Searching" -msgstr "検索中" - -#: sphinx/static/searchtools.js:279 -msgid "Preparing search..." -msgstr "検索ã®æº–備中..." - -#: sphinx/static/searchtools.js:338 -#, fuzzy -msgid "module, in " -msgstr "モジュール" - -#: sphinx/static/searchtools.js:347 -msgid ", in " -msgstr "" - -#: sphinx/static/searchtools.js:447 sphinx/templates/search.html:18 -msgid "Search Results" -msgstr "検索çµæžœ" - -#: sphinx/static/searchtools.js:449 -msgid "" -"Your search did not match any documents. Please make sure that all words " -"are spelled correctly and that you've selected enough categories." -msgstr "検索æ¡ä»¶ã«ä¸€è‡´ã™ã‚‹ãƒ‰ã‚­ãƒ¥ãƒ¡ãƒ³ãƒˆã¯ã‚ã‚Šã¾ã›ã‚“ã§ã—ãŸã€‚検索ã—ãŸã„言葉を正ã—ã„ã¤ã¥ã‚Šã§å…¥åŠ›ã—ã¦ã„ã‚‹ã‹ç¢ºèªã—ã¦ãã ã•ã„。ã¾ãŸã€æ­£ã—ã„カテゴリã®æ¤œç´¢ã‚’è¡Œã£ã¦ã„ã‚‹ã‹ç¢ºèªã—ã¦ãã ã•ã„。" - -#: sphinx/static/searchtools.js:451 -#, python-format -msgid "Search finished, found %s page(s) matching the search query." -msgstr "検索ãŒçµ‚了ã—ã€æ¡ä»¶ã«ä¸€è‡´ã™ã‚‹ãƒšãƒ¼ã‚¸ãŒ %s 個ã¿ã¤ã‹ã‚Šã¾ã—ãŸã€‚" - -#: sphinx/templates/defindex.html:2 -msgid "Overview" -msgstr "概è¦" - -#: sphinx/templates/defindex.html:11 -msgid "Indices and tables:" -msgstr "索引ã¨è¡¨ä¸€è¦§:" - -#: sphinx/templates/defindex.html:14 -msgid "Complete Table of Contents" -msgstr "ç·åˆç›®æ¬¡" - -#: sphinx/templates/defindex.html:15 -msgid "lists all sections and subsections" -msgstr "ç« ï¼ç¯€ä¸€è¦§" - -#: sphinx/templates/defindex.html:17 -msgid "search this documentation" -msgstr "ドキュメントを検索" - -#: sphinx/templates/defindex.html:20 -msgid "quick access to all modules" -msgstr "全モジュール早見表" - -#: sphinx/templates/defindex.html:22 -msgid "all functions, classes, terms" -msgstr "関数ã€ã‚¯ãƒ©ã‚¹ãŠã‚ˆã³ç”¨èªžç·è¦§" - -#: sphinx/templates/genindex-single.html:5 -#, python-format -msgid "Index – %(key)s" -msgstr "索引 – %(key)s" - -#: sphinx/templates/genindex-single.html:44 -#: sphinx/templates/genindex-split.html:14 -#: sphinx/templates/genindex-split.html:27 sphinx/templates/genindex.html:54 -msgid "Full index on one page" -msgstr "ç·ç´¢å¼•" - -#: sphinx/templates/genindex-split.html:7 -msgid "Index pages by letter" -msgstr "頭文字別索引" - -#: sphinx/templates/genindex-split.html:15 -msgid "can be huge" -msgstr "大ãã„å ´åˆãŒã‚ã‚‹ã®ã§æ³¨æ„" - -#: sphinx/templates/layout.html:9 -msgid "Navigation" -msgstr "ナビゲーション" - -#: sphinx/templates/layout.html:40 -msgid "Table Of Contents" -msgstr "目次" - -#: sphinx/templates/layout.html:46 -msgid "Previous topic" -msgstr "å‰ã®ãƒˆãƒ”ックã¸" - -#: sphinx/templates/layout.html:47 -msgid "previous chapter" -msgstr "å‰ã®ç« ã¸" - -#: sphinx/templates/layout.html:50 -msgid "Next topic" -msgstr "次ã®ãƒˆãƒ”ックã¸" - -#: sphinx/templates/layout.html:51 -msgid "next chapter" -msgstr "次ã®ç« ã¸" - -#: sphinx/templates/layout.html:55 -msgid "This Page" -msgstr 
"ã“ã®ãƒšãƒ¼ã‚¸" - -#: sphinx/templates/layout.html:59 -msgid "Suggest Change" -msgstr "変更ã®ã‚µã‚¸ã‚§ã‚¹ãƒˆ" - -#: sphinx/templates/layout.html:60 sphinx/templates/layout.html:62 -msgid "Show Source" -msgstr "ソースコードを表示" - -#: sphinx/templates/layout.html:71 -msgid "Quick search" -msgstr "クイック検索" - -#: sphinx/templates/layout.html:71 -msgid "Keyword search" -msgstr "キーワード検索" - -#: sphinx/templates/layout.html:73 -msgid "Go" -msgstr "検索" - -#: sphinx/templates/layout.html:78 -msgid "Enter a module, class or function name." -msgstr "モジュールã€ã‚¯ãƒ©ã‚¹ã€ã¾ãŸã¯é–¢æ•°åを入力ã—ã¦ãã ã•ã„" - -#: sphinx/templates/layout.html:119 -#, python-format -msgid "Search within %(docstitle)s" -msgstr "%(docstitle)s 内を検索" - -#: sphinx/templates/layout.html:128 -msgid "About these documents" -msgstr "ã“ã®ãƒ‰ã‚­ãƒ¥ãƒ¡ãƒ³ãƒˆã«ã¤ã„ã¦" - -#: sphinx/templates/layout.html:131 sphinx/templates/search.html:2 -#: sphinx/templates/search.html:5 -msgid "Search" -msgstr "検索" - -#: sphinx/templates/layout.html:133 -msgid "Copyright" -msgstr "著作権" - -#: sphinx/templates/layout.html:178 -#, python-format -msgid "© Copyright %(copyright)s." -msgstr "© Copyright %(copyright)s." - -#: sphinx/templates/layout.html:180 -#, python-format -msgid "© Copyright %(copyright)s." -msgstr "© Copyright %(copyright)s." - -#: sphinx/templates/layout.html:183 -#, python-format -msgid "Last updated on %(last_updated)s." -msgstr "最終更新: %(last_updated)s" - -#: sphinx/templates/layout.html:186 -#, python-format -msgid "" -"Created using Sphinx " -"%(sphinx_version)s." -msgstr "" -"ã“ã®ãƒ‰ã‚­ãƒ¥ãƒ¡ãƒ³ãƒˆã¯ Sphinx " -"%(sphinx_version)s ã§ç”Ÿæˆã—ã¾ã—ãŸã€‚" - -#: sphinx/templates/modindex.html:15 -msgid "Most popular modules:" -msgstr "よãå‚ç…§ã•ã‚Œã¦ã„るモジュール:" - -#: sphinx/templates/modindex.html:24 -msgid "Show modules only available on these platforms" -msgstr "ã“ã®ãƒ—ラットフォームã§åˆ©ç”¨å¯èƒ½ãªãƒ¢ã‚¸ãƒ¥ãƒ¼ãƒ«ã ã‘を表示ã™ã‚‹" - -#: sphinx/templates/modindex.html:56 -msgid "Deprecated" -msgstr "撤廃" - -#: sphinx/templates/opensearch.xml:4 -#, python-format -msgid "Search %(docstitle)s" -msgstr "%(docstitle)s 内を検索" - -#: sphinx/templates/page.html:8 -msgid "" -"Note: You requested an out-of-date URL from this server." -" We've tried to redirect you to the new location of this page, but it may" -" not be the right one." -msgstr "" -"注æ„: ã‚ãªãŸãŒè¡¨ç¤ºã—よã†ã¨ã—ã¦ã„ã‚‹ã®ã¯å¤ã„ URL ã§ã™ã€‚ã“ã®ãƒšãƒ¼ã‚¸ã«å¯¾å¿œã™ã‚‹æ–°ã—ã„ URL " -"ã¸ã®ãƒªãƒ€ã‚¤ãƒ¬ã‚¯ãƒˆã‚’試ã¿ã¾ã™ãŒã€é©åˆ‡ãªãƒªãƒ€ã‚¤ãƒ¬ã‚¯ãƒˆå…ˆã§ãªã„ã‹ã‚‚ã—ã‚Œãªã„ã®ã§æ³¨æ„ã—ã¦ãã ã•ã„。" - -#: sphinx/templates/search.html:7 -#, fuzzy -msgid "" -"From here you can search these documents. Enter your search\n" -" words into the box below and click \"search\". Note that the search\n" -" function will automatically search for all of the words. Pages\n" -" containing fewer words won't appear in the result list." -msgstr "ã“ã®ãƒšãƒ¼ã‚¸ã‹ã‚‰ãƒ‰ã‚­ãƒ¥ãƒ¡ãƒ³ãƒˆã‚’検索ã§ãã¾ã™ã€‚キーワードを下ã®ãƒœãƒƒã‚¯ã‚¹ã«å…¥åŠ›ã—ã¦ã€ã€Œæ¤œç´¢ã€ã‚’クリックã—ã¦ãã ã•ã„。入力ã•ã‚ŒãŸå…¨ã¦ã®ã‚­ãƒ¼ãƒ¯ãƒ¼ãƒ‰ã‚’å«ã‚€ãƒšãƒ¼ã‚¸ãŒæ¤œç´¢ã•ã‚Œã¾ã™ã€‚一部ã®ã‚­ãƒ¼ãƒ¯ãƒ¼ãƒ‰ã—ã‹å«ã¾ãªã„ページã¯æ¤œç´¢çµæžœã«è¡¨ç¤ºã•ã‚Œãªã„ã®ã§æ³¨æ„ã—ã¦ãã ã•ã„。" - -#: sphinx/templates/search.html:14 -msgid "search" -msgstr "検索" - -#: sphinx/templates/search.html:20 -msgid "Your search did not match any results." 
-msgstr "検索æ¡ä»¶ã«ä¸€è‡´ã™ã‚‹é …ç›®ãŒã‚ã‚Šã¾ã›ã‚“ã§ã—ãŸã€‚" - -#: sphinx/templates/changes/frameset.html:5 -#: sphinx/templates/changes/versionchanges.html:12 -#, python-format -msgid "Changes in Version %(version)s — %(docstitle)s" -msgstr "ãƒãƒ¼ã‚¸ãƒ§ãƒ³ %(version)s ã®å¤‰æ›´ç‚¹ — %(docstitle)s" - -#: sphinx/templates/changes/rstsource.html:5 -#, python-format -msgid "%(filename)s — %(docstitle)s" -msgstr "%(filename)s — %(docstitle)s" - -#: sphinx/templates/changes/versionchanges.html:17 -#, python-format -msgid "Automatically generated list of changes in version %(version)s" -msgstr "ãƒãƒ¼ã‚¸ãƒ§ãƒ³ %(version)s ã®å¤‰æ›´ç‚¹ï¼ˆã“ã®ãƒªã‚¹ãƒˆã¯è‡ªå‹•ç”Ÿæˆã•ã‚Œã¦ã„ã¾ã™ï¼‰" - -#: sphinx/templates/changes/versionchanges.html:18 -msgid "Library changes" -msgstr "ライブラリã«é–¢ã™ã‚‹å¤‰æ›´" - -#: sphinx/templates/changes/versionchanges.html:23 -msgid "C API changes" -msgstr "C API ã«é–¢ã™ã‚‹å¤‰æ›´" - -#: sphinx/templates/changes/versionchanges.html:25 -msgid "Other changes" -msgstr "ãã®å¤šã®å¤‰æ›´" - diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/nl/LC_MESSAGES/sphinx.js --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/nl/LC_MESSAGES/sphinx.js Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -Documentation.addTranslations({"locale": "nl", "plural_expr": "(n != 1)", "messages": {"module, in ": "module", "Preparing search...": "Het zoeken wordt voorbereid", "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories.": "Uw zoekopdracht leverde geen resultaten op. Controleer of alle woordencorrect gespeld zijn en dat u genoeg categori\u00ebn hebt geselecteerd.", "Search finished, found %s page(s) matching the search query.": "Zoeken voltooid, %s pagina(s) gevonden.", ", in ": "", "Permalink to this headline": "Permanente link naar deze titel", "Searching": "Zoeken", "Permalink to this definition": "Permanente link naar deze definitie", "Hide Search Matches": "Zoekresultaten verbergen", "Search Results": "Zoekresultaten"}}); \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/nl/LC_MESSAGES/sphinx.mo Binary file buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/nl/LC_MESSAGES/sphinx.mo has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/nl/LC_MESSAGES/sphinx.po --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/nl/LC_MESSAGES/sphinx.po Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,603 +0,0 @@ -# Copyright (C) 2008 ORGANIZATION -# This file is distributed under the same license as the Sphinx project. -# FIRST AUTHOR , 2008. 
-# -msgid "" -msgstr "" -"Project-Id-Version: Sphinx 0.5\n" -"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2008-09-11 23:58+0200\n" -"PO-Revision-Date: 2008-11-27 18:40+0100\n" -"Last-Translator: FULL NAME \n" -"Language-Team: nl \n" -"Plural-Forms: nplurals=2; plural=(n != 1)\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=utf-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 0.9.4\n" - -#: sphinx/builder.py:408 -#, python-format -msgid "%b %d, %Y" -msgstr "%d.%b.%Y" - -#: sphinx/builder.py:427 sphinx/templates/defindex.html:21 -msgid "General Index" -msgstr "Algemene index" - -#: sphinx/builder.py:427 -msgid "index" -msgstr "Index" - -#: sphinx/builder.py:429 sphinx/htmlhelp.py:156 -#: sphinx/templates/defindex.html:19 sphinx/templates/modindex.html:2 -#: sphinx/templates/modindex.html:13 -msgid "Global Module Index" -msgstr "Globale Module-index" - -#: sphinx/builder.py:429 -msgid "modules" -msgstr "modules" - -#: sphinx/builder.py:466 -msgid "next" -msgstr "volgende" - -#: sphinx/builder.py:473 -msgid "previous" -msgstr "vorige" - -#: sphinx/builder.py:1054 -msgid " (in " -msgstr "" - -#: sphinx/builder.py:1129 -msgid "Builtins" -msgstr "Builtins" - -#: sphinx/builder.py:1131 -msgid "Module level" -msgstr "Moduleniveau" - -#: sphinx/environment.py:102 sphinx/latexwriter.py:169 -#, python-format -msgid "%B %d, %Y" -msgstr "%d. %B %Y" - -#: sphinx/environment.py:291 sphinx/latexwriter.py:175 -#: sphinx/templates/genindex-single.html:2 -#: sphinx/templates/genindex-split.html:2 -#: sphinx/templates/genindex-split.html:5 sphinx/templates/genindex.html:2 -#: sphinx/templates/genindex.html:5 sphinx/templates/genindex.html:48 -#: sphinx/templates/layout.html:130 -msgid "Index" -msgstr "Index" - -#: sphinx/environment.py:292 sphinx/latexwriter.py:174 -msgid "Module Index" -msgstr "Module-index" - -#: sphinx/environment.py:293 sphinx/templates/defindex.html:16 -msgid "Search Page" -msgstr "Zoekpagina" - -#: sphinx/htmlwriter.py:79 sphinx/static/doctools.js:145 -msgid "Permalink to this definition" -msgstr "Permanente link naar deze definitie" - -#: sphinx/htmlwriter.py:399 sphinx/static/doctools.js:139 -msgid "Permalink to this headline" -msgstr "Permanente link naar deze titel" - -#: sphinx/latexwriter.py:172 -msgid "Release" -msgstr "Release" - -#: sphinx/roles.py:53 sphinx/directives/desc.py:537 -#, python-format -msgid "environment variable; %s" -msgstr "Omgevingsvariabele; %s" - -#: sphinx/roles.py:60 -#, python-format -msgid "Python Enhancement Proposals!PEP %s" -msgstr "Python Enhancement Proposals!PEP %s" - -#: sphinx/textwriter.py:166 -#, python-format -msgid "Platform: %s" -msgstr "Platform: %s" - -#: sphinx/textwriter.py:422 -msgid "[image]" -msgstr "[afbeelding]" - -#: sphinx/directives/desc.py:25 -#, python-format -msgid "%s() (built-in function)" -msgstr "%s() (geïntegreerde functie)" - -#: sphinx/directives/desc.py:26 sphinx/directives/desc.py:42 -#: sphinx/directives/desc.py:54 -#, python-format -msgid "%s() (in module %s)" -msgstr "%s() (in module %s)" - -#: sphinx/directives/desc.py:29 -#, python-format -msgid "%s (built-in variable)" -msgstr "%s (geïntegreerde variabele)" - -#: sphinx/directives/desc.py:30 sphinx/directives/desc.py:66 -#, python-format -msgid "%s (in module %s)" -msgstr "%s (in module %s)" - -#: sphinx/directives/desc.py:33 -#, fuzzy, python-format -msgid "%s (built-in class)" -msgstr "%s (geïntegreerde variabele)" - -#: sphinx/directives/desc.py:34 -#, python-format -msgid "%s (class in %s)" -msgstr "%s (klasse in 
%s)" - -#: sphinx/directives/desc.py:46 -#, python-format -msgid "%s() (%s.%s method)" -msgstr "%s() (%s.%s methode)" - -#: sphinx/directives/desc.py:48 -#, python-format -msgid "%s() (%s method)" -msgstr "%s() (%s methode)" - -#: sphinx/directives/desc.py:58 -#, python-format -msgid "%s() (%s.%s static method)" -msgstr "%s() (%s.%s statische methode)" - -#: sphinx/directives/desc.py:60 -#, python-format -msgid "%s() (%s static method)" -msgstr "%s() (%s statische methode)" - -#: sphinx/directives/desc.py:70 -#, python-format -msgid "%s (%s.%s attribute)" -msgstr "%s (%s.%s attribuut)" - -#: sphinx/directives/desc.py:72 -#, python-format -msgid "%s (%s attribute)" -msgstr "%s (%s attribuut)" - -#: sphinx/directives/desc.py:74 -#, python-format -msgid "%s (C function)" -msgstr "%s (C-functie)" - -#: sphinx/directives/desc.py:76 -#, python-format -msgid "%s (C member)" -msgstr "%s (C member)" - -#: sphinx/directives/desc.py:78 -#, python-format -msgid "%s (C macro)" -msgstr "%s (C-macro)" - -#: sphinx/directives/desc.py:80 -#, python-format -msgid "%s (C type)" -msgstr "%s (C type)" - -#: sphinx/directives/desc.py:82 -#, python-format -msgid "%s (C variable)" -msgstr "%s (C-variabele)" - -#: sphinx/directives/desc.py:100 -msgid "Raises" -msgstr "Veroorzaakt" - -#: sphinx/directives/desc.py:104 -msgid "Variable" -msgstr "Variabele" - -#: sphinx/directives/desc.py:107 -msgid "Returns" -msgstr "Returns" - -#: sphinx/directives/desc.py:116 -msgid "Return type" -msgstr "Return type" - -#: sphinx/directives/desc.py:143 -msgid "Parameters" -msgstr "Parameters" - -#: sphinx/directives/desc.py:423 -#, fuzzy, python-format -msgid "%scommand line option; %s" -msgstr "%scommandolijn optie; %s" - -#: sphinx/directives/other.py:101 -msgid "Platforms: " -msgstr "Platformen: " - -#: sphinx/directives/other.py:106 -#, python-format -msgid "%s (module)" -msgstr "%s (module)" - -#: sphinx/directives/other.py:146 -msgid "Section author: " -msgstr "Auteur van deze sectie: " - -#: sphinx/directives/other.py:148 -msgid "Module author: " -msgstr "Auteur van deze module: " - -#: sphinx/directives/other.py:150 -msgid "Author: " -msgstr "Auteur: " - -#: sphinx/directives/other.py:246 -msgid "See also" -msgstr "Zie ook" - -#: sphinx/ext/todo.py:31 -msgid "Todo" -msgstr "" - -#: sphinx/ext/todo.py:75 -#, python-format -msgid "(The original entry is located in %s, line %d and can be found " -msgstr "" - -#: sphinx/ext/todo.py:81 -msgid "here" -msgstr "" - -#: sphinx/locale/__init__.py:15 -msgid "Attention" -msgstr "Let op!" - -#: sphinx/locale/__init__.py:16 -msgid "Caution" -msgstr "Pas op!" 
- -#: sphinx/locale/__init__.py:17 -msgid "Danger" -msgstr "Gevaar" - -#: sphinx/locale/__init__.py:18 -msgid "Error" -msgstr "Fout" - -#: sphinx/locale/__init__.py:19 -msgid "Hint" -msgstr "Hint" - -#: sphinx/locale/__init__.py:20 -msgid "Important" -msgstr "Belangrijk" - -#: sphinx/locale/__init__.py:21 -msgid "Note" -msgstr "Notitie" - -#: sphinx/locale/__init__.py:22 -msgid "See Also" -msgstr "Zie Ook" - -#: sphinx/locale/__init__.py:23 -msgid "Tip" -msgstr "Tip" - -#: sphinx/locale/__init__.py:24 -msgid "Warning" -msgstr "Waarschuwing" - -#: sphinx/locale/__init__.py:28 -#, python-format -msgid "New in version %s" -msgstr "Nieuw in versie %s" - -#: sphinx/locale/__init__.py:29 -#, python-format -msgid "Changed in version %s" -msgstr "Veranderd in versie %s" - -#: sphinx/locale/__init__.py:30 -#, python-format -msgid "Deprecated since version %s" -msgstr "Verouderd sinds versie %s" - -#: sphinx/locale/__init__.py:34 -msgid "module" -msgstr "module" - -#: sphinx/locale/__init__.py:35 -msgid "keyword" -msgstr "trefwoord" - -#: sphinx/locale/__init__.py:36 -msgid "operator" -msgstr "operator" - -#: sphinx/locale/__init__.py:37 -msgid "object" -msgstr "object" - -#: sphinx/locale/__init__.py:38 -msgid "exception" -msgstr "foutmelding" - -#: sphinx/locale/__init__.py:39 -msgid "statement" -msgstr "statement" - -#: sphinx/locale/__init__.py:40 -msgid "built-in function" -msgstr "geïntegreerde functie" - -#: sphinx/static/doctools.js:174 -msgid "Hide Search Matches" -msgstr "Zoekresultaten verbergen" - -#: sphinx/static/searchtools.js:274 -msgid "Searching" -msgstr "Zoeken" - -#: sphinx/static/searchtools.js:279 -msgid "Preparing search..." -msgstr "Het zoeken wordt voorbereid" - -#: sphinx/static/searchtools.js:338 -#, fuzzy -msgid "module, in " -msgstr "module" - -#: sphinx/static/searchtools.js:347 -msgid ", in " -msgstr "" - -#: sphinx/static/searchtools.js:447 sphinx/templates/search.html:18 -msgid "Search Results" -msgstr "Zoekresultaten" - -#: sphinx/static/searchtools.js:449 -msgid "" -"Your search did not match any documents. Please make sure that all words " -"are spelled correctly and that you've selected enough categories." -msgstr "" -"Uw zoekopdracht leverde geen resultaten op. Controleer of alle " -"woordencorrect gespeld zijn en dat u genoeg categoriën hebt geselecteerd." - -#: sphinx/static/searchtools.js:451 -#, python-format -msgid "Search finished, found %s page(s) matching the search query." -msgstr "Zoeken voltooid, %s pagina(s) gevonden." 
- -#: sphinx/templates/defindex.html:2 -msgid "Overview" -msgstr "Overzicht" - -#: sphinx/templates/defindex.html:11 -msgid "Indices and tables:" -msgstr "Indices en tabellen:" - -#: sphinx/templates/defindex.html:14 -msgid "Complete Table of Contents" -msgstr "Volledige inhoudstafel" - -#: sphinx/templates/defindex.html:15 -msgid "lists all sections and subsections" -msgstr "geeft alle secties en subsecties weer" - -#: sphinx/templates/defindex.html:17 -msgid "search this documentation" -msgstr "zoeken in deze documentatie" - -#: sphinx/templates/defindex.html:20 -msgid "quick access to all modules" -msgstr "sneltoegang naar alle modules" - -#: sphinx/templates/defindex.html:22 -msgid "all functions, classes, terms" -msgstr "alle functies, klasses en begrippen" - -#: sphinx/templates/genindex-single.html:5 -#, python-format -msgid "Index – %(key)s" -msgstr "Index – %(key)s" - -#: sphinx/templates/genindex-single.html:44 -#: sphinx/templates/genindex-split.html:14 -#: sphinx/templates/genindex-split.html:27 sphinx/templates/genindex.html:54 -msgid "Full index on one page" -msgstr "Volledige index op een pagina" - -#: sphinx/templates/genindex-split.html:7 -msgid "Index pages by letter" -msgstr "Index pagineerd per letter" - -#: sphinx/templates/genindex-split.html:15 -msgid "can be huge" -msgstr "kan heel groot zijn" - -#: sphinx/templates/layout.html:9 -msgid "Navigation" -msgstr "Navigatie" - -#: sphinx/templates/layout.html:40 -msgid "Table Of Contents" -msgstr "Inhoudstafel" - -#: sphinx/templates/layout.html:46 -msgid "Previous topic" -msgstr "Vorig onderwerp" - -#: sphinx/templates/layout.html:47 -msgid "previous chapter" -msgstr "Vorig hoofdstuk" - -#: sphinx/templates/layout.html:50 -msgid "Next topic" -msgstr "Volgend onderwerp" - -#: sphinx/templates/layout.html:51 -msgid "next chapter" -msgstr "volgend hoofdstuk" - -#: sphinx/templates/layout.html:55 -msgid "This Page" -msgstr "Deze Pagina" - -#: sphinx/templates/layout.html:59 -msgid "Suggest Change" -msgstr "Wijziging Voorstellen" - -#: sphinx/templates/layout.html:60 sphinx/templates/layout.html:62 -msgid "Show Source" -msgstr "Broncode weergeven" - -#: sphinx/templates/layout.html:71 -msgid "Quick search" -msgstr "Snel zoeken" - -#: sphinx/templates/layout.html:71 -msgid "Keyword search" -msgstr "Trefwoord opzoeken" - -#: sphinx/templates/layout.html:73 -msgid "Go" -msgstr "Go" - -#: sphinx/templates/layout.html:78 -msgid "Enter a module, class or function name." -msgstr "Geef de naam van een module, klasse of functie." - -#: sphinx/templates/layout.html:119 -#, python-format -msgid "Search within %(docstitle)s" -msgstr "Zoeken in %(docstitle)s" - -#: sphinx/templates/layout.html:128 -msgid "About these documents" -msgstr "Over deze documenten" - -#: sphinx/templates/layout.html:131 sphinx/templates/search.html:2 -#: sphinx/templates/search.html:5 -msgid "Search" -msgstr "Zoeken" - -#: sphinx/templates/layout.html:133 -msgid "Copyright" -msgstr "Copyright" - -#: sphinx/templates/layout.html:178 -#, python-format -msgid "© Copyright %(copyright)s." -msgstr "© Copyright %(copyright)s." - -#: sphinx/templates/layout.html:180 -#, python-format -msgid "© Copyright %(copyright)s." -msgstr "© Copyright %(copyright)s." - -#: sphinx/templates/layout.html:183 -#, python-format -msgid "Last updated on %(last_updated)s." -msgstr "Laatste aanpassing op %(last_updated)s." - -#: sphinx/templates/layout.html:186 -#, python-format -msgid "" -"Created using Sphinx " -"%(sphinx_version)s." 
-msgstr "" -"Aangemaakt met Sphinx " -"%(sphinx_version)s." - -#: sphinx/templates/modindex.html:15 -msgid "Most popular modules:" -msgstr "Populairste modules:" - -#: sphinx/templates/modindex.html:24 -msgid "Show modules only available on these platforms" -msgstr "Enkel modules weergeven die op deze platformen beschikbaar zijn" - -#: sphinx/templates/modindex.html:56 -msgid "Deprecated" -msgstr "Verouderd" - -#: sphinx/templates/opensearch.xml:4 -#, python-format -msgid "Search %(docstitle)s" -msgstr "Zoeken %(docstitle)s" - -#: sphinx/templates/page.html:8 -msgid "" -"Note: You requested an out-of-date URL from this server." -" We've tried to redirect you to the new location of this page, but it may" -" not be the right one." -msgstr "" -"Opgelet: U heeft een verouderde URL aangevraagd op deze " -"server. Wij hebben probeerd u door te verwijzen naar de nieuwe locatie " -"van deze pagina, maar dat is misschien niet gelukt." - -#: sphinx/templates/search.html:7 -#, fuzzy -msgid "" -"From here you can search these documents. Enter your search\n" -" words into the box below and click \"search\". Note that the search\n" -" function will automatically search for all of the words. Pages\n" -" containing fewer words won't appear in the result list." -msgstr "" -"Hier kan u de documenten doorzoeken. Geef enkele trefwoorden\n" -" in het veld hieronder en klik \"zoeken\". Merk op dat de zoekfunctie" -"\n" -" steeds naar alle woorden zoekt. Pagina's die minder woorden bevatten" -"\n" -" zullen niet tussen de resultaten verschijnen." - -#: sphinx/templates/search.html:14 -msgid "search" -msgstr "zoeken" - -#: sphinx/templates/search.html:20 -msgid "Your search did not match any results." -msgstr "Uw zoekopdracht leverde geen resultaten op." - -#: sphinx/templates/changes/frameset.html:5 -#: sphinx/templates/changes/versionchanges.html:12 -#, python-format -msgid "Changes in Version %(version)s — %(docstitle)s" -msgstr "Veranderingen in versie %(version)s — %(docstitle)s" - -#: sphinx/templates/changes/rstsource.html:5 -#, python-format -msgid "%(filename)s — %(docstitle)s" -msgstr "%(filename)s — %(docstitle)s" - -#: sphinx/templates/changes/versionchanges.html:17 -#, python-format -msgid "Automatically generated list of changes in version %(version)s" -msgstr "Automatisch genereerde lijst van veranderingen in versie %(version)s" - -#: sphinx/templates/changes/versionchanges.html:18 -msgid "Library changes" -msgstr "Veranderingen in de bibliotheek" - -#: sphinx/templates/changes/versionchanges.html:23 -msgid "C API changes" -msgstr "Veranderingen in de C-API" - -#: sphinx/templates/changes/versionchanges.html:25 -msgid "Other changes" -msgstr "Andere veranderingen" - diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/pl/LC_MESSAGES/sphinx.js --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/pl/LC_MESSAGES/sphinx.js Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -Documentation.addTranslations({"locale": "pl", "plural_expr": "(n==1 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2)", "messages": {"module, in ": "modu\u0142", "Preparing search...": "Przygotowanie wyszukiwania...", "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories.": "Nie znaleziono \u017cadnych pasuj\u0105cych dokument\u00f3w. 
Upewnij si\u0119, \u017ce wszystkie s\u0142owa s\u0105 poprawnie wpisane i \u017ce wybra\u0142e\u015b wystarczaj\u0105c\u0105liczb\u0119 kategorii.", "Search finished, found %s page(s) matching the search query.": "Przeszukiwanie zako\u0144czone, znaleziono %s pasuj\u0105cych stron.", ", in ": "", "Permalink to this headline": "Sta\u0142y odno\u015bnik do tego nag\u0142\u00f3wka", "Searching": "Wyszukiwanie", "Permalink to this definition": "Sta\u0142y odno\u015bnik do tej definicji", "Hide Search Matches": "Ukryj wyniki wyszukiwania", "Search Results": "Wyniki wyszukiwania"}}); \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/pl/LC_MESSAGES/sphinx.mo Binary file buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/pl/LC_MESSAGES/sphinx.mo has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/pl/LC_MESSAGES/sphinx.po --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/pl/LC_MESSAGES/sphinx.po Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,600 +0,0 @@ - -msgid "" -msgstr "" -"Project-Id-Version: Sphinx 0.5\n" -"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2008-08-10 11:43+0000\n" -"PO-Revision-Date: 2008-11-27 18:40+0100\n" -"Last-Translator: MichaÅ‚ Kandulski \n" -"Language-Team: \n" -"Plural-Forms: nplurals=3; plural=(n==1 ? 0 : n%10>=2 && n%10<=4 && " -"(n%100<10 || n%100>=20) ? 1 : 2)\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=utf-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 0.9.4\n" - -#: sphinx/builder.py:408 -#, python-format -msgid "%b %d, %Y" -msgstr "%b %d %Y" - -#: sphinx/builder.py:427 sphinx/templates/defindex.html:21 -msgid "General Index" -msgstr "Indeks ogólny" - -#: sphinx/builder.py:427 -msgid "index" -msgstr "indeks" - -#: sphinx/builder.py:429 sphinx/htmlhelp.py:156 -#: sphinx/templates/defindex.html:19 sphinx/templates/modindex.html:2 -#: sphinx/templates/modindex.html:13 -msgid "Global Module Index" -msgstr "Indeks modułów" - -#: sphinx/builder.py:429 -msgid "modules" -msgstr "moduÅ‚y" - -#: sphinx/builder.py:466 -msgid "next" -msgstr "dalej" - -#: sphinx/builder.py:473 -msgid "previous" -msgstr "wstecz" - -#: sphinx/builder.py:1054 -msgid " (in " -msgstr "" - -#: sphinx/builder.py:1129 -msgid "Builtins" -msgstr "Wbudowane" - -#: sphinx/builder.py:1131 -msgid "Module level" -msgstr "Poziom moduÅ‚u" - -#: sphinx/environment.py:102 sphinx/latexwriter.py:169 -#, python-format -msgid "%B %d, %Y" -msgstr "%B %d %Y" - -#: sphinx/environment.py:291 sphinx/latexwriter.py:175 -#: sphinx/templates/genindex-single.html:2 -#: sphinx/templates/genindex-split.html:2 -#: sphinx/templates/genindex-split.html:5 sphinx/templates/genindex.html:2 -#: sphinx/templates/genindex.html:5 sphinx/templates/genindex.html:48 -#: sphinx/templates/layout.html:130 -msgid "Index" -msgstr "Indeks" - -#: sphinx/environment.py:292 sphinx/latexwriter.py:174 -msgid "Module Index" -msgstr "Indeks modułów" - -#: sphinx/environment.py:293 sphinx/templates/defindex.html:16 -msgid "Search Page" -msgstr "Wyszukiwanie" - -#: sphinx/htmlwriter.py:79 sphinx/static/doctools.js:145 -msgid "Permalink to this definition" -msgstr "StaÅ‚y odnoÅ›nik do tej definicji" - -#: sphinx/htmlwriter.py:399 sphinx/static/doctools.js:139 -msgid "Permalink to this headline" -msgstr "StaÅ‚y odnoÅ›nik do tego 
nagłówka" - -#: sphinx/latexwriter.py:172 -msgid "Release" -msgstr "Wydanie" - -#: sphinx/roles.py:53 sphinx/directives/desc.py:537 -#, python-format -msgid "environment variable; %s" -msgstr "zmienna Å›rodowiskowa; %s" - -#: sphinx/roles.py:60 -#, python-format -msgid "Python Enhancement Proposals!PEP %s" -msgstr "Python Enhancement Proposals!PEP %s" - -#: sphinx/textwriter.py:166 -#, python-format -msgid "Platform: %s" -msgstr "Platforma: %s" - -#: sphinx/textwriter.py:422 -msgid "[image]" -msgstr "[obrazek]" - -#: sphinx/directives/desc.py:25 -#, python-format -msgid "%s() (built-in function)" -msgstr "%s() (funkcja wbudowana)" - -#: sphinx/directives/desc.py:26 sphinx/directives/desc.py:42 -#: sphinx/directives/desc.py:54 -#, python-format -msgid "%s() (in module %s)" -msgstr "%s() (w module %s)" - -#: sphinx/directives/desc.py:29 -#, python-format -msgid "%s (built-in variable)" -msgstr "%s (zmienna wbudowana)" - -#: sphinx/directives/desc.py:30 sphinx/directives/desc.py:66 -#, python-format -msgid "%s (in module %s)" -msgstr "%s (w module %s)" - -#: sphinx/directives/desc.py:33 -#, fuzzy, python-format -msgid "%s (built-in class)" -msgstr "%s (zmienna wbudowana)" - -#: sphinx/directives/desc.py:34 -#, python-format -msgid "%s (class in %s)" -msgstr "%s (w klasie %s)" - -#: sphinx/directives/desc.py:46 -#, python-format -msgid "%s() (%s.%s method)" -msgstr "%s() (%s.%s metoda)" - -#: sphinx/directives/desc.py:48 -#, python-format -msgid "%s() (%s method)" -msgstr "%s() (%s metoda)" - -#: sphinx/directives/desc.py:58 -#, python-format -msgid "%s() (%s.%s static method)" -msgstr "%s() (%s.%s statyczna metoda)" - -#: sphinx/directives/desc.py:60 -#, python-format -msgid "%s() (%s static method)" -msgstr "%s() (%s statyczna metoda)" - -#: sphinx/directives/desc.py:70 -#, python-format -msgid "%s (%s.%s attribute)" -msgstr "%s (%s.%s atrybut)" - -#: sphinx/directives/desc.py:72 -#, python-format -msgid "%s (%s attribute)" -msgstr "%s (%s atrybut)" - -#: sphinx/directives/desc.py:74 -#, python-format -msgid "%s (C function)" -msgstr "%s (funkcja C)" - -#: sphinx/directives/desc.py:76 -#, python-format -msgid "%s (C member)" -msgstr "%s (pole C)" - -#: sphinx/directives/desc.py:78 -#, python-format -msgid "%s (C macro)" -msgstr "%s (makro C)" - -#: sphinx/directives/desc.py:80 -#, python-format -msgid "%s (C type)" -msgstr "%s (typ C)" - -#: sphinx/directives/desc.py:82 -#, python-format -msgid "%s (C variable)" -msgstr "%s (zmienna C)" - -#: sphinx/directives/desc.py:100 -msgid "Raises" -msgstr "Wyrzuca" - -#: sphinx/directives/desc.py:104 -msgid "Variable" -msgstr "Zmienna" - -#: sphinx/directives/desc.py:107 -msgid "Returns" -msgstr "Zwraca" - -#: sphinx/directives/desc.py:116 -msgid "Return type" -msgstr "Typ zwracany" - -#: sphinx/directives/desc.py:143 -msgid "Parameters" -msgstr "Parametry" - -#: sphinx/directives/desc.py:423 -#, fuzzy, python-format -msgid "%scommand line option; %s" -msgstr "%sopcja linii komend; %s" - -#: sphinx/directives/other.py:101 -msgid "Platforms: " -msgstr "Platformy: " - -#: sphinx/directives/other.py:106 -#, python-format -msgid "%s (module)" -msgstr "%s (moduÅ‚)" - -#: sphinx/directives/other.py:146 -msgid "Section author: " -msgstr "Autor rozdziaÅ‚u: " - -#: sphinx/directives/other.py:148 -msgid "Module author: " -msgstr "Autor moduÅ‚u: " - -#: sphinx/directives/other.py:150 -msgid "Author: " -msgstr "Autor: " - -#: sphinx/directives/other.py:246 -msgid "See also" -msgstr "Zobacz także" - -#: sphinx/ext/todo.py:31 -msgid "Todo" -msgstr "" - -#: 
sphinx/ext/todo.py:75 -#, python-format -msgid "(The original entry is located in %s, line %d and can be found " -msgstr "" - -#: sphinx/ext/todo.py:81 -msgid "here" -msgstr "" - -#: sphinx/locale/__init__.py:15 -msgid "Attention" -msgstr "Uwaga" - -#: sphinx/locale/__init__.py:16 -msgid "Caution" -msgstr "Ostrożnie" - -#: sphinx/locale/__init__.py:17 -msgid "Danger" -msgstr "NiebezpieczeÅ„stwo" - -#: sphinx/locale/__init__.py:18 -msgid "Error" -msgstr "BÅ‚Ä…d" - -#: sphinx/locale/__init__.py:19 -msgid "Hint" -msgstr "Podpowiedź" - -#: sphinx/locale/__init__.py:20 -msgid "Important" -msgstr "Ważne" - -#: sphinx/locale/__init__.py:21 -msgid "Note" -msgstr "Uwaga" - -#: sphinx/locale/__init__.py:22 -msgid "See Also" -msgstr "Zobacz także" - -#: sphinx/locale/__init__.py:23 -msgid "Tip" -msgstr "Wskazówka" - -#: sphinx/locale/__init__.py:24 -msgid "Warning" -msgstr "Ostrzeżenie" - -#: sphinx/locale/__init__.py:28 -#, python-format -msgid "New in version %s" -msgstr "Nowe w wersji %s" - -#: sphinx/locale/__init__.py:29 -#, python-format -msgid "Changed in version %s" -msgstr "Zmienione w wersji %s" - -#: sphinx/locale/__init__.py:30 -#, python-format -msgid "Deprecated since version %s" -msgstr "Niezalecane od wersji %s" - -#: sphinx/locale/__init__.py:34 -msgid "module" -msgstr "moduÅ‚" - -#: sphinx/locale/__init__.py:35 -msgid "keyword" -msgstr "sÅ‚owo kluczowe" - -#: sphinx/locale/__init__.py:36 -msgid "operator" -msgstr "operator" - -#: sphinx/locale/__init__.py:37 -msgid "object" -msgstr "obiekt" - -#: sphinx/locale/__init__.py:38 -msgid "exception" -msgstr "wyjÄ…tek" - -#: sphinx/locale/__init__.py:39 -msgid "statement" -msgstr "instrukcja" - -#: sphinx/locale/__init__.py:40 -msgid "built-in function" -msgstr "funkcja wbudowana" - -#: sphinx/static/doctools.js:174 -msgid "Hide Search Matches" -msgstr "Ukryj wyniki wyszukiwania" - -#: sphinx/static/searchtools.js:274 -msgid "Searching" -msgstr "Wyszukiwanie" - -#: sphinx/static/searchtools.js:279 -msgid "Preparing search..." -msgstr "Przygotowanie wyszukiwania..." - -#: sphinx/static/searchtools.js:338 -#, fuzzy -msgid "module, in " -msgstr "moduÅ‚" - -#: sphinx/static/searchtools.js:347 -msgid ", in " -msgstr "" - -#: sphinx/static/searchtools.js:447 sphinx/templates/search.html:18 -msgid "Search Results" -msgstr "Wyniki wyszukiwania" - -#: sphinx/static/searchtools.js:449 -msgid "" -"Your search did not match any documents. Please make sure that all words " -"are spelled correctly and that you've selected enough categories." -msgstr "" -"Nie znaleziono żadnych pasujÄ…cych dokumentów. Upewnij siÄ™, że wszystkie " -"sÅ‚owa sÄ… poprawnie wpisane i że wybraÅ‚eÅ› wystarczajÄ…cÄ…liczbÄ™ kategorii." - -#: sphinx/static/searchtools.js:451 -#, python-format -msgid "Search finished, found %s page(s) matching the search query." -msgstr "Przeszukiwanie zakoÅ„czone, znaleziono %s pasujÄ…cych stron." 
- -#: sphinx/templates/defindex.html:2 -msgid "Overview" -msgstr "PrzeglÄ…d" - -#: sphinx/templates/defindex.html:11 -msgid "Indices and tables:" -msgstr "Indeksy i tablice:" - -#: sphinx/templates/defindex.html:14 -msgid "Complete Table of Contents" -msgstr "Kompletny spis treÅ›ci" - -#: sphinx/templates/defindex.html:15 -msgid "lists all sections and subsections" -msgstr "wymieÅ„ wszystkie rozdziaÅ‚y i podrozdziaÅ‚y" - -#: sphinx/templates/defindex.html:17 -msgid "search this documentation" -msgstr "wyszukaj w dokumentacji" - -#: sphinx/templates/defindex.html:20 -msgid "quick access to all modules" -msgstr "szybki dostÄ™p do wszystkich modułów" - -#: sphinx/templates/defindex.html:22 -msgid "all functions, classes, terms" -msgstr "wszystkie funkcje, klasy, terminy" - -#: sphinx/templates/genindex-single.html:5 -#, python-format -msgid "Index – %(key)s" -msgstr "Indeks – %(key)s" - -#: sphinx/templates/genindex-single.html:44 -#: sphinx/templates/genindex-split.html:14 -#: sphinx/templates/genindex-split.html:27 sphinx/templates/genindex.html:54 -msgid "Full index on one page" -msgstr "CaÅ‚y indeks na jednej stronie" - -#: sphinx/templates/genindex-split.html:7 -msgid "Index pages by letter" -msgstr "Strony indeksu alfabetycznie" - -#: sphinx/templates/genindex-split.html:15 -msgid "can be huge" -msgstr "może być ogromny" - -#: sphinx/templates/layout.html:9 -msgid "Navigation" -msgstr "Nawigacja" - -#: sphinx/templates/layout.html:40 -msgid "Table Of Contents" -msgstr "Spis treÅ›ci" - -#: sphinx/templates/layout.html:46 -msgid "Previous topic" -msgstr "Poprzedni temat" - -#: sphinx/templates/layout.html:47 -msgid "previous chapter" -msgstr "poprzedni rozdziaÅ‚" - -#: sphinx/templates/layout.html:50 -msgid "Next topic" -msgstr "NastÄ™pny temat" - -#: sphinx/templates/layout.html:51 -msgid "next chapter" -msgstr "nastÄ™pny rozdziaÅ‚" - -#: sphinx/templates/layout.html:55 -msgid "This Page" -msgstr "Ta strona" - -#: sphinx/templates/layout.html:59 -msgid "Suggest Change" -msgstr "Zasugeruj zmianÄ™" - -#: sphinx/templates/layout.html:60 sphinx/templates/layout.html:62 -msgid "Show Source" -msgstr "Pokaż źródÅ‚o" - -#: sphinx/templates/layout.html:71 -msgid "Quick search" -msgstr "Szybkie wyszukiwanie" - -#: sphinx/templates/layout.html:71 -msgid "Keyword search" -msgstr "Szukanie wg sÅ‚owa kluczowego" - -#: sphinx/templates/layout.html:73 -msgid "Go" -msgstr "Szukaj" - -#: sphinx/templates/layout.html:78 -msgid "Enter a module, class or function name." -msgstr "Wprowadź nazwÄ™ moduÅ‚u, klasy lub funkcji." - -#: sphinx/templates/layout.html:119 -#, python-format -msgid "Search within %(docstitle)s" -msgstr "Szukaj poÅ›ród %(docstitle)s" - -#: sphinx/templates/layout.html:128 -msgid "About these documents" -msgstr "O tych dokumentach" - -#: sphinx/templates/layout.html:131 sphinx/templates/search.html:2 -#: sphinx/templates/search.html:5 -msgid "Search" -msgstr "Szukaj" - -#: sphinx/templates/layout.html:133 -msgid "Copyright" -msgstr "Copyright" - -#: sphinx/templates/layout.html:178 -#, python-format -msgid "© Copyright %(copyright)s." -msgstr "© Copyright %(copyright)s." - -#: sphinx/templates/layout.html:180 -#, python-format -msgid "© Copyright %(copyright)s." -msgstr "© Copyright %(copyright)s." - -#: sphinx/templates/layout.html:183 -#, python-format -msgid "Last updated on %(last_updated)s." -msgstr "Ostatnia modyfikacja %(last_updated)s." - -#: sphinx/templates/layout.html:186 -#, python-format -msgid "" -"Created using Sphinx " -"%(sphinx_version)s." 
-msgstr "" -"Utworzone przy pomocy Sphinx'a " -"%(sphinx_version)s." - -#: sphinx/templates/modindex.html:15 -msgid "Most popular modules:" -msgstr "Najbardziej popularne moduÅ‚y:" - -#: sphinx/templates/modindex.html:24 -msgid "Show modules only available on these platforms" -msgstr "Pokaż moduÅ‚y dostÄ™pne tylko na tych platformach" - -#: sphinx/templates/modindex.html:56 -msgid "Deprecated" -msgstr "Niezalecane" - -#: sphinx/templates/opensearch.xml:4 -#, python-format -msgid "Search %(docstitle)s" -msgstr "Przeszukaj %(docstitle)s" - -#: sphinx/templates/page.html:8 -msgid "" -"Note: You requested an out-of-date URL from this server." -" We've tried to redirect you to the new location of this page, but it may" -" not be the right one." -msgstr "" -"Uwaga: Zażądano przedawnionego URL'a z tego serwera. " -"NastÄ…piÅ‚a próba przekierowania do nowej lokalizacji, ale może ona być " -"niewÅ‚aÅ›ciwa." - -#: sphinx/templates/search.html:7 -#, fuzzy -msgid "" -"From here you can search these documents. Enter your search\n" -" words into the box below and click \"search\". Note that the search\n" -" function will automatically search for all of the words. Pages\n" -" containing fewer words won't appear in the result list." -msgstr "" -"StÄ…d możesz przeszukać dokumentacjÄ™. Wprowadź szukane\n" -" sÅ‚owa w poniższym okienku i kliknij \"Szukaj\". Zwróć uwagÄ™, że\n" -" funkcja szukajÄ…ca bÄ™dzie automatycznie szukaÅ‚a wszystkich słów. " -"Strony nie zawierajÄ…ce wszystkich słów nie znajdÄ… siÄ™ na wynikowej " -"liÅ›cie." - -#: sphinx/templates/search.html:14 -msgid "search" -msgstr "Szukaj" - -#: sphinx/templates/search.html:20 -msgid "Your search did not match any results." -msgstr "Nie znaleziono żadnych pasujÄ…cych stron." - -#: sphinx/templates/changes/frameset.html:5 -#: sphinx/templates/changes/versionchanges.html:12 -#, python-format -msgid "Changes in Version %(version)s — %(docstitle)s" -msgstr "Zmiany w wesji %(version)s — %(docstitle)s" - -#: sphinx/templates/changes/rstsource.html:5 -#, python-format -msgid "%(filename)s — %(docstitle)s" -msgstr "%(filename)s — %(docstitle)s" - -#: sphinx/templates/changes/versionchanges.html:17 -#, python-format -msgid "Automatically generated list of changes in version %(version)s" -msgstr "Automatycznie wygenerowana lista zmian w wersji %(version)s" - -#: sphinx/templates/changes/versionchanges.html:18 -msgid "Library changes" -msgstr "Zmiany w bibliotekach" - -#: sphinx/templates/changes/versionchanges.html:23 -msgid "C API changes" -msgstr "Zmiany w C API" - -#: sphinx/templates/changes/versionchanges.html:25 -msgid "Other changes" -msgstr "Inne zmiany" - diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/pt_BR/LC_MESSAGES/sphinx.js --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/pt_BR/LC_MESSAGES/sphinx.js Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -Documentation.addTranslations({"locale": "pt_BR", "plural_expr": "(n > 1)", "messages": {"module, in ": "m\u00f3dulo, em ", "Preparing search...": "Preparando pesquisa...", "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories.": "Sua pesquisa n\u00e3o encontrou nenhum documento. 
Por favor assegure-se de que todas as palavras foram digitadas corretamente e de que voc\u00ea tenha selecionado o m\u00ednimo de categorias.", "Search finished, found %s page(s) matching the search query.": "Pesquisa finalizada, foram encontrada(s) %s p\u00e1gina(s) que conferem com o crit\u00e9rio de pesquisa.", ", in ": ", em ", "Permalink to this headline": "Link permanente para este t\u00edtulo", "Searching": "Pesquisando", "Permalink to this definition": "Link permanente para esta defini\u00e7\u00e3o", "Hide Search Matches": "Esconder Resultados da Pesquisa", "Search Results": "Resultados da Pesquisa"}}); \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/pt_BR/LC_MESSAGES/sphinx.mo Binary file buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/pt_BR/LC_MESSAGES/sphinx.mo has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/pt_BR/LC_MESSAGES/sphinx.po --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/pt_BR/LC_MESSAGES/sphinx.po Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,606 +0,0 @@ -# Portuguese (Brazil) translations for Sphinx. -# Copyright (C) 2008 ORGANIZATION -# This file is distributed under the same license as the Sphinx project. -# FIRST AUTHOR , 2008. -# -msgid "" -msgstr "" -"Project-Id-Version: Sphinx 0.5\n" -"Report-Msgid-Bugs-To: roger.demetrescu@gmail.com\n" -"POT-Creation-Date: 2008-11-09 19:46+0100\n" -"PO-Revision-Date: 2008-11-27 18:40+0100\n" -"Last-Translator: Roger Demetrescu \n" -"Language-Team: pt_BR \n" -"Plural-Forms: nplurals=2; plural=(n > 1)\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=utf-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 0.9.4\n" - -#: sphinx/builder.py:408 -#, python-format -msgid "%b %d, %Y" -msgstr "%d/%m/%Y" - -#: sphinx/builder.py:427 sphinx/templates/defindex.html:21 -msgid "General Index" -msgstr "Ãndice Geral" - -#: sphinx/builder.py:427 -msgid "index" -msgstr "índice" - -#: sphinx/builder.py:429 sphinx/htmlhelp.py:156 -#: sphinx/templates/defindex.html:19 sphinx/templates/modindex.html:2 -#: sphinx/templates/modindex.html:13 -msgid "Global Module Index" -msgstr "Ãndice Global de Módulos" - -#: sphinx/builder.py:429 -msgid "modules" -msgstr "módulos" - -#: sphinx/builder.py:466 -msgid "next" -msgstr "próximo" - -#: sphinx/builder.py:473 -msgid "previous" -msgstr "anterior" - -#: sphinx/builder.py:1054 -msgid " (in " -msgstr " (em " - -#: sphinx/builder.py:1129 -msgid "Builtins" -msgstr "Internos" - -#: sphinx/builder.py:1131 -msgid "Module level" -msgstr "Módulo" - -#: sphinx/environment.py:102 sphinx/latexwriter.py:169 -#, python-format -msgid "%B %d, %Y" -msgstr "%d/%m/%Y" - -#: sphinx/environment.py:291 sphinx/latexwriter.py:175 -#: sphinx/templates/genindex-single.html:2 -#: sphinx/templates/genindex-split.html:2 -#: sphinx/templates/genindex-split.html:5 sphinx/templates/genindex.html:2 -#: sphinx/templates/genindex.html:5 sphinx/templates/genindex.html:48 -#: sphinx/templates/layout.html:130 -msgid "Index" -msgstr "Ãndice" - -#: sphinx/environment.py:292 sphinx/latexwriter.py:174 -msgid "Module Index" -msgstr "Ãndice do Módulo" - -#: sphinx/environment.py:293 sphinx/templates/defindex.html:16 -msgid "Search Page" -msgstr "Página de Pesquisa" - -#: sphinx/htmlwriter.py:79 sphinx/static/doctools.js:145 -msgid 
"Permalink to this definition" -msgstr "Link permanente para esta definição" - -#: sphinx/htmlwriter.py:399 sphinx/static/doctools.js:139 -msgid "Permalink to this headline" -msgstr "Link permanente para este título" - -#: sphinx/latexwriter.py:172 -msgid "Release" -msgstr "Versão" - -#: sphinx/roles.py:53 sphinx/directives/desc.py:537 -#, python-format -msgid "environment variable; %s" -msgstr "váriavel de ambiente; %s" - -#: sphinx/roles.py:60 -#, python-format -msgid "Python Enhancement Proposals!PEP %s" -msgstr "Python Enhancement Proposals!PEP %s" - -#: sphinx/textwriter.py:166 -#, python-format -msgid "Platform: %s" -msgstr "Plataforma: %s" - -#: sphinx/textwriter.py:422 -msgid "[image]" -msgstr "[imagem]" - -#: sphinx/directives/desc.py:25 -#, python-format -msgid "%s() (built-in function)" -msgstr "%s() (função interna)" - -#: sphinx/directives/desc.py:26 sphinx/directives/desc.py:42 -#: sphinx/directives/desc.py:54 -#, python-format -msgid "%s() (in module %s)" -msgstr "%s() (no módulo %s)" - -#: sphinx/directives/desc.py:29 -#, python-format -msgid "%s (built-in variable)" -msgstr "%s (variável interna)" - -#: sphinx/directives/desc.py:30 sphinx/directives/desc.py:66 -#, python-format -msgid "%s (in module %s)" -msgstr "%s (no módulo %s)" - -#: sphinx/directives/desc.py:33 -#, python-format -msgid "%s (built-in class)" -msgstr "%s (classe interna)" - -#: sphinx/directives/desc.py:34 -#, python-format -msgid "%s (class in %s)" -msgstr "%s (classe em %s)" - -#: sphinx/directives/desc.py:46 -#, python-format -msgid "%s() (%s.%s method)" -msgstr "%s() (método %s.%s)" - -#: sphinx/directives/desc.py:48 -#, python-format -msgid "%s() (%s method)" -msgstr "%s() (método %s)" - -#: sphinx/directives/desc.py:58 -#, python-format -msgid "%s() (%s.%s static method)" -msgstr "%s() (método estático %s.%s)" - -#: sphinx/directives/desc.py:60 -#, python-format -msgid "%s() (%s static method)" -msgstr "%s() (método estático %s)" - -#: sphinx/directives/desc.py:70 -#, python-format -msgid "%s (%s.%s attribute)" -msgstr "%s (atributo %s.%s)" - -#: sphinx/directives/desc.py:72 -#, python-format -msgid "%s (%s attribute)" -msgstr "%s (atributo %s)" - -#: sphinx/directives/desc.py:74 -#, python-format -msgid "%s (C function)" -msgstr "%s (função C)" - -#: sphinx/directives/desc.py:76 -#, python-format -msgid "%s (C member)" -msgstr "%s (membro C)" - -#: sphinx/directives/desc.py:78 -#, python-format -msgid "%s (C macro)" -msgstr "%s (macro C)" - -#: sphinx/directives/desc.py:80 -#, python-format -msgid "%s (C type)" -msgstr "%s (tipo C)" - -#: sphinx/directives/desc.py:82 -#, python-format -msgid "%s (C variable)" -msgstr "%s (variável C)" - -#: sphinx/directives/desc.py:100 -msgid "Raises" -msgstr "Levanta" - -#: sphinx/directives/desc.py:104 -msgid "Variable" -msgstr "Variável" - -#: sphinx/directives/desc.py:107 -msgid "Returns" -msgstr "Retorna" - -#: sphinx/directives/desc.py:116 -msgid "Return type" -msgstr "Tipo de retorno" - -#: sphinx/directives/desc.py:143 -msgid "Parameters" -msgstr "Parâmetros" - -#: sphinx/directives/desc.py:423 -#, python-format -msgid "%scommand line option; %s" -msgstr "%sopção de linha de comando; %s" - -#: sphinx/directives/other.py:101 -msgid "Platforms: " -msgstr "Plataformas: " - -#: sphinx/directives/other.py:106 -#, python-format -msgid "%s (module)" -msgstr "%s (módulo)" - -#: sphinx/directives/other.py:146 -msgid "Section author: " -msgstr "Autor da seção: " - -#: sphinx/directives/other.py:148 -msgid "Module author: " -msgstr "Autor do módulo: " - -#: 
sphinx/directives/other.py:150 -msgid "Author: " -msgstr "Autor: " - -#: sphinx/directives/other.py:246 -msgid "See also" -msgstr "Veja também" - -#: sphinx/ext/todo.py:31 -msgid "Todo" -msgstr "Por fazer" - -#: sphinx/ext/todo.py:75 -#, python-format -msgid "(The original entry is located in %s, line %d and can be found " -msgstr "(A entrada original está localizada em %s, linha %d e pode ser encontrada " - -#: sphinx/ext/todo.py:81 -msgid "here" -msgstr "aqui" - -#: sphinx/locale/__init__.py:15 -msgid "Attention" -msgstr "Atenção" - -#: sphinx/locale/__init__.py:16 -msgid "Caution" -msgstr "Cuidado" - -#: sphinx/locale/__init__.py:17 -msgid "Danger" -msgstr "Perigo" - -#: sphinx/locale/__init__.py:18 -msgid "Error" -msgstr "Erro" - -#: sphinx/locale/__init__.py:19 -msgid "Hint" -msgstr "Dica" - -#: sphinx/locale/__init__.py:20 -msgid "Important" -msgstr "Importante" - -#: sphinx/locale/__init__.py:21 -msgid "Note" -msgstr "Nota" - -#: sphinx/locale/__init__.py:22 -msgid "See Also" -msgstr "Veja Também" - -#: sphinx/locale/__init__.py:23 -msgid "Tip" -msgstr "Dica" - -#: sphinx/locale/__init__.py:24 -msgid "Warning" -msgstr "Aviso" - -#: sphinx/locale/__init__.py:28 -#, python-format -msgid "New in version %s" -msgstr "Novo na versão %s" - -#: sphinx/locale/__init__.py:29 -#, python-format -msgid "Changed in version %s" -msgstr "Alterado na versão %s" - -#: sphinx/locale/__init__.py:30 -#, python-format -msgid "Deprecated since version %s" -msgstr "Obsoleto desde a versão %s" - -#: sphinx/locale/__init__.py:34 -msgid "module" -msgstr "módulo" - -#: sphinx/locale/__init__.py:35 -msgid "keyword" -msgstr "palavra-chave" - -#: sphinx/locale/__init__.py:36 -msgid "operator" -msgstr "operador" - -#: sphinx/locale/__init__.py:37 -msgid "object" -msgstr "objeto" - -#: sphinx/locale/__init__.py:38 -msgid "exception" -msgstr "exceção" - -#: sphinx/locale/__init__.py:39 -msgid "statement" -msgstr "comando" - -#: sphinx/locale/__init__.py:40 -msgid "built-in function" -msgstr "função interna" - -#: sphinx/static/doctools.js:174 -msgid "Hide Search Matches" -msgstr "Esconder Resultados da Pesquisa" - -#: sphinx/static/searchtools.js:274 -msgid "Searching" -msgstr "Pesquisando" - -#: sphinx/static/searchtools.js:279 -msgid "Preparing search..." -msgstr "Preparando pesquisa..." - -#: sphinx/static/searchtools.js:338 -msgid "module, in " -msgstr "módulo, em " - -#: sphinx/static/searchtools.js:347 -msgid ", in " -msgstr ", em " - -#: sphinx/static/searchtools.js:447 sphinx/templates/search.html:18 -msgid "Search Results" -msgstr "Resultados da Pesquisa" - -#: sphinx/static/searchtools.js:449 -msgid "" -"Your search did not match any documents. Please make sure that all words " -"are spelled correctly and that you've selected enough categories." -msgstr "" -"Sua pesquisa não encontrou nenhum documento. Por favor assegure-se de que" -" todas as palavras foram digitadas corretamente e de que você tenha " -"selecionado o mínimo de categorias." - -#: sphinx/static/searchtools.js:451 -#, python-format -msgid "Search finished, found %s page(s) matching the search query." -msgstr "" -"Pesquisa finalizada, foram encontrada(s) %s página(s) que conferem com o " -"critério de pesquisa." 
- -#: sphinx/templates/defindex.html:2 -msgid "Overview" -msgstr "Visão geral" - -#: sphinx/templates/defindex.html:11 -msgid "Indices and tables:" -msgstr "Ãndices e tabelas:" - -#: sphinx/templates/defindex.html:14 -msgid "Complete Table of Contents" -msgstr "Tabela de Conteúdo Completa" - -#: sphinx/templates/defindex.html:15 -msgid "lists all sections and subsections" -msgstr "Lista todas seções e subseções" - -#: sphinx/templates/defindex.html:17 -msgid "search this documentation" -msgstr "Pesquisar esta documentação" - -#: sphinx/templates/defindex.html:20 -msgid "quick access to all modules" -msgstr "acesso rápido para todos os módulos" - -#: sphinx/templates/defindex.html:22 -msgid "all functions, classes, terms" -msgstr "todas funções, classes, termos" - -#: sphinx/templates/genindex-single.html:5 -#, python-format -msgid "Index – %(key)s" -msgstr "Ãndice – %(key)s" - -#: sphinx/templates/genindex-single.html:44 -#: sphinx/templates/genindex-split.html:14 -#: sphinx/templates/genindex-split.html:27 sphinx/templates/genindex.html:54 -msgid "Full index on one page" -msgstr "Ãndice completo em uma página" - -#: sphinx/templates/genindex-split.html:7 -msgid "Index pages by letter" -msgstr "Paginas de índice por letra" - -#: sphinx/templates/genindex-split.html:15 -msgid "can be huge" -msgstr "pode ser enorme" - -#: sphinx/templates/layout.html:9 -msgid "Navigation" -msgstr "Navegação" - -#: sphinx/templates/layout.html:40 -msgid "Table Of Contents" -msgstr "Tabela de Conteúdo" - -#: sphinx/templates/layout.html:46 -msgid "Previous topic" -msgstr "Tópico anterior" - -#: sphinx/templates/layout.html:47 -msgid "previous chapter" -msgstr "capítulo anterior" - -#: sphinx/templates/layout.html:50 -msgid "Next topic" -msgstr "Próximo tópico" - -#: sphinx/templates/layout.html:51 -msgid "next chapter" -msgstr "próximo capítulo" - -#: sphinx/templates/layout.html:55 -msgid "This Page" -msgstr "Esta Página" - -#: sphinx/templates/layout.html:59 -msgid "Suggest Change" -msgstr "Sugerir Alteração" - -#: sphinx/templates/layout.html:60 sphinx/templates/layout.html:62 -msgid "Show Source" -msgstr "Exibir Fonte" - -#: sphinx/templates/layout.html:71 -msgid "Quick search" -msgstr "Pesquisa rápida" - -#: sphinx/templates/layout.html:71 -msgid "Keyword search" -msgstr "Pesquisa de palavras-chave" - -#: sphinx/templates/layout.html:73 -msgid "Go" -msgstr "Ir" - -#: sphinx/templates/layout.html:78 -msgid "Enter a module, class or function name." -msgstr "Informe o nome de um módulo, classe ou função." - -#: sphinx/templates/layout.html:119 -#, python-format -msgid "Search within %(docstitle)s" -msgstr "Pesquisar dentro de %(docstitle)s" - -#: sphinx/templates/layout.html:128 -msgid "About these documents" -msgstr "Sobre estes documentos" - -#: sphinx/templates/layout.html:131 sphinx/templates/search.html:2 -#: sphinx/templates/search.html:5 -msgid "Search" -msgstr "Pesquisar" - -#: sphinx/templates/layout.html:133 -msgid "Copyright" -msgstr "Copyright" - -#: sphinx/templates/layout.html:178 -#, python-format -msgid "© Copyright %(copyright)s." -msgstr "© Copyright %(copyright)s." - -#: sphinx/templates/layout.html:180 -#, python-format -msgid "© Copyright %(copyright)s." -msgstr "© Copyright %(copyright)s." - -#: sphinx/templates/layout.html:183 -#, python-format -msgid "Last updated on %(last_updated)s." -msgstr "Última atualização em %(last_updated)s." - -#: sphinx/templates/layout.html:186 -#, python-format -msgid "" -"Created using Sphinx " -"%(sphinx_version)s." 
-msgstr "" -"Criado com Sphinx " -"%(sphinx_version)s." - -#: sphinx/templates/modindex.html:15 -msgid "Most popular modules:" -msgstr "Módulos mais populares:" - -#: sphinx/templates/modindex.html:24 -msgid "Show modules only available on these platforms" -msgstr "Exibir somente módulos disponíveis nestas plataformas" - -#: sphinx/templates/modindex.html:56 -msgid "Deprecated" -msgstr "Obsoleto" - -#: sphinx/templates/opensearch.xml:4 -#, python-format -msgid "Search %(docstitle)s" -msgstr "Pesquisar em %(docstitle)s" - -#: sphinx/templates/page.html:8 -msgid "" -"Note: You requested an out-of-date URL from this server." -" We've tried to redirect you to the new location of this page, but it may" -" not be the right one." -msgstr "" -"Nota: Você requisitou uma URL desatualizada deste " -"servidor. Tentamos redirecioná-lo para um novo endereço desta página, " -"porém é possível que o mesmo não seja o correto." - -#: sphinx/templates/search.html:7 -#, fuzzy -msgid "" -"From here you can search these documents. Enter your search\n" -" words into the box below and click \"search\". Note that the search\n" -" function will automatically search for all of the words. Pages\n" -" containing fewer words won't appear in the result list." -msgstr "" -"A partir daqui você pode pesquisar estes documentos. Preencha suas \n" -" palavras de pesquisa na caixa abaixo e clique em \"pesquisar\". " -"Observe que a função de pesquisa\n" -" irá pesquisar automaticamente por todas as palavras.\n" -" Páginas contendo menos palavras não irão aparecer na lista de " -"resultado." - -#: sphinx/templates/search.html:14 -msgid "search" -msgstr "pesquisar" - -#: sphinx/templates/search.html:20 -msgid "Your search did not match any results." -msgstr "Sua pesquisa não encontrou nenhum resultado." - -#: sphinx/templates/changes/frameset.html:5 -#: sphinx/templates/changes/versionchanges.html:12 -#, python-format -msgid "Changes in Version %(version)s — %(docstitle)s" -msgstr "Alterações na Versão%(version)s — %(docstitle)s" - -#: sphinx/templates/changes/rstsource.html:5 -#, python-format -msgid "%(filename)s — %(docstitle)s" -msgstr "%(filename)s — %(docstitle)s" - -#: sphinx/templates/changes/versionchanges.html:17 -#, python-format -msgid "Automatically generated list of changes in version %(version)s" -msgstr "Lista de alterações na versão %(version)s gerada automaticamente" - -#: sphinx/templates/changes/versionchanges.html:18 -msgid "Library changes" -msgstr "Alterações na biblioteca" - -#: sphinx/templates/changes/versionchanges.html:23 -msgid "C API changes" -msgstr "Alterações na API C" - -#: sphinx/templates/changes/versionchanges.html:25 -msgid "Other changes" -msgstr "Outras alterações" - diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/sl/LC_MESSAGES/sphinx.js --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/sl/LC_MESSAGES/sphinx.js Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -Documentation.addTranslations({"locale": "sl", "plural_expr": "0", "messages": {"module, in ": "modul, v ", "Preparing search...": "Pripravljam iskanje...", "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories.": "Za va\u0161e iskanje ni rezultatov. 
Prosimo preglejte ali so vse besede pravilno \u010drkovane in ali ste izbrali dovolj kategorij.", "Search finished, found %s page(s) matching the search query.": "Iskanje kon\u010dano, najdeno %s strani, ki ustrezajo iskalnemu nizu.", ", in ": ", v ", "Permalink to this headline": "Povezava na naslov", "Searching": "I\u0161\u010dem", "Permalink to this definition": "Povezava na to definicijo", "Hide Search Matches": "Skrij Resultate Iskanja", "Search Results": "Rezultati Iskanja"}}); \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/sl/LC_MESSAGES/sphinx.mo Binary file buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/sl/LC_MESSAGES/sphinx.mo has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/sl/LC_MESSAGES/sphinx.po --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/sl/LC_MESSAGES/sphinx.po Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,597 +0,0 @@ - -msgid "" -msgstr "" -"Project-Id-Version: Sphinx\n" -"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2008-09-11 23:58+0200\n" -"PO-Revision-Date: 2008-11-27 18:40+0100\n" -"Last-Translator: Rok Garbas \n" -"Language-Team: Rok Garbas \n" -"Plural-Forms: nplurals=1; plural=0\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=utf-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 0.9.4\n" - -#: sphinx/builder.py:408 -#, python-format -msgid "%b %d, %Y" -msgstr "%d %b, %Y" - -#: sphinx/builder.py:427 sphinx/templates/defindex.html:21 -msgid "General Index" -msgstr "SploÅ¡ni abecedni seznam" - -#: sphinx/builder.py:427 -msgid "index" -msgstr "abecedni seznam" - -#: sphinx/builder.py:429 sphinx/htmlhelp.py:156 -#: sphinx/templates/defindex.html:19 sphinx/templates/modindex.html:2 -#: sphinx/templates/modindex.html:13 -msgid "Global Module Index" -msgstr "SploÅ¡en Seznam Modulov" - -#: sphinx/builder.py:429 -msgid "modules" -msgstr "Moduli" - -#: sphinx/builder.py:466 -msgid "next" -msgstr "naprej" - -#: sphinx/builder.py:473 -msgid "previous" -msgstr "nazaj" - -#: sphinx/builder.py:1054 -msgid " (in " -msgstr "(v " - -#: sphinx/builder.py:1129 -msgid "Builtins" -msgstr "Vgrajeni deli" - -#: sphinx/builder.py:1131 -msgid "Module level" -msgstr "Nivo modula" - -#: sphinx/environment.py:102 sphinx/latexwriter.py:169 -#, python-format -msgid "%B %d, %Y" -msgstr "%d %B, %Y" - -#: sphinx/environment.py:291 sphinx/latexwriter.py:175 -#: sphinx/templates/genindex-single.html:2 -#: sphinx/templates/genindex-split.html:2 -#: sphinx/templates/genindex-split.html:5 sphinx/templates/genindex.html:2 -#: sphinx/templates/genindex.html:5 sphinx/templates/genindex.html:48 -#: sphinx/templates/layout.html:130 -msgid "Index" -msgstr "Abecedni seznam" - -#: sphinx/environment.py:292 sphinx/latexwriter.py:174 -msgid "Module Index" -msgstr "Seznam modulov" - -#: sphinx/environment.py:293 sphinx/templates/defindex.html:16 -msgid "Search Page" -msgstr "Iskalna stran" - -#: sphinx/htmlwriter.py:79 sphinx/static/doctools.js:145 -msgid "Permalink to this definition" -msgstr "Povezava na to definicijo" - -#: sphinx/htmlwriter.py:399 sphinx/static/doctools.js:139 -msgid "Permalink to this headline" -msgstr "Povezava na naslov" - -#: sphinx/latexwriter.py:172 -msgid "Release" -msgstr "Izdaja" - -#: sphinx/roles.py:53 sphinx/directives/desc.py:537 -#, 
python-format -msgid "environment variable; %s" -msgstr "globalna spremenljivka; %s" - -#: sphinx/roles.py:60 -#, python-format -msgid "Python Enhancement Proposals!PEP %s" -msgstr "Python Enhancement Proposals!PEP %s" - -#: sphinx/textwriter.py:166 -#, python-format -msgid "Platform: %s" -msgstr "Platforma: %s" - -#: sphinx/textwriter.py:422 -msgid "[image]" -msgstr "[slika]" - -#: sphinx/directives/desc.py:25 -#, python-format -msgid "%s() (built-in function)" -msgstr "%s() (vgrajene funkcije)" - -#: sphinx/directives/desc.py:26 sphinx/directives/desc.py:42 -#: sphinx/directives/desc.py:54 -#, python-format -msgid "%s() (in module %s)" -msgstr "%s() (v modulu %s)" - -#: sphinx/directives/desc.py:29 -#, python-format -msgid "%s (built-in variable)" -msgstr "%s (vgrajene spremenljivke)" - -#: sphinx/directives/desc.py:30 sphinx/directives/desc.py:66 -#, python-format -msgid "%s (in module %s)" -msgstr "%s (v modulu %s)" - -#: sphinx/directives/desc.py:33 -#, python-format -msgid "%s (built-in class)" -msgstr "%s (vgrajen razred)" - -#: sphinx/directives/desc.py:34 -#, python-format -msgid "%s (class in %s)" -msgstr "%s (razred v %s)" - -#: sphinx/directives/desc.py:46 -#, python-format -msgid "%s() (%s.%s method)" -msgstr "%s() (%s.%s metoda)" - -#: sphinx/directives/desc.py:48 -#, python-format -msgid "%s() (%s method)" -msgstr "%s() (%s metoda)" - -#: sphinx/directives/desc.py:58 -#, python-format -msgid "%s() (%s.%s static method)" -msgstr "%s() (%s.%s statiÄna metoda)" - -#: sphinx/directives/desc.py:60 -#, python-format -msgid "%s() (%s static method)" -msgstr "%s() (%s statiÄna metoda)" - -#: sphinx/directives/desc.py:70 -#, python-format -msgid "%s (%s.%s attribute)" -msgstr "%s (%s.%s atribut)" - -#: sphinx/directives/desc.py:72 -#, python-format -msgid "%s (%s attribute)" -msgstr "%s (%s atribut)" - -#: sphinx/directives/desc.py:74 -#, python-format -msgid "%s (C function)" -msgstr "%s (C funkcija)" - -#: sphinx/directives/desc.py:76 -#, python-format -msgid "%s (C member)" -msgstr "%s (C Älan)" - -#: sphinx/directives/desc.py:78 -#, python-format -msgid "%s (C macro)" -msgstr "%s (C makro)" - -#: sphinx/directives/desc.py:80 -#, python-format -msgid "%s (C type)" -msgstr "%s (C tip)" - -#: sphinx/directives/desc.py:82 -#, python-format -msgid "%s (C variable)" -msgstr "%s (C spremenljivka)" - -#: sphinx/directives/desc.py:100 -msgid "Raises" -msgstr "Javi" - -#: sphinx/directives/desc.py:104 -msgid "Variable" -msgstr "Spremenljivka" - -#: sphinx/directives/desc.py:107 -msgid "Returns" -msgstr "Vrne" - -#: sphinx/directives/desc.py:116 -msgid "Return type" -msgstr "Vrne tip" - -#: sphinx/directives/desc.py:143 -msgid "Parameters" -msgstr "Parametri" - -#: sphinx/directives/desc.py:423 -#, python-format -msgid "%scommand line option; %s" -msgstr "%sopcija komandne linije; %s" - -#: sphinx/directives/other.py:101 -msgid "Platforms: " -msgstr "Platforma:" - -#: sphinx/directives/other.py:106 -#, python-format -msgid "%s (module)" -msgstr "%s (modul)" - -#: sphinx/directives/other.py:146 -msgid "Section author: " -msgstr "Avtor sekcije:" - -#: sphinx/directives/other.py:148 -msgid "Module author: " -msgstr "Avtor modula:" - -#: sphinx/directives/other.py:150 -msgid "Author: " -msgstr "Avtor:" - -#: sphinx/directives/other.py:246 -msgid "See also" -msgstr "Poglej tudi" - -#: sphinx/ext/todo.py:31 -msgid "Todo" -msgstr "Naredi" - -#: sphinx/ext/todo.py:75 -#, python-format -msgid "(The original entry is located in %s, line %d and can be found " -msgstr "(Originalen vnos se nahajana v 
%s, vrstica %d in jo je moÄ poiskati " - -#: sphinx/ext/todo.py:81 -msgid "here" -msgstr "tukaj" - -#: sphinx/locale/__init__.py:15 -msgid "Attention" -msgstr "Pozor" - -#: sphinx/locale/__init__.py:16 -msgid "Caution" -msgstr "Previdno" - -#: sphinx/locale/__init__.py:17 -msgid "Danger" -msgstr "Navarno" - -#: sphinx/locale/__init__.py:18 -msgid "Error" -msgstr "Napaka" - -#: sphinx/locale/__init__.py:19 -msgid "Hint" -msgstr "Nasvet" - -#: sphinx/locale/__init__.py:20 -msgid "Important" -msgstr "Pomembno" - -#: sphinx/locale/__init__.py:21 -msgid "Note" -msgstr "Opomba" - -#: sphinx/locale/__init__.py:22 -msgid "See Also" -msgstr "Poglej Tudi" - -#: sphinx/locale/__init__.py:23 -msgid "Tip" -msgstr "Nasvet" - -#: sphinx/locale/__init__.py:24 -msgid "Warning" -msgstr "Opozorilo" - -#: sphinx/locale/__init__.py:28 -#, python-format -msgid "New in version %s" -msgstr "Novo v verziji %s" - -#: sphinx/locale/__init__.py:29 -#, python-format -msgid "Changed in version %s" -msgstr "Spemenjeno v verziji %s" - -#: sphinx/locale/__init__.py:30 -#, python-format -msgid "Deprecated since version %s" -msgstr "Zastarelo od verzije %s" - -#: sphinx/locale/__init__.py:34 -msgid "module" -msgstr "modul" - -#: sphinx/locale/__init__.py:35 -msgid "keyword" -msgstr "klluÄna beseda" - -#: sphinx/locale/__init__.py:36 -msgid "operator" -msgstr "operator" - -#: sphinx/locale/__init__.py:37 -msgid "object" -msgstr "objekt" - -#: sphinx/locale/__init__.py:38 -msgid "exception" -msgstr "izjema" - -#: sphinx/locale/__init__.py:39 -msgid "statement" -msgstr "izjava" - -#: sphinx/locale/__init__.py:40 -msgid "built-in function" -msgstr "vgrajene funkcije" - -#: sphinx/static/doctools.js:174 -msgid "Hide Search Matches" -msgstr "Skrij Resultate Iskanja" - -#: sphinx/static/searchtools.js:274 -msgid "Searching" -msgstr "IÅ¡Äem" - -#: sphinx/static/searchtools.js:279 -msgid "Preparing search..." -msgstr "Pripravljam iskanje..." - -#: sphinx/static/searchtools.js:338 -msgid "module, in " -msgstr "modul, v " - -#: sphinx/static/searchtools.js:347 -msgid ", in " -msgstr ", v " - -#: sphinx/static/searchtools.js:447 sphinx/templates/search.html:18 -msgid "Search Results" -msgstr "Rezultati Iskanja" - -#: sphinx/static/searchtools.js:449 -msgid "" -"Your search did not match any documents. Please make sure that all words " -"are spelled correctly and that you've selected enough categories." -msgstr "" -"Za vaÅ¡e iskanje ni rezultatov. Prosimo preglejte ali so vse besede " -"pravilno Ärkovane in ali ste izbrali dovolj kategorij." - -#: sphinx/static/searchtools.js:451 -#, python-format -msgid "Search finished, found %s page(s) matching the search query." -msgstr "Iskanje konÄano, najdeno %s strani, ki ustrezajo iskalnemu nizu." 
- -#: sphinx/templates/defindex.html:2 -msgid "Overview" -msgstr "Pregled" - -#: sphinx/templates/defindex.html:11 -msgid "Indices and tables:" -msgstr "Kazalo in tabele:" - -#: sphinx/templates/defindex.html:14 -msgid "Complete Table of Contents" -msgstr "Popoln Seznam Vsebine" - -#: sphinx/templates/defindex.html:15 -msgid "lists all sections and subsections" -msgstr "prikazi vse sekcije in podsekcije" - -#: sphinx/templates/defindex.html:17 -msgid "search this documentation" -msgstr "isÄi po dokumentaciji" - -#: sphinx/templates/defindex.html:20 -msgid "quick access to all modules" -msgstr "hiter dostop do vseh modulov" - -#: sphinx/templates/defindex.html:22 -msgid "all functions, classes, terms" -msgstr "vse funkcije, rezredi, termini" - -#: sphinx/templates/genindex-single.html:5 -#, python-format -msgid "Index – %(key)s" -msgstr "Seznam – %(key)s" - -#: sphinx/templates/genindex-single.html:44 -#: sphinx/templates/genindex-split.html:14 -#: sphinx/templates/genindex-split.html:27 sphinx/templates/genindex.html:54 -msgid "Full index on one page" -msgstr "Poln indeks na eni strani" - -#: sphinx/templates/genindex-split.html:7 -msgid "Index pages by letter" -msgstr "Indeksiraj strani po Ärki" - -#: sphinx/templates/genindex-split.html:15 -msgid "can be huge" -msgstr "lahko je veliko" - -#: sphinx/templates/layout.html:9 -msgid "Navigation" -msgstr "Navigacija" - -#: sphinx/templates/layout.html:40 -msgid "Table Of Contents" -msgstr "Seznam Vsebine" - -#: sphinx/templates/layout.html:46 -msgid "Previous topic" -msgstr "PrejÅ¡nja tema" - -#: sphinx/templates/layout.html:47 -msgid "previous chapter" -msgstr "prejÅ¡nje poglavje" - -#: sphinx/templates/layout.html:50 -msgid "Next topic" -msgstr "Naslednja tema" - -#: sphinx/templates/layout.html:51 -msgid "next chapter" -msgstr "naslednje poglavje" - -#: sphinx/templates/layout.html:55 -msgid "This Page" -msgstr "Ta stran" - -#: sphinx/templates/layout.html:59 -msgid "Suggest Change" -msgstr "Predlagaj spremembo" - -#: sphinx/templates/layout.html:60 sphinx/templates/layout.html:62 -msgid "Show Source" -msgstr "Prikaži izvorno kodo" - -#: sphinx/templates/layout.html:71 -msgid "Quick search" -msgstr "Hitro iskanje" - -#: sphinx/templates/layout.html:71 -msgid "Keyword search" -msgstr "Iskanje po kljuÄniih besedah" - -#: sphinx/templates/layout.html:73 -msgid "Go" -msgstr "Potrdi" - -#: sphinx/templates/layout.html:78 -msgid "Enter a module, class or function name." -msgstr "Vnesi ime mudla, razreda ali funkcije." - -#: sphinx/templates/layout.html:119 -#, python-format -msgid "Search within %(docstitle)s" -msgstr "IÅ¡Äi med %(docstitle)s" - -#: sphinx/templates/layout.html:128 -msgid "About these documents" -msgstr "O teh dokumentih" - -#: sphinx/templates/layout.html:131 sphinx/templates/search.html:2 -#: sphinx/templates/search.html:5 -msgid "Search" -msgstr "IÅ¡Äi" - -#: sphinx/templates/layout.html:133 -msgid "Copyright" -msgstr "Vse pravice pridržane" - -#: sphinx/templates/layout.html:178 -#, python-format -msgid "© Copyright %(copyright)s." -msgstr "© Vse pravice pridržane %(copyright)s." - -#: sphinx/templates/layout.html:180 -#, python-format -msgid "© Copyright %(copyright)s." -msgstr "© Vse pravice pridržane %(copyright)s." - -#: sphinx/templates/layout.html:183 -#, python-format -msgid "Last updated on %(last_updated)s." -msgstr "ZadnjiÄ posodobljeno na %(last_updated)s." - -#: sphinx/templates/layout.html:186 -#, python-format -msgid "" -"Created using Sphinx " -"%(sphinx_version)s." 
-msgstr "" -"Narejeno s Sphinx " -"%(sphinx_version)s." - -#: sphinx/templates/modindex.html:15 -msgid "Most popular modules:" -msgstr "Najbolj popularni moduli:" - -#: sphinx/templates/modindex.html:24 -msgid "Show modules only available on these platforms" -msgstr "Prikaži module na razpolago na platformah" - -#: sphinx/templates/modindex.html:56 -msgid "Deprecated" -msgstr "Zastarelo" - -#: sphinx/templates/opensearch.xml:4 -#, python-format -msgid "Search %(docstitle)s" -msgstr "IÅ¡Äi %(docstitle)s" - -#: sphinx/templates/page.html:8 -msgid "" -"Note: You requested an out-of-date URL from this server." -" We've tried to redirect you to the new location of this page, but it may" -" not be the right one." -msgstr "" -"Opomba: VaÅ¡ zahtevek za URL s tega streznika je " -"zastaral. PoskuÅ¡ali smo vas preusmeriti na novo lokacijo, vendar utegne " -"biti napaÄna." - -#: sphinx/templates/search.html:7 -#, fuzzy -msgid "" -"From here you can search these documents. Enter your search\n" -" words into the box below and click \"search\". Note that the search\n" -" function will automatically search for all of the words. Pages\n" -" containing fewer words won't appear in the result list." -msgstr "" -"O tukaj lahko isÄete dokumente. Vnesite iskalni\n" -" niz v polje spodaj in pritisnite \"iÅ¡Äi\". Sproženo iskanje\n" -" bo iskalo po vseh besedah v iskalnem nizu. Strani, ki ne\n" -" vsebujejo vseh besed ne bodo prikazane na seznamu rezultatov." - -#: sphinx/templates/search.html:14 -msgid "search" -msgstr "iÅ¡Äi" - -#: sphinx/templates/search.html:20 -msgid "Your search did not match any results." -msgstr "VaÅ¡e iskanje ni imelo nobenega zadetka." - -#: sphinx/templates/changes/frameset.html:5 -#: sphinx/templates/changes/versionchanges.html:12 -#, python-format -msgid "Changes in Version %(version)s — %(docstitle)s" -msgstr "Spremembe v Verziji %(version)s — %(docstitle)s" - -#: sphinx/templates/changes/rstsource.html:5 -#, python-format -msgid "%(filename)s — %(docstitle)s" -msgstr "%(filename)s — %(docstitle)s" - -#: sphinx/templates/changes/versionchanges.html:17 -#, python-format -msgid "Automatically generated list of changes in version %(version)s" -msgstr "Avtomatsko generiran seznam sprememb v verziji %(version)s" - -#: sphinx/templates/changes/versionchanges.html:18 -msgid "Library changes" -msgstr "Spremembe knjižnice" - -#: sphinx/templates/changes/versionchanges.html:23 -msgid "C API changes" -msgstr "C API spremembe" - -#: sphinx/templates/changes/versionchanges.html:25 -msgid "Other changes" -msgstr "Ostale spremembe" - diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/sphinx.pot --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/sphinx.pot Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,589 +0,0 @@ -# Translations template for Sphinx. -# Copyright (C) 2008 ORGANIZATION -# This file is distributed under the same license as the Sphinx project. -# FIRST AUTHOR , 2008. 
-# -#, fuzzy -msgid "" -msgstr "" -"Project-Id-Version: Sphinx 0.5\n" -"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2008-11-27 18:39+0100\n" -"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" -"Last-Translator: FULL NAME \n" -"Language-Team: LANGUAGE \n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=utf-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 0.9.4\n" - -#: sphinx/builder.py:408 -#, python-format -msgid "%b %d, %Y" -msgstr "" - -#: sphinx/builder.py:427 sphinx/templates/defindex.html:21 -msgid "General Index" -msgstr "" - -#: sphinx/builder.py:427 -msgid "index" -msgstr "" - -#: sphinx/builder.py:429 sphinx/htmlhelp.py:156 -#: sphinx/templates/defindex.html:19 sphinx/templates/modindex.html:2 -#: sphinx/templates/modindex.html:13 -msgid "Global Module Index" -msgstr "" - -#: sphinx/builder.py:429 -msgid "modules" -msgstr "" - -#: sphinx/builder.py:466 -msgid "next" -msgstr "" - -#: sphinx/builder.py:473 -msgid "previous" -msgstr "" - -#: sphinx/builder.py:1054 -msgid " (in " -msgstr "" - -#: sphinx/builder.py:1129 -msgid "Builtins" -msgstr "" - -#: sphinx/builder.py:1131 -msgid "Module level" -msgstr "" - -#: sphinx/environment.py:102 sphinx/latexwriter.py:169 -#, python-format -msgid "%B %d, %Y" -msgstr "" - -#: sphinx/environment.py:291 sphinx/latexwriter.py:175 -#: sphinx/templates/genindex-single.html:2 -#: sphinx/templates/genindex-split.html:2 -#: sphinx/templates/genindex-split.html:5 sphinx/templates/genindex.html:2 -#: sphinx/templates/genindex.html:5 sphinx/templates/genindex.html:48 -#: sphinx/templates/layout.html:130 -msgid "Index" -msgstr "" - -#: sphinx/environment.py:292 sphinx/latexwriter.py:174 -msgid "Module Index" -msgstr "" - -#: sphinx/environment.py:293 sphinx/templates/defindex.html:16 -msgid "Search Page" -msgstr "" - -#: sphinx/htmlwriter.py:79 sphinx/static/doctools.js:145 -msgid "Permalink to this definition" -msgstr "" - -#: sphinx/htmlwriter.py:399 sphinx/static/doctools.js:139 -msgid "Permalink to this headline" -msgstr "" - -#: sphinx/latexwriter.py:172 -msgid "Release" -msgstr "" - -#: sphinx/roles.py:53 sphinx/directives/desc.py:537 -#, python-format -msgid "environment variable; %s" -msgstr "" - -#: sphinx/roles.py:60 -#, python-format -msgid "Python Enhancement Proposals!PEP %s" -msgstr "" - -#: sphinx/textwriter.py:166 -#, python-format -msgid "Platform: %s" -msgstr "" - -#: sphinx/textwriter.py:422 -msgid "[image]" -msgstr "" - -#: sphinx/directives/desc.py:25 -#, python-format -msgid "%s() (built-in function)" -msgstr "" - -#: sphinx/directives/desc.py:26 sphinx/directives/desc.py:42 -#: sphinx/directives/desc.py:54 -#, python-format -msgid "%s() (in module %s)" -msgstr "" - -#: sphinx/directives/desc.py:29 -#, python-format -msgid "%s (built-in variable)" -msgstr "" - -#: sphinx/directives/desc.py:30 sphinx/directives/desc.py:66 -#, python-format -msgid "%s (in module %s)" -msgstr "" - -#: sphinx/directives/desc.py:33 -#, python-format -msgid "%s (built-in class)" -msgstr "" - -#: sphinx/directives/desc.py:34 -#, python-format -msgid "%s (class in %s)" -msgstr "" - -#: sphinx/directives/desc.py:46 -#, python-format -msgid "%s() (%s.%s method)" -msgstr "" - -#: sphinx/directives/desc.py:48 -#, python-format -msgid "%s() (%s method)" -msgstr "" - -#: sphinx/directives/desc.py:58 -#, python-format -msgid "%s() (%s.%s static method)" -msgstr "" - -#: sphinx/directives/desc.py:60 -#, python-format -msgid "%s() (%s static method)" -msgstr "" - -#: sphinx/directives/desc.py:70 -#, python-format -msgid "%s 
(%s.%s attribute)" -msgstr "" - -#: sphinx/directives/desc.py:72 -#, python-format -msgid "%s (%s attribute)" -msgstr "" - -#: sphinx/directives/desc.py:74 -#, python-format -msgid "%s (C function)" -msgstr "" - -#: sphinx/directives/desc.py:76 -#, python-format -msgid "%s (C member)" -msgstr "" - -#: sphinx/directives/desc.py:78 -#, python-format -msgid "%s (C macro)" -msgstr "" - -#: sphinx/directives/desc.py:80 -#, python-format -msgid "%s (C type)" -msgstr "" - -#: sphinx/directives/desc.py:82 -#, python-format -msgid "%s (C variable)" -msgstr "" - -#: sphinx/directives/desc.py:100 -msgid "Raises" -msgstr "" - -#: sphinx/directives/desc.py:104 -msgid "Variable" -msgstr "" - -#: sphinx/directives/desc.py:107 -msgid "Returns" -msgstr "" - -#: sphinx/directives/desc.py:116 -msgid "Return type" -msgstr "" - -#: sphinx/directives/desc.py:143 -msgid "Parameters" -msgstr "" - -#: sphinx/directives/desc.py:423 -#, python-format -msgid "%scommand line option; %s" -msgstr "" - -#: sphinx/directives/other.py:101 -msgid "Platforms: " -msgstr "" - -#: sphinx/directives/other.py:106 -#, python-format -msgid "%s (module)" -msgstr "" - -#: sphinx/directives/other.py:146 -msgid "Section author: " -msgstr "" - -#: sphinx/directives/other.py:148 -msgid "Module author: " -msgstr "" - -#: sphinx/directives/other.py:150 -msgid "Author: " -msgstr "" - -#: sphinx/directives/other.py:246 -msgid "See also" -msgstr "" - -#: sphinx/ext/todo.py:31 -msgid "Todo" -msgstr "" - -#: sphinx/ext/todo.py:75 -#, python-format -msgid "(The original entry is located in %s, line %d and can be found " -msgstr "" - -#: sphinx/ext/todo.py:81 -msgid "here" -msgstr "" - -#: sphinx/locale/__init__.py:15 -msgid "Attention" -msgstr "" - -#: sphinx/locale/__init__.py:16 -msgid "Caution" -msgstr "" - -#: sphinx/locale/__init__.py:17 -msgid "Danger" -msgstr "" - -#: sphinx/locale/__init__.py:18 -msgid "Error" -msgstr "" - -#: sphinx/locale/__init__.py:19 -msgid "Hint" -msgstr "" - -#: sphinx/locale/__init__.py:20 -msgid "Important" -msgstr "" - -#: sphinx/locale/__init__.py:21 -msgid "Note" -msgstr "" - -#: sphinx/locale/__init__.py:22 -msgid "See Also" -msgstr "" - -#: sphinx/locale/__init__.py:23 -msgid "Tip" -msgstr "" - -#: sphinx/locale/__init__.py:24 -msgid "Warning" -msgstr "" - -#: sphinx/locale/__init__.py:28 -#, python-format -msgid "New in version %s" -msgstr "" - -#: sphinx/locale/__init__.py:29 -#, python-format -msgid "Changed in version %s" -msgstr "" - -#: sphinx/locale/__init__.py:30 -#, python-format -msgid "Deprecated since version %s" -msgstr "" - -#: sphinx/locale/__init__.py:34 -msgid "module" -msgstr "" - -#: sphinx/locale/__init__.py:35 -msgid "keyword" -msgstr "" - -#: sphinx/locale/__init__.py:36 -msgid "operator" -msgstr "" - -#: sphinx/locale/__init__.py:37 -msgid "object" -msgstr "" - -#: sphinx/locale/__init__.py:38 -msgid "exception" -msgstr "" - -#: sphinx/locale/__init__.py:39 -msgid "statement" -msgstr "" - -#: sphinx/locale/__init__.py:40 -msgid "built-in function" -msgstr "" - -#: sphinx/static/doctools.js:174 -msgid "Hide Search Matches" -msgstr "" - -#: sphinx/static/searchtools.js:274 -msgid "Searching" -msgstr "" - -#: sphinx/static/searchtools.js:279 -msgid "Preparing search..." 
-msgstr "" - -#: sphinx/static/searchtools.js:338 -msgid "module, in " -msgstr "" - -#: sphinx/static/searchtools.js:347 -msgid ", in " -msgstr "" - -#: sphinx/static/searchtools.js:447 sphinx/templates/search.html:18 -msgid "Search Results" -msgstr "" - -#: sphinx/static/searchtools.js:449 -msgid "" -"Your search did not match any documents. Please make sure that all words " -"are spelled correctly and that you've selected enough categories." -msgstr "" - -#: sphinx/static/searchtools.js:451 -#, python-format -msgid "Search finished, found %s page(s) matching the search query." -msgstr "" - -#: sphinx/templates/defindex.html:2 -msgid "Overview" -msgstr "" - -#: sphinx/templates/defindex.html:11 -msgid "Indices and tables:" -msgstr "" - -#: sphinx/templates/defindex.html:14 -msgid "Complete Table of Contents" -msgstr "" - -#: sphinx/templates/defindex.html:15 -msgid "lists all sections and subsections" -msgstr "" - -#: sphinx/templates/defindex.html:17 -msgid "search this documentation" -msgstr "" - -#: sphinx/templates/defindex.html:20 -msgid "quick access to all modules" -msgstr "" - -#: sphinx/templates/defindex.html:22 -msgid "all functions, classes, terms" -msgstr "" - -#: sphinx/templates/genindex-single.html:5 -#, python-format -msgid "Index – %(key)s" -msgstr "" - -#: sphinx/templates/genindex-single.html:44 -#: sphinx/templates/genindex-split.html:14 -#: sphinx/templates/genindex-split.html:27 sphinx/templates/genindex.html:54 -msgid "Full index on one page" -msgstr "" - -#: sphinx/templates/genindex-split.html:7 -msgid "Index pages by letter" -msgstr "" - -#: sphinx/templates/genindex-split.html:15 -msgid "can be huge" -msgstr "" - -#: sphinx/templates/layout.html:9 -msgid "Navigation" -msgstr "" - -#: sphinx/templates/layout.html:40 -msgid "Table Of Contents" -msgstr "" - -#: sphinx/templates/layout.html:46 -msgid "Previous topic" -msgstr "" - -#: sphinx/templates/layout.html:47 -msgid "previous chapter" -msgstr "" - -#: sphinx/templates/layout.html:50 -msgid "Next topic" -msgstr "" - -#: sphinx/templates/layout.html:51 -msgid "next chapter" -msgstr "" - -#: sphinx/templates/layout.html:55 -msgid "This Page" -msgstr "" - -#: sphinx/templates/layout.html:59 -msgid "Suggest Change" -msgstr "" - -#: sphinx/templates/layout.html:60 sphinx/templates/layout.html:62 -msgid "Show Source" -msgstr "" - -#: sphinx/templates/layout.html:71 -msgid "Quick search" -msgstr "" - -#: sphinx/templates/layout.html:71 -msgid "Keyword search" -msgstr "" - -#: sphinx/templates/layout.html:73 -msgid "Go" -msgstr "" - -#: sphinx/templates/layout.html:78 -msgid "Enter a module, class or function name." -msgstr "" - -#: sphinx/templates/layout.html:119 -#, python-format -msgid "Search within %(docstitle)s" -msgstr "" - -#: sphinx/templates/layout.html:128 -msgid "About these documents" -msgstr "" - -#: sphinx/templates/layout.html:131 sphinx/templates/search.html:2 -#: sphinx/templates/search.html:5 -msgid "Search" -msgstr "" - -#: sphinx/templates/layout.html:133 -msgid "Copyright" -msgstr "" - -#: sphinx/templates/layout.html:178 -#, python-format -msgid "© Copyright %(copyright)s." -msgstr "" - -#: sphinx/templates/layout.html:180 -#, python-format -msgid "© Copyright %(copyright)s." -msgstr "" - -#: sphinx/templates/layout.html:183 -#, python-format -msgid "Last updated on %(last_updated)s." -msgstr "" - -#: sphinx/templates/layout.html:186 -#, python-format -msgid "" -"Created using Sphinx " -"%(sphinx_version)s." 
-msgstr "" - -#: sphinx/templates/modindex.html:15 -msgid "Most popular modules:" -msgstr "" - -#: sphinx/templates/modindex.html:24 -msgid "Show modules only available on these platforms" -msgstr "" - -#: sphinx/templates/modindex.html:56 -msgid "Deprecated" -msgstr "" - -#: sphinx/templates/opensearch.xml:4 -#, python-format -msgid "Search %(docstitle)s" -msgstr "" - -#: sphinx/templates/page.html:8 -msgid "" -"Note: You requested an out-of-date URL from this server." -" We've tried to redirect you to the new location of this page, but it may" -" not be the right one." -msgstr "" - -#: sphinx/templates/search.html:7 -msgid "" -"From here you can search these documents. Enter your search\n" -" words into the box below and click \"search\". Note that the search\n" -" function will automatically search for all of the words. Pages\n" -" containing fewer words won't appear in the result list." -msgstr "" - -#: sphinx/templates/search.html:14 -msgid "search" -msgstr "" - -#: sphinx/templates/search.html:20 -msgid "Your search did not match any results." -msgstr "" - -#: sphinx/templates/changes/frameset.html:5 -#: sphinx/templates/changes/versionchanges.html:12 -#, python-format -msgid "Changes in Version %(version)s — %(docstitle)s" -msgstr "" - -#: sphinx/templates/changes/rstsource.html:5 -#, python-format -msgid "%(filename)s — %(docstitle)s" -msgstr "" - -#: sphinx/templates/changes/versionchanges.html:17 -#, python-format -msgid "Automatically generated list of changes in version %(version)s" -msgstr "" - -#: sphinx/templates/changes/versionchanges.html:18 -msgid "Library changes" -msgstr "" - -#: sphinx/templates/changes/versionchanges.html:23 -msgid "C API changes" -msgstr "" - -#: sphinx/templates/changes/versionchanges.html:25 -msgid "Other changes" -msgstr "" - diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/zh_TW/LC_MESSAGES/sphinx.js --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/zh_TW/LC_MESSAGES/sphinx.js Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -Documentation.addTranslations({"locale": "zh_TW", "plural_expr": "0", "messages": {"module, in ": "", "Preparing search...": "\u6e96\u5099\u641c\u5c0b...", "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories.": "", "Search finished, found %s page(s) matching the search query.": "", ", in ": "", "Permalink to this headline": "", "Searching": "\u641c\u5c0b\u4e2d", "Permalink to this definition": "", "Hide Search Matches": "", "Search Results": "\u641c\u5c0b\u7d50\u679c"}}); \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/zh_TW/LC_MESSAGES/sphinx.mo Binary file buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/zh_TW/LC_MESSAGES/sphinx.mo has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/zh_TW/LC_MESSAGES/sphinx.po --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/locale/zh_TW/LC_MESSAGES/sphinx.po Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,598 +0,0 @@ -# Chinese (Taiwan) translations for Sphinx. -# Copyright (C) 2008 ORGANIZATION -# This file is distributed under the same license as the Sphinx project. 
-# Fred Lin , 2008. -# -msgid "" -msgstr "" -"Project-Id-Version: Sphinx 0.5\n" -"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2008-11-09 19:46+0100\n" -"PO-Revision-Date: 2008-11-27 18:40+0100\n" -"Last-Translator: Fred Lin \n" -"Language-Team: tw \n" -"Plural-Forms: nplurals=1; plural=0\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=utf-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 0.9.4\n" - -#: sphinx/builder.py:408 -#, python-format -msgid "%b %d, %Y" -msgstr "%Y å¹´ %m 月 %d æ—¥" - -#: sphinx/builder.py:427 sphinx/templates/defindex.html:21 -msgid "General Index" -msgstr "總索引" - -#: sphinx/builder.py:427 -msgid "index" -msgstr "索引" - -#: sphinx/builder.py:429 sphinx/htmlhelp.py:156 -#: sphinx/templates/defindex.html:19 sphinx/templates/modindex.html:2 -#: sphinx/templates/modindex.html:13 -msgid "Global Module Index" -msgstr "" - -#: sphinx/builder.py:429 -msgid "modules" -msgstr "模組" - -#: sphinx/builder.py:466 -msgid "next" -msgstr "下一é " - -#: sphinx/builder.py:473 -msgid "previous" -msgstr "上一é " - -#: sphinx/builder.py:1054 -msgid " (in " -msgstr "" - -#: sphinx/builder.py:1129 -msgid "Builtins" -msgstr "" - -#: sphinx/builder.py:1131 -msgid "Module level" -msgstr "" - -#: sphinx/environment.py:102 sphinx/latexwriter.py:169 -#, python-format -msgid "%B %d, %Y" -msgstr "%Y å¹´ %m 月 %d æ—¥" - -#: sphinx/environment.py:291 sphinx/latexwriter.py:175 -#: sphinx/templates/genindex-single.html:2 -#: sphinx/templates/genindex-split.html:2 -#: sphinx/templates/genindex-split.html:5 sphinx/templates/genindex.html:2 -#: sphinx/templates/genindex.html:5 sphinx/templates/genindex.html:48 -#: sphinx/templates/layout.html:130 -msgid "Index" -msgstr "索引" - -#: sphinx/environment.py:292 sphinx/latexwriter.py:174 -msgid "Module Index" -msgstr "模組索引" - -#: sphinx/environment.py:293 sphinx/templates/defindex.html:16 -msgid "Search Page" -msgstr "æœå°‹é é¢" - -#: sphinx/htmlwriter.py:79 sphinx/static/doctools.js:145 -msgid "Permalink to this definition" -msgstr "" - -#: sphinx/htmlwriter.py:399 sphinx/static/doctools.js:139 -msgid "Permalink to this headline" -msgstr "" - -#: sphinx/latexwriter.py:172 -msgid "Release" -msgstr "釋出" - -#: sphinx/roles.py:53 sphinx/directives/desc.py:537 -#, python-format -msgid "environment variable; %s" -msgstr "環境變數; %s" - -#: sphinx/roles.py:60 -#, python-format -msgid "Python Enhancement Proposals!PEP %s" -msgstr "Python 建議文件!PEP %s" - -#: sphinx/textwriter.py:166 -#, python-format -msgid "Platform: %s" -msgstr "å¹³å°ï¼š%s" - -#: sphinx/textwriter.py:422 -msgid "[image]" -msgstr "[圖片]" - -#: sphinx/directives/desc.py:25 -#, python-format -msgid "%s() (built-in function)" -msgstr "%s() (內建函å¼)" - -#: sphinx/directives/desc.py:26 sphinx/directives/desc.py:42 -#: sphinx/directives/desc.py:54 -#, python-format -msgid "%s() (in module %s)" -msgstr "%s() (在 %s 模組中)" - -#: sphinx/directives/desc.py:29 -#, python-format -msgid "%s (built-in variable)" -msgstr "%s (內建變數)" - -#: sphinx/directives/desc.py:30 sphinx/directives/desc.py:66 -#, python-format -msgid "%s (in module %s)" -msgstr "%s() (在 %s 模組中)" - -#: sphinx/directives/desc.py:33 -#, python-format -msgid "%s (built-in class)" -msgstr "%s (內建類別)" - -#: sphinx/directives/desc.py:34 -#, python-format -msgid "%s (class in %s)" -msgstr "" - -#: sphinx/directives/desc.py:46 -#, python-format -msgid "%s() (%s.%s method)" -msgstr "%s() (%s.%s 方法)" - -#: sphinx/directives/desc.py:48 -#, python-format -msgid "%s() (%s method)" -msgstr "%s() (%s 方法)" - -#: 
sphinx/directives/desc.py:58 -#, python-format -msgid "%s() (%s.%s static method)" -msgstr "%s() (%s.%s éœæ…‹æ–¹æ³•)" - -#: sphinx/directives/desc.py:60 -#, python-format -msgid "%s() (%s static method)" -msgstr "%s() (%s éœæ…‹æ–¹æ³•)" - -#: sphinx/directives/desc.py:70 -#, python-format -msgid "%s (%s.%s attribute)" -msgstr "%s (%s.%s 屬性)" - -#: sphinx/directives/desc.py:72 -#, python-format -msgid "%s (%s attribute)" -msgstr "%s (%s 屬性)" - -#: sphinx/directives/desc.py:74 -#, python-format -msgid "%s (C function)" -msgstr "%s (C 函å¼)" - -#: sphinx/directives/desc.py:76 -#, python-format -msgid "%s (C member)" -msgstr "%s (C æˆå“¡)" - -#: sphinx/directives/desc.py:78 -#, python-format -msgid "%s (C macro)" -msgstr "%s (C 巨集)" - -#: sphinx/directives/desc.py:80 -#, python-format -msgid "%s (C type)" -msgstr "%s (C 類別)" - -#: sphinx/directives/desc.py:82 -#, python-format -msgid "%s (C variable)" -msgstr "%s (C 變數)" - -#: sphinx/directives/desc.py:100 -msgid "Raises" -msgstr "" - -#: sphinx/directives/desc.py:104 -msgid "Variable" -msgstr "變數" - -#: sphinx/directives/desc.py:107 -msgid "Returns" -msgstr "返回" - -#: sphinx/directives/desc.py:116 -msgid "Return type" -msgstr "返回類別" - -#: sphinx/directives/desc.py:143 -msgid "Parameters" -msgstr "åƒæ•¸" - -#: sphinx/directives/desc.py:423 -#, python-format -msgid "%scommand line option; %s" -msgstr "%s命令列é¸é …; %s" - -#: sphinx/directives/other.py:101 -msgid "Platforms: " -msgstr "å¹³å°" - -#: sphinx/directives/other.py:106 -#, python-format -msgid "%s (module)" -msgstr "%s (模組)" - -#: sphinx/directives/other.py:146 -msgid "Section author: " -msgstr "Section 作者:" - -#: sphinx/directives/other.py:148 -msgid "Module author: " -msgstr "模組作者:" - -#: sphinx/directives/other.py:150 -msgid "Author: " -msgstr "作者:" - -#: sphinx/directives/other.py:246 -msgid "See also" -msgstr "" - -#: sphinx/ext/todo.py:31 -msgid "Todo" -msgstr "待辦" - -#: sphinx/ext/todo.py:75 -#, python-format -msgid "(The original entry is located in %s, line %d and can be found " -msgstr "" - -#: sphinx/ext/todo.py:81 -msgid "here" -msgstr "" - -#: sphinx/locale/__init__.py:15 -msgid "Attention" -msgstr "注æ„" - -#: sphinx/locale/__init__.py:16 -msgid "Caution" -msgstr "警示" - -#: sphinx/locale/__init__.py:17 -msgid "Danger" -msgstr "å±éšª" - -#: sphinx/locale/__init__.py:18 -msgid "Error" -msgstr "錯誤" - -#: sphinx/locale/__init__.py:19 -msgid "Hint" -msgstr "æ示" - -#: sphinx/locale/__init__.py:20 -msgid "Important" -msgstr "é‡è¦" - -#: sphinx/locale/__init__.py:21 -msgid "Note" -msgstr "註解" - -#: sphinx/locale/__init__.py:22 -msgid "See Also" -msgstr "" - -#: sphinx/locale/__init__.py:23 -msgid "Tip" -msgstr "å°æŠ€å·§" - -#: sphinx/locale/__init__.py:24 -msgid "Warning" -msgstr "警告" - -#: sphinx/locale/__init__.py:28 -#, python-format -msgid "New in version %s" -msgstr "%s 版新功能" - -#: sphinx/locale/__init__.py:29 -#, python-format -msgid "Changed in version %s" -msgstr "在 %s 版改變" - -#: sphinx/locale/__init__.py:30 -#, python-format -msgid "Deprecated since version %s" -msgstr "%s 版後已移除" - -#: sphinx/locale/__init__.py:34 -msgid "module" -msgstr "模組" - -#: sphinx/locale/__init__.py:35 -msgid "keyword" -msgstr "é—œéµå­—" - -#: sphinx/locale/__init__.py:36 -msgid "operator" -msgstr "é‹ç®—å­" - -#: sphinx/locale/__init__.py:37 -msgid "object" -msgstr "物件" - -#: sphinx/locale/__init__.py:38 -msgid "exception" -msgstr "例外" - -#: sphinx/locale/__init__.py:39 -msgid "statement" -msgstr "" - -#: sphinx/locale/__init__.py:40 -msgid "built-in function" -msgstr "內建函å¼" - -#: 
sphinx/static/doctools.js:174 -msgid "Hide Search Matches" -msgstr "" - -#: sphinx/static/searchtools.js:274 -msgid "Searching" -msgstr "æœå°‹ä¸­" - -#: sphinx/static/searchtools.js:279 -msgid "Preparing search..." -msgstr "準備æœå°‹..." - -#: sphinx/static/searchtools.js:338 -msgid "module, in " -msgstr "" - -#: sphinx/static/searchtools.js:347 -msgid ", in " -msgstr "" - -#: sphinx/static/searchtools.js:447 sphinx/templates/search.html:18 -msgid "Search Results" -msgstr "æœå°‹çµæžœ" - -#: sphinx/static/searchtools.js:449 -msgid "" -"Your search did not match any documents. Please make sure that all words " -"are spelled correctly and that you've selected enough categories." -msgstr "" - -#: sphinx/static/searchtools.js:451 -#, python-format -msgid "Search finished, found %s page(s) matching the search query." -msgstr "" - -#: sphinx/templates/defindex.html:2 -msgid "Overview" -msgstr "" - -#: sphinx/templates/defindex.html:11 -msgid "Indices and tables:" -msgstr "" - -#: sphinx/templates/defindex.html:14 -msgid "Complete Table of Contents" -msgstr "" - -#: sphinx/templates/defindex.html:15 -msgid "lists all sections and subsections" -msgstr "" - -#: sphinx/templates/defindex.html:17 -msgid "search this documentation" -msgstr "" - -#: sphinx/templates/defindex.html:20 -msgid "quick access to all modules" -msgstr "" - -#: sphinx/templates/defindex.html:22 -msgid "all functions, classes, terms" -msgstr "" - -#: sphinx/templates/genindex-single.html:5 -#, python-format -msgid "Index – %(key)s" -msgstr "" - -#: sphinx/templates/genindex-single.html:44 -#: sphinx/templates/genindex-split.html:14 -#: sphinx/templates/genindex-split.html:27 sphinx/templates/genindex.html:54 -msgid "Full index on one page" -msgstr "" - -#: sphinx/templates/genindex-split.html:7 -msgid "Index pages by letter" -msgstr "" - -#: sphinx/templates/genindex-split.html:15 -msgid "can be huge" -msgstr "" - -#: sphinx/templates/layout.html:9 -msgid "Navigation" -msgstr "ç€è¦½" - -#: sphinx/templates/layout.html:40 -msgid "Table Of Contents" -msgstr "內容目錄" - -#: sphinx/templates/layout.html:46 -msgid "Previous topic" -msgstr "上一個主題" - -#: sphinx/templates/layout.html:47 -msgid "previous chapter" -msgstr "上一章" - -#: sphinx/templates/layout.html:50 -msgid "Next topic" -msgstr "下一個主題" - -#: sphinx/templates/layout.html:51 -msgid "next chapter" -msgstr "下一章" - -#: sphinx/templates/layout.html:55 -msgid "This Page" -msgstr "本é " - -#: sphinx/templates/layout.html:59 -msgid "Suggest Change" -msgstr "" - -#: sphinx/templates/layout.html:60 sphinx/templates/layout.html:62 -msgid "Show Source" -msgstr "顯示原始碼" - -#: sphinx/templates/layout.html:71 -msgid "Quick search" -msgstr "快速æœå°‹" - -#: sphinx/templates/layout.html:71 -msgid "Keyword search" -msgstr "é—œéµå­—æœå°‹" - -#: sphinx/templates/layout.html:73 -msgid "Go" -msgstr "" - -#: sphinx/templates/layout.html:78 -msgid "Enter a module, class or function name." -msgstr "輸入一個模組ã€é¡žåˆ¥ã€æˆ–是函å¼å稱." - -#: sphinx/templates/layout.html:119 -#, python-format -msgid "Search within %(docstitle)s" -msgstr "在 %(docstitle)s 中æœå°‹" - -#: sphinx/templates/layout.html:128 -msgid "About these documents" -msgstr "" - -#: sphinx/templates/layout.html:131 sphinx/templates/search.html:2 -#: sphinx/templates/search.html:5 -msgid "Search" -msgstr "æœå°‹" - -#: sphinx/templates/layout.html:133 -msgid "Copyright" -msgstr "版權所有" - -#: sphinx/templates/layout.html:178 -#, python-format -msgid "© Copyright %(copyright)s." 
-msgstr "" - -#: sphinx/templates/layout.html:180 -#, python-format -msgid "© Copyright %(copyright)s." -msgstr "" - -#: sphinx/templates/layout.html:183 -#, python-format -msgid "Last updated on %(last_updated)s." -msgstr "最後更新日期是 %(last_updated)s." - -#: sphinx/templates/layout.html:186 -#, python-format -msgid "" -"Created using Sphinx " -"%(sphinx_version)s." -msgstr "" - -#: sphinx/templates/modindex.html:15 -msgid "Most popular modules:" -msgstr "" - -#: sphinx/templates/modindex.html:24 -msgid "Show modules only available on these platforms" -msgstr "" - -#: sphinx/templates/modindex.html:56 -msgid "Deprecated" -msgstr "已移除" - -#: sphinx/templates/opensearch.xml:4 -#, python-format -msgid "Search %(docstitle)s" -msgstr "æœå°‹ %(docstitle)s" - -#: sphinx/templates/page.html:8 -msgid "" -"Note: You requested an out-of-date URL from this server." -" We've tried to redirect you to the new location of this page, but it may" -" not be the right one." -msgstr "" - -#: sphinx/templates/search.html:7 -msgid "" -"From here you can search these documents. Enter your search\n" -" words into the box below and click \"search\". Note that the search\n" -" function will automatically search for all of the words. Pages\n" -" containing fewer words won't appear in the result list." -msgstr "" - -#: sphinx/templates/search.html:14 -msgid "search" -msgstr "æœå°‹" - -#: sphinx/templates/search.html:20 -msgid "Your search did not match any results." -msgstr "" - -#: sphinx/templates/changes/frameset.html:5 -#: sphinx/templates/changes/versionchanges.html:12 -#, python-format -msgid "Changes in Version %(version)s — %(docstitle)s" -msgstr "" - -#: sphinx/templates/changes/rstsource.html:5 -#, python-format -msgid "%(filename)s — %(docstitle)s" -msgstr "" - -#: sphinx/templates/changes/versionchanges.html:17 -#, python-format -msgid "Automatically generated list of changes in version %(version)s" -msgstr "" - -#: sphinx/templates/changes/versionchanges.html:18 -msgid "Library changes" -msgstr "" - -#: sphinx/templates/changes/versionchanges.html:23 -msgid "C API changes" -msgstr "C API 改變" - -#: sphinx/templates/changes/versionchanges.html:25 -msgid "Other changes" -msgstr "其他改變:" - -#~ msgid "" -#~ "From here you can search these documents. Enter your search\n" -#~ " words into the box below and" -#~ " click \"search\". Note that the " -#~ "search\n" -#~ " function will automatically search for all of the words. Pages\n" -#~ " containing less words won't appear in the result list." -#~ msgstr "" - diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/quickstart.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/quickstart.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,533 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.quickstart - ~~~~~~~~~~~~~~~~~ - - Quickly setup documentation source to work with Sphinx. - - :copyright: 2008 by Georg Brandl. - :license: BSD. -""" - -import sys, os, time -from os import path - -TERM_ENCODING = getattr(sys.stdin, 'encoding', None) - -from sphinx.util import make_filename -from sphinx.util.console import purple, bold, red, turquoise, nocolor, color_terminal -from sphinx.util.texescape import tex_escape_map - - -PROMPT_PREFIX = '> ' - -QUICKSTART_CONF = '''\ -# -*- coding: utf-8 -*- -# -# %(project)s documentation build configuration file, created by -# sphinx-quickstart on %(now)s. 
# -# This file is execfile()d with the current directory set to its containing dir. -# -# The contents of this file are pickled, so don't put values in the namespace -# that aren't pickleable (module imports are okay, they're removed automatically). -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys, os - -# If your extensions are in another directory, add it here. If the directory -# is relative to the documentation root, use os.path.abspath to make it -# absolute, like shown here. -#sys.path.append(os.path.abspath('.')) - -# General configuration -# --------------------- - -# Add any Sphinx extension module names here, as strings. They can be extensions -# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = [%(extensions)s] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['%(dot)stemplates'] - -# The suffix of source filenames. -source_suffix = '%(suffix)s' - -# The encoding of source files. -#source_encoding = 'utf-8' - -# The master toctree document. -master_doc = '%(master)s' - -# General information about the project. -project = u'%(project)s' -copyright = u'%(copyright)s' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = '%(version)s' -# The full version, including alpha/beta/rc tags. -release = '%(release)s' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -#language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%%B %%d, %%Y' - -# List of documents that shouldn't be included in the build. -#unused_docs = [] - -# List of directories, relative to source directory, that shouldn't be searched -# for source files. -exclude_trees = [%(exclude_trees)s] - -# The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -#show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - - -# Options for HTML output -# ----------------------- - -# The style sheet to use for HTML and HTML Help pages. A file of that name -# must exist either in Sphinx' static/ path, or in one of the custom paths -# given in html_static_path. -html_style = 'default.css' - -# The name for this set of Sphinx documents. If None, it defaults to -# "<project> v<release> documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs.
This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['%(dot)sstatic'] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%%b %%d, %%Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -#html_use_modindex = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, the reST sources are included in the HTML build as _sources/. -#html_copy_source = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = '' - -# Output file base name for HTML help builder. -htmlhelp_basename = '%(project_fn)sdoc' - - -# Options for LaTeX output -# ------------------------ - -# The paper size ('letter' or 'a4'). -#latex_paper_size = 'letter' - -# The font size ('10pt', '11pt' or '12pt'). -#latex_font_size = '10pt' - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, document class [howto/manual]). -latex_documents = [ - ('%(master)s', '%(project_fn)s.tex', ur'%(project_doc_texescaped)s', - ur'%(author_texescaped)s', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# Additional stuff for the LaTeX preamble. -#latex_preamble = '' - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_use_modindex = True -''' - -INTERSPHINX_CONFIG = ''' - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = {'http://docs.python.org/dev': None} -''' - -MASTER_FILE = '''\ -.. %(project)s documentation master file, created by sphinx-quickstart on %(now)s. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Welcome to %(project)s's documentation! -===========%(underline)s================= - -Contents: - -.. toctree:: - :maxdepth: 2 - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` - -''' - -MAKEFILE = '''\ -# Makefile for Sphinx documentation -# - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -PAPER = - -# Internal variables. 
-PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d %(rbuilddir)s/doctrees $(PAPEROPT_$(PAPER)) \ -$(SPHINXOPTS) %(rsrcdir)s - -.PHONY: help clean html web pickle htmlhelp latex changes linkcheck - -help: -\t@echo "Please use \\`make <target>' where <target> is one of" -\t@echo " html to make standalone HTML files" -\t@echo " pickle to make pickle files" -\t@echo " json to make JSON files" -\t@echo " htmlhelp to make HTML files and a HTML help project" -\t@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" -\t@echo " changes to make an overview over all changed/added/deprecated items" -\t@echo " linkcheck to check all external links for integrity" - -clean: -\t-rm -rf %(rbuilddir)s/* - -html: -\tmkdir -p %(rbuilddir)s/html %(rbuilddir)s/doctrees -\t$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) %(rbuilddir)s/html -\t@echo -\t@echo "Build finished. The HTML pages are in %(rbuilddir)s/html." - -pickle: -\tmkdir -p %(rbuilddir)s/pickle %(rbuilddir)s/doctrees -\t$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) %(rbuilddir)s/pickle -\t@echo -\t@echo "Build finished; now you can process the pickle files." - -web: pickle - -json: -\tmkdir -p %(rbuilddir)s/json %(rbuilddir)s/doctrees -\t$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) %(rbuilddir)s/json -\t@echo -\t@echo "Build finished; now you can process the JSON files." - -htmlhelp: -\tmkdir -p %(rbuilddir)s/htmlhelp %(rbuilddir)s/doctrees -\t$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) %(rbuilddir)s/htmlhelp -\t@echo -\t@echo "Build finished; now you can run HTML Help Workshop with the" \\ -\t ".hhp project file in %(rbuilddir)s/htmlhelp." - -latex: -\tmkdir -p %(rbuilddir)s/latex %(rbuilddir)s/doctrees -\t$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) %(rbuilddir)s/latex -\t@echo -\t@echo "Build finished; the LaTeX files are in %(rbuilddir)s/latex." -\t@echo "Run \\`make all-pdf' or \\`make all-ps' in that directory to" \\ -\t "run these through (pdf)latex." - -changes: -\tmkdir -p %(rbuilddir)s/changes %(rbuilddir)s/doctrees -\t$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) %(rbuilddir)s/changes -\t@echo -\t@echo "The overview file is in %(rbuilddir)s/changes." - -linkcheck: -\tmkdir -p %(rbuilddir)s/linkcheck %(rbuilddir)s/doctrees -\t$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) %(rbuilddir)s/linkcheck -\t@echo -\t@echo "Link check complete; look for any errors in the above output " \\ -\t "or in %(rbuilddir)s/linkcheck/output.txt." -''' - - -def mkdir_p(dir): - if path.isdir(dir): - return - os.makedirs(dir) - - -def is_path(x): - """Please enter a valid path name.""" - return path.isdir(x) or not path.exists(x) - -def nonempty(x): - """Please enter some text.""" - return len(x) - -def choice(*l): - def val(x): - return x in l - val.__doc__ = 'Please enter one of %s.' % ', '.join(l) - return val - -def boolean(x): - """Please enter either 'y' or 'n'.""" - return x.upper() in ('Y', 'YES', 'N', 'NO') - -def suffix(x): - """Please enter a file suffix, e.g. '.rst' or '.txt'.""" - return x[0:1] == '.'
and len(x) > 1 - -def ok(x): - return True - - -def do_prompt(d, key, text, default=None, validator=nonempty): - while True: - if default: - prompt = purple(PROMPT_PREFIX + '%s [%s]: ' % (text, default)) - else: - prompt = purple(PROMPT_PREFIX + text + ': ') - x = raw_input(prompt) - if default and not x: - x = default - if x.decode('ascii', 'replace').encode('ascii', 'replace') != x: - if TERM_ENCODING: - x = x.decode(TERM_ENCODING) - else: - print turquoise('* Note: non-ASCII characters entered and terminal ' - 'encoding unknown -- assuming UTF-8 or Latin-1.') - try: - x = x.decode('utf-8') - except UnicodeDecodeError: - x = x.decode('latin1') - if validator and not validator(x): - print red('* ' + validator.__doc__) - continue - break - d[key] = x - - -def inner_main(args): - d = {} - - if not sys.stdout.isatty() or not color_terminal(): - nocolor() - - print bold('Welcome to the Sphinx quickstart utility.') - print ''' -Please enter values for the following settings (just press Enter to -accept a default value, if one is given in brackets).''' - - print ''' -Enter the root path for documentation.''' - do_prompt(d, 'path', 'Root path for the documentation', '.', is_path) - - while path.isfile(path.join(d['path'], 'conf.py')) or \ - path.isfile(path.join(d['path'], 'source', 'conf.py')): - print - print bold('Error: an existing conf.py has been found in the ' - 'selected root path.') - print 'sphinx-quickstart will not overwrite existing Sphinx projects.' - print - do_prompt(d, 'path', 'Please enter a new root path (or just Enter to exit)', - '', is_path) - if not d['path']: - sys.exit(1) - - print ''' -You have two options for placing the build directory for Sphinx output. -Either, you use a directory ".build" within the root path, or you separate -"source" and "build" directories within the root path.''' - do_prompt(d, 'sep', 'Separate source and build directories (y/N)', 'n', - boolean) - print ''' -Inside the root directory, two more directories will be created; ".templates" -for custom HTML templates and ".static" for custom stylesheets and other -static files. Since the leading dot may be inconvenient for Windows users, -you can enter another prefix (such as "_") to replace the dot.''' - do_prompt(d, 'dot', 'Name prefix for templates and static dir', '.', ok) - - print ''' -The project name will occur in several places in the built documentation.''' - do_prompt(d, 'project', 'Project name') - do_prompt(d, 'author', 'Author name(s)') - print ''' -Sphinx has the notion of a "version" and a "release" for the -software. Each version can have multiple releases. For example, for -Python the version is something like 2.5 or 3.0, while the release is -something like 2.5.1 or 3.0a1. If you don't need this dual structure, -just set both to the same value.''' - do_prompt(d, 'version', 'Project version') - do_prompt(d, 'release', 'Project release', d['version']) - print ''' -The file name suffix for source files. Commonly, this is either ".txt" -or ".rst". Only files with this suffix are considered documents.''' - do_prompt(d, 'suffix', 'Source file suffix', '.rst', suffix) - print ''' -One document is special in that it is considered the top node of the -"contents tree", that is, it is the root of the hierarchical structure -of the documents. 
Normally, this is "index", but if your "index" -document is a custom template, you can also set this to another filename.''' - do_prompt(d, 'master', 'Name of your master document (without suffix)', - 'index') - print ''' -Please indicate if you want to use one of the following Sphinx extensions:''' - do_prompt(d, 'ext_autodoc', 'autodoc: automatically insert docstrings ' - 'from modules (y/N)', 'n', boolean) - do_prompt(d, 'ext_doctest', 'doctest: automatically test code snippets ' - 'in doctest blocks (y/N)', 'n', boolean) - do_prompt(d, 'ext_intersphinx', 'intersphinx: link between Sphinx documentation ' - 'of different projects (y/N)', 'n', boolean) - print ''' -If you are under Unix, a Makefile can be generated for you so that you -only have to run e.g. `make html' instead of invoking sphinx-build -directly.''' - do_prompt(d, 'makefile', 'Create Makefile? (Y/n)', - os.name == 'posix' and 'y' or 'n', boolean) - - d['project_fn'] = make_filename(d['project']) - d['now'] = time.asctime() - d['underline'] = len(d['project']) * '=' - d['extensions'] = ', '.join( - repr('sphinx.ext.' + name) for name in ('autodoc', 'doctest', 'intersphinx') - if d['ext_' + name].upper() in ('Y', 'YES')) - d['copyright'] = time.strftime('%Y') + ', ' + d['author'] - d['author_texescaped'] = unicode(d['author']).translate(tex_escape_map) - d['project_doc'] = d['project'] + ' Documentation' - d['project_doc_texescaped'] = \ - unicode(d['project'] + ' Documentation').translate(tex_escape_map) - - if not path.isdir(d['path']): - mkdir_p(d['path']) - - separate = d['sep'].upper() in ('Y', 'YES') - srcdir = separate and path.join(d['path'], 'source') or d['path'] - - mkdir_p(srcdir) - if separate: - builddir = path.join(d['path'], 'build') - d['exclude_trees'] = '' - else: - builddir = path.join(srcdir, d['dot'] + 'build') - d['exclude_trees'] = repr(d['dot'] + 'build') - mkdir_p(builddir) - mkdir_p(path.join(srcdir, d['dot'] + 'templates')) - mkdir_p(path.join(srcdir, d['dot'] + 'static')) - - conf_text = QUICKSTART_CONF % d - if d['ext_intersphinx'].upper() in ('Y', 'YES'): - conf_text += INTERSPHINX_CONFIG - - f = open(path.join(srcdir, 'conf.py'), 'w') - f.write(conf_text.encode('utf-8')) - f.close() - - masterfile = path.join(srcdir, d['master'] + d['suffix']) - f = open(masterfile, 'w') - f.write((MASTER_FILE % d).encode('utf-8')) - f.close() - - create_makefile = d['makefile'].upper() in ('Y', 'YES') - if create_makefile: - d['rsrcdir'] = separate and 'source' or '.' - d['rbuilddir'] = separate and 'build' or d['dot'] + 'build' - f = open(path.join(d['path'], 'Makefile'), 'w') - f.write((MAKEFILE % d).encode('utf-8')) - f.close() - - print - print bold('Finished: An initial directory structure has been created.') - print ''' -You should now populate your master file %s and create other documentation -source files. ''' % masterfile + (create_makefile and '''\ -Use the Makefile to build the docs, like so: - make builder -''' or '''\ -Use the sphinx-build command to build the docs, like so: - sphinx-build -b builder %s %s -''' % (srcdir, builddir)) + '''\ -where "builder" is one of the supported builders, e.g. html, latex or linkcheck. 
-''' - - -def main(argv=sys.argv): - try: - return inner_main(argv) - except (KeyboardInterrupt, EOFError): - print - print '[Interrupted.]' - return - diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/roles.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/roles.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,245 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.roles - ~~~~~~~~~~~~ - - Handlers for additional ReST roles. - - :copyright: 2007-2008 by Georg Brandl. - :license: BSD. -""" - -import re - -from docutils import nodes, utils -from docutils.parsers.rst import roles - -from sphinx import addnodes -from sphinx.util import ws_re, caption_ref_re - - -generic_docroles = { - 'command' : nodes.strong, - 'dfn' : nodes.emphasis, - 'guilabel' : nodes.strong, - 'kbd' : nodes.literal, - 'mailheader' : addnodes.literal_emphasis, - 'makevar' : nodes.Text, - 'manpage' : addnodes.literal_emphasis, - 'mimetype' : addnodes.literal_emphasis, - 'newsgroup' : addnodes.literal_emphasis, - 'program' : nodes.strong, - 'regexp' : nodes.literal, -} - -for rolename, nodeclass in generic_docroles.iteritems(): - roles.register_generic_role(rolename, nodeclass) - - -def indexmarkup_role(typ, rawtext, etext, lineno, inliner, options={}, content=[]): - env = inliner.document.settings.env - if not typ: - typ = env.config.default_role - else: - typ = typ.lower() - text = utils.unescape(etext) - targetid = 'index-%s' % env.index_num - env.index_num += 1 - indexnode = addnodes.index() - targetnode = nodes.target('', '', ids=[targetid]) - inliner.document.note_explicit_target(targetnode) - if typ == 'envvar': - indexnode['entries'] = [('single', text, targetid, text), - ('single', _('environment variable; %s') % text, - targetid, text)] - xref_nodes = xfileref_role(typ, rawtext, etext, lineno, inliner, - options, content)[0] - return [indexnode, targetnode] + xref_nodes, [] - elif typ == 'pep': - indexnode['entries'] = [('single', - _('Python Enhancement Proposals!PEP %s') % text, - targetid, 'PEP %s' % text)] - try: - pepnum = int(text) - except ValueError: - msg = inliner.reporter.error('invalid PEP number %s' % text, line=lineno) - prb = inliner.problematic(rawtext, rawtext, msg) - return [prb], [msg] - ref = inliner.document.settings.pep_base_url + 'pep-%04d' % pepnum - sn = nodes.strong('PEP '+text, 'PEP '+text) - rn = nodes.reference('', '', refuri=ref) - rn += sn - return [indexnode, targetnode, rn], [] - elif typ == 'rfc': - indexnode['entries'] = [('single', 'RFC; RFC %s' % text, - targetid, 'RFC %s' % text)] - try: - rfcnum = int(text) - except ValueError: - msg = inliner.reporter.error('invalid RFC number %s' % text, line=lineno) - prb = inliner.problematic(rawtext, rawtext, msg) - return [prb], [msg] - ref = inliner.document.settings.rfc_base_url + inliner.rfc_url % rfcnum - sn = nodes.strong('RFC '+text, 'RFC '+text) - rn = nodes.reference('', '', refuri=ref) - rn += sn - return [indexnode, targetnode, rn], [] - -roles.register_canonical_role('envvar', indexmarkup_role) -roles.register_local_role('pep', indexmarkup_role) -roles.register_local_role('rfc', indexmarkup_role) - - -# default is `literal` -innernodetypes = { - 'ref': nodes.emphasis, - 'term': nodes.emphasis, - 'token': nodes.strong, - 'envvar': nodes.strong, - 'option': addnodes.literal_emphasis, -} - -def _fix_parens(typ, text, env): - if typ in ('func', 'meth', 'cfunc'): - if text.endswith('()'): - # remove 
parentheses - text = text[:-2] - if env.config.add_function_parentheses: - # add them back to all occurrences if configured - text += '()' - return text - -def xfileref_role(typ, rawtext, text, lineno, inliner, options={}, content=[]): - env = inliner.document.settings.env - if not typ: - typ = env.config.default_role - else: - typ = typ.lower() - text = utils.unescape(text) - # if the first character is a bang, don't cross-reference at all - if text[0:1] == '!': - text = _fix_parens(typ, text[1:], env) - return [innernodetypes.get(typ, nodes.literal)( - rawtext, text, classes=['xref'])], [] - # we want a cross-reference, create the reference node - pnode = addnodes.pending_xref(rawtext, reftype=typ, refcaption=False, - modname=env.currmodule, classname=env.currclass) - # we may need the line number for warnings - pnode.line = lineno - # the link title may differ from the target, but by default they are the same - title = target = text - titleistarget = True - # look if explicit title and target are given with `foo ` syntax - brace = text.find('<') - if brace != -1: - titleistarget = False - pnode['refcaption'] = True - m = caption_ref_re.match(text) - if m: - target = m.group(2) - title = m.group(1) - else: - # fallback: everything after '<' is the target - target = text[brace+1:] - title = text[:brace] - # special target for Python object cross-references - if typ in ('data', 'exc', 'func', 'class', 'const', 'attr', 'meth', 'mod', 'obj'): - # fix-up parentheses in link title - if titleistarget: - title = title.lstrip('.') # only has a meaning for the target - target = target.lstrip('~') # only has a meaning for the title - title = _fix_parens(typ, title, env) - # if the first character is a tilde, don't display the module/class - # parts of the contents - if title[0:1] == '~': - title = title[1:] - dot = title.rfind('.') - if dot != -1: - title = title[dot+1:] - # remove parentheses from the target too - if target.endswith('()'): - target = target[:-2] - # if the first character is a dot, search more specific namespaces first - # else search builtins first - if target[0:1] == '.': - target = target[1:] - pnode['refspecific'] = True - # some other special cases for the target - elif typ == 'option': - program = env.currprogram - if titleistarget: - if ' ' in title and not (title.startswith('/') or title.startswith('-')): - program, target = re.split(' (?=-|--|/)', title, 1) - program = ws_re.sub('-', program) - target = target.strip() - elif ' ' in target: - program, target = re.split(' (?=-|--|/)', target, 1) - program = ws_re.sub('-', program) - pnode['refprogram'] = program - elif typ == 'term': - # normalize whitespace in definition terms (if the term reference is - # broken over a line, a newline will be in target) - target = ws_re.sub(' ', target).lower() - elif typ == 'ref': - # reST label names are always lowercased - target = ws_re.sub('', target).lower() - else: - # remove all whitespace to avoid referencing problems - target = ws_re.sub('', target) - pnode['reftarget'] = target - pnode += innernodetypes.get(typ, nodes.literal)(rawtext, title, classes=['xref']) - return [pnode], [] - - -def menusel_role(typ, rawtext, text, lineno, inliner, options={}, content=[]): - return [nodes.emphasis( - rawtext, utils.unescape(text).replace('-->', u'\N{TRIANGULAR BULLET}'))], [] - - -_litvar_re = re.compile('{([^}]+)}') - -def emph_literal_role(typ, rawtext, text, lineno, inliner, options={}, content=[]): - text = utils.unescape(text) - pos = 0 - retnode = nodes.literal(role=typ.lower()) - 
for m in _litvar_re.finditer(text): - if m.start() > pos: - txt = text[pos:m.start()] - retnode += nodes.Text(txt, txt) - retnode += nodes.emphasis(m.group(1), m.group(1)) - pos = m.end() - if pos < len(text): - retnode += nodes.Text(text[pos:], text[pos:]) - return [retnode], [] - - -specific_docroles = { - 'data': xfileref_role, - 'exc': xfileref_role, - 'func': xfileref_role, - 'class': xfileref_role, - 'const': xfileref_role, - 'attr': xfileref_role, - 'meth': xfileref_role, - 'obj': xfileref_role, - 'cfunc' : xfileref_role, - 'cmember': xfileref_role, - 'cdata': xfileref_role, - 'ctype': xfileref_role, - 'cmacro': xfileref_role, - - 'mod': xfileref_role, - - 'keyword': xfileref_role, - 'ref': xfileref_role, - 'token': xfileref_role, - 'term': xfileref_role, - 'option': xfileref_role, - - 'menuselection': menusel_role, - 'file': emph_literal_role, - 'samp': emph_literal_role, -} - -for rolename, func in specific_docroles.iteritems(): - roles.register_canonical_role(rolename, func) diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/search.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/search.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,205 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.search - ~~~~~~~~~~~~~ - - Create a search index for offline search. - - :copyright: 2007-2008 by Armin Ronacher. - :license: BSD. -""" -import re -import cPickle as pickle -from cStringIO import StringIO - -from docutils.nodes import Text, NodeVisitor - -from sphinx.util.stemmer import PorterStemmer -from sphinx.util import jsdump, rpartition - - -word_re = re.compile(r'\w+(?u)') - -stopwords = set(""" -a and are as at -be but by -for -if in into is it -near no not -of on or -such -that the their then there these they this to -was will with -""".split()) - - -class _JavaScriptIndex(object): - """ - The search index as javascript file that calls a function - on the documentation search object to register the index. - """ - - PREFIX = 'Search.setIndex(' - SUFFIX = ')' - - def dumps(self, data): - return self.PREFIX + jsdump.dumps(data) + self.SUFFIX - - def loads(self, s): - data = s[len(self.PREFIX):-len(self.SUFFIX)] - if not data or not s.startswith(self.PREFIX) or not \ - s.endswith(self.SUFFIX): - raise ValueError('invalid data') - return jsdump.loads(data) - - def dump(self, data, f): - f.write(self.dumps(data)) - - def load(self, f): - return self.loads(f.read()) - - -js_index = _JavaScriptIndex() - - -class Stemmer(PorterStemmer): - """ - All those porter stemmer implementations look hideous. - make at least the stem method nicer. - """ - - def stem(self, word): - word = word.lower() - return PorterStemmer.stem(self, word, 0, len(word) - 1) - - -class WordCollector(NodeVisitor): - """ - A special visitor that collects words for the `IndexBuilder`. - """ - - def __init__(self, document): - NodeVisitor.__init__(self, document) - self.found_words = [] - - def dispatch_visit(self, node): - if node.__class__ is Text: - self.found_words.extend(word_re.findall(node.astext())) - - -class IndexBuilder(object): - """ - Helper class that creates a searchindex based on the doctrees - passed to the `feed` method. 
- """ - formats = { - 'jsdump': jsdump, - 'pickle': pickle - } - - def __init__(self, env): - self.env = env - self._stemmer = Stemmer() - # filename -> title - self._titles = {} - # stemmed word -> set(filenames) - self._mapping = {} - # desctypes -> index - self._desctypes = {} - - def load(self, stream, format): - """Reconstruct from frozen data.""" - if isinstance(format, basestring): - format = self.formats[format] - frozen = format.load(stream) - # if an old index is present, we treat it as not existing. - if not isinstance(frozen, dict): - raise ValueError('old format') - index2fn = frozen['filenames'] - self._titles = dict(zip(index2fn, frozen['titles'])) - self._mapping = {} - for k, v in frozen['terms'].iteritems(): - if isinstance(v, int): - self._mapping[k] = set([index2fn[v]]) - else: - self._mapping[k] = set(index2fn[i] for i in v) - # no need to load keywords/desctypes - - def dump(self, stream, format): - """Dump the frozen index to a stream.""" - if isinstance(format, basestring): - format = self.formats[format] - format.dump(self.freeze(), stream) - - def get_modules(self, fn2index): - rv = {} - for name, (doc, _, _, _) in self.env.modules.iteritems(): - rv[name] = fn2index[doc] - return rv - - def get_descrefs(self, fn2index): - rv = {} - dt = self._desctypes - for fullname, (doc, desctype) in self.env.descrefs.iteritems(): - prefix, name = rpartition(fullname, '.') - pdict = rv.setdefault(prefix, {}) - try: - i = dt[desctype] - except KeyError: - i = len(dt) - dt[desctype] = i - pdict[name] = (fn2index[doc], i) - return rv - - def get_terms(self, fn2index): - rv = {} - for k, v in self._mapping.iteritems(): - if len(v) == 1: - fn, = v - rv[k] = fn2index[fn] - else: - rv[k] = [fn2index[fn] for fn in v] - return rv - - def freeze(self): - """Create a usable data structure for serializing.""" - filenames = self._titles.keys() - titles = self._titles.values() - fn2index = dict((f, i) for (i, f) in enumerate(filenames)) - return dict( - filenames=filenames, - titles=titles, - terms=self.get_terms(fn2index), - descrefs=self.get_descrefs(fn2index), - modules=self.get_modules(fn2index), - desctypes=dict((v, k) for (k, v) in self._desctypes.items()), - ) - - def prune(self, filenames): - """Remove data for all filenames not in the list.""" - new_titles = {} - for filename in filenames: - if filename in self._titles: - new_titles[filename] = self._titles[filename] - self._titles = new_titles - for wordnames in self._mapping.itervalues(): - wordnames.intersection_update(filenames) - - def feed(self, filename, title, doctree): - """Feed a doctree to the index.""" - self._titles[filename] = title - - visitor = WordCollector(doctree) - doctree.walk(visitor) - - def add_term(word, prefix='', stem=self._stemmer.stem): - word = stem(word) - if len(word) < 3 or word in stopwords or word.isdigit(): - return - self._mapping.setdefault(prefix + word, set()).add(filename) - - for word in word_re.findall(title): - add_term(word) - - for word in visitor.found_words: - add_term(word) diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/setup_command.py --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/setup_command.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,89 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.setup_command - ~~~~~~~~~~~~~~~~~~~~ - - Setuptools/distutils commands to assist the building of sphinx - documentation. 
- - :author: Sebastian Wiesner - :contact: basti.wiesner@gmx.net - :copyright: 2008 by Sebastian Wiesner. - :license: MIT. -""" - -import sys -import os -from StringIO import StringIO -from distutils.cmd import Command - -from sphinx.application import Sphinx -from sphinx.util.console import darkred, nocolor - - -class BuildDoc(Command): - """Distutils command to build Sphinx documentation.""" - - description = 'Build Sphinx documentation' - user_options = [ - ('fresh-env', 'E', 'discard saved environment'), - ('all-files', 'a', 'build all files'), - ('source-dir=', 's', 'Source directory'), - ('build-dir=', None, 'Build directory'), - ('builder=', 'b', 'The builder to use. Defaults to "html"'), - ] - boolean_options = ['fresh-env', 'all-files'] - - - def initialize_options(self): - self.fresh_env = self.all_files = False - self.source_dir = self.build_dir = None - self.conf_file_name = 'conf.py' - self.builder = 'html' - - def finalize_options(self): - if self.source_dir is None: - if os.path.isdir('doc'): - for root, dirnames, filenames in os.walk('doc'): - if 'conf.py' in filenames: - self.source_dir = root - self.announce('Using source directory %s' % root) - break - self.ensure_dirname('source_dir') - self.source_dir = os.path.abspath(self.source_dir) - - if self.build_dir is None: - build = self.get_finalized_command('build') - self.build_dir = os.path.join(build.build_base, 'sphinx') - self.mkpath(self.build_dir) - self.ensure_dirname('build_dir') - self.doctree_dir = os.path.join(self.build_dir, 'doctrees') - self.mkpath(self.doctree_dir) - self.builder_target_dir = os.path.join(self.build_dir, self.builder) - self.mkpath(self.builder_target_dir) - - def run(self): - if not sys.stdout.isatty() or sys.platform == 'win32': - # Windows' poor cmd box doesn't understand ANSI sequences - nocolor() - if not self.verbose: - status_stream = StringIO() - else: - status_stream = sys.stdout - app = Sphinx(self.source_dir, self.source_dir, - self.builder_target_dir, self.doctree_dir, - self.builder, {}, status_stream, - freshenv=self.fresh_env) - - try: - if self.all_files: - app.builder.build_all() - else: - app.builder.build_update() - except Exception, err: - from docutils.utils import SystemMessage - if isinstance(err, SystemMessage): - print >>sys.stderr, darkred('reST markup error:') - print >>sys.stderr, err.args[0].encode('ascii', 'backslashreplace') - else: - raise diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/static/contents.png Binary file buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/static/contents.png has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/static/default.css --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/static/default.css Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,657 +0,0 @@ -/** - * Sphinx Doc Design - */ - -body { - font-family: sans-serif; - font-size: 100%; - background-color: #11303d; - color: #000; - margin: 0; - padding: 0; -} - -/* :::: LAYOUT :::: */ - -div.document { - background-color: #1c4e63; -} - -div.documentwrapper { - float: left; - width: 100%; -} - -div.bodywrapper { - margin: 0 0 0 230px; -} - -div.body { - background-color: white; - padding: 0 20px 30px 20px; -} - -div.sphinxsidebarwrapper { - padding: 10px 5px 0 10px; -} - -div.sphinxsidebar { - float: left; - width: 230px; - margin-left: -100%; - font-size: 90%;
-} - -div.clearer { - clear: both; -} - -div.footer { - color: #fff; - width: 100%; - padding: 9px 0 9px 0; - text-align: center; - font-size: 75%; -} - -div.footer a { - color: #fff; - text-decoration: underline; -} - -div.related { - background-color: #133f52; - color: #fff; - width: 100%; - line-height: 30px; - font-size: 90%; -} - -div.related h3 { - display: none; -} - -div.related ul { - margin: 0; - padding: 0 0 0 10px; - list-style: none; -} - -div.related li { - display: inline; -} - -div.related li.right { - float: right; - margin-right: 5px; -} - -div.related a { - color: white; -} - -/* ::: TOC :::: */ -div.sphinxsidebar h3 { - font-family: 'Trebuchet MS', sans-serif; - color: white; - font-size: 1.4em; - font-weight: normal; - margin: 0; - padding: 0; -} - -div.sphinxsidebar h3 a { - color: white; -} - -div.sphinxsidebar h4 { - font-family: 'Trebuchet MS', sans-serif; - color: white; - font-size: 1.3em; - font-weight: normal; - margin: 5px 0 0 0; - padding: 0; -} - -div.sphinxsidebar p { - color: white; -} - -div.sphinxsidebar p.topless { - margin: 5px 10px 10px 10px; -} - -div.sphinxsidebar ul { - margin: 10px; - padding: 0; - list-style: none; - color: white; -} - -div.sphinxsidebar ul ul, -div.sphinxsidebar ul.want-points { - margin-left: 20px; - list-style: square; -} - -div.sphinxsidebar ul ul { - margin-top: 0; - margin-bottom: 0; -} - -div.sphinxsidebar a { - color: #98dbcc; -} - -div.sphinxsidebar form { - margin-top: 10px; -} - -div.sphinxsidebar input { - border: 1px solid #98dbcc; - font-family: sans-serif; - font-size: 1em; -} - -/* :::: MODULE CLOUD :::: */ -div.modulecloud { - margin: -5px 10px 5px 10px; - padding: 10px; - line-height: 160%; - border: 1px solid #cbe7e5; - background-color: #f2fbfd; -} - -div.modulecloud a { - padding: 0 5px 0 5px; -} - -/* :::: SEARCH :::: */ -ul.search { - margin: 10px 0 0 20px; - padding: 0; -} - -ul.search li { - padding: 5px 0 5px 20px; - background-image: url(file.png); - background-repeat: no-repeat; - background-position: 0 7px; -} - -ul.search li a { - font-weight: bold; -} - -ul.search li div.context { - color: #888; - margin: 2px 0 0 30px; - text-align: left; -} - -ul.keywordmatches li.goodmatch a { - font-weight: bold; -} - -/* :::: COMMON FORM STYLES :::: */ - -div.actions { - padding: 5px 10px 5px 10px; - border-top: 1px solid #cbe7e5; - border-bottom: 1px solid #cbe7e5; - background-color: #e0f6f4; -} - -form dl { - color: #333; -} - -form dt { - clear: both; - float: left; - min-width: 110px; - margin-right: 10px; - padding-top: 2px; -} - -input#homepage { - display: none; -} - -div.error { - margin: 5px 20px 0 0; - padding: 5px; - border: 1px solid #d00; - font-weight: bold; -} - -/* :::: INDEX PAGE :::: */ - -table.contentstable { - width: 90%; -} - -table.contentstable p.biglink { - line-height: 150%; -} - -a.biglink { - font-size: 1.3em; -} - -span.linkdescr { - font-style: italic; - padding-top: 5px; - font-size: 90%; -} - -/* :::: INDEX STYLES :::: */ - -table.indextable td { - text-align: left; - vertical-align: top; -} - -table.indextable dl, table.indextable dd { - margin-top: 0; - margin-bottom: 0; -} - -table.indextable tr.pcap { - height: 10px; -} - -table.indextable tr.cap { - margin-top: 10px; - background-color: #f2f2f2; -} - -img.toggler { - margin-right: 3px; - margin-top: 3px; - cursor: pointer; -} - -form.pfform { - margin: 10px 0 20px 0; -} - -/* :::: GLOBAL STYLES :::: */ - -.docwarning { - background-color: #ffe4e4; - padding: 10px; - margin: 0 -20px 0 -20px; - border-bottom: 1px solid #f66; 
-} - -p.subhead { - font-weight: bold; - margin-top: 20px; -} - -a { - color: #355f7c; - text-decoration: none; -} - -a:hover { - text-decoration: underline; -} - -div.body h1, -div.body h2, -div.body h3, -div.body h4, -div.body h5, -div.body h6 { - font-family: 'Trebuchet MS', sans-serif; - background-color: #f2f2f2; - font-weight: normal; - color: #20435c; - border-bottom: 1px solid #ccc; - margin: 20px -20px 10px -20px; - padding: 3px 0 3px 10px; -} - -div.body h1 { margin-top: 0; font-size: 200%; } -div.body h2 { font-size: 160%; } -div.body h3 { font-size: 140%; } -div.body h4 { font-size: 120%; } -div.body h5 { font-size: 110%; } -div.body h6 { font-size: 100%; } - -a.headerlink { - color: #c60f0f; - font-size: 0.8em; - padding: 0 4px 0 4px; - text-decoration: none; - visibility: hidden; -} - -h1:hover > a.headerlink, -h2:hover > a.headerlink, -h3:hover > a.headerlink, -h4:hover > a.headerlink, -h5:hover > a.headerlink, -h6:hover > a.headerlink, -dt:hover > a.headerlink { - visibility: visible; -} - -a.headerlink:hover { - background-color: #c60f0f; - color: white; -} - -div.body p, div.body dd, div.body li { - text-align: justify; - line-height: 130%; -} - -div.body p.caption { - text-align: inherit; -} - -div.body td { - text-align: left; -} - -ul.fakelist { - list-style: none; - margin: 10px 0 10px 20px; - padding: 0; -} - -.field-list ul { - padding-left: 1em; -} - -.first { - margin-top: 0 !important; -} - -/* "Footnotes" heading */ -p.rubric { - margin-top: 30px; - font-weight: bold; -} - -/* Sidebars */ - -div.sidebar { - margin: 0 0 0.5em 1em; - border: 1px solid #ddb; - padding: 7px 7px 0 7px; - background-color: #ffe; - width: 40%; - float: right; -} - -p.sidebar-title { - font-weight: bold; -} - -/* "Topics" */ - -div.topic { - background-color: #eee; - border: 1px solid #ccc; - padding: 7px 7px 0 7px; - margin: 10px 0 10px 0; -} - -p.topic-title { - font-size: 1.1em; - font-weight: bold; - margin-top: 10px; -} - -/* Admonitions */ - -div.admonition { - margin-top: 10px; - margin-bottom: 10px; - padding: 7px; -} - -div.admonition dt { - font-weight: bold; -} - -div.admonition dl { - margin-bottom: 0; -} - -div.admonition p.admonition-title + p { - display: inline; -} - -div.seealso { - background-color: #ffc; - border: 1px solid #ff6; -} - -div.warning { - background-color: #ffe4e4; - border: 1px solid #f66; -} - -div.note { - background-color: #eee; - border: 1px solid #ccc; -} - -p.admonition-title { - margin: 0px 10px 5px 0px; - font-weight: bold; - display: inline; -} - -p.admonition-title:after { - content: ":"; -} - -div.body p.centered { - text-align: center; - margin-top: 25px; -} - -table.docutils { - border: 0; -} - -table.docutils td, table.docutils th { - padding: 1px 8px 1px 0; - border-top: 0; - border-left: 0; - border-right: 0; - border-bottom: 1px solid #aaa; -} - -table.field-list td, table.field-list th { - border: 0 !important; -} - -table.footnote td, table.footnote th { - border: 0 !important; -} - -.field-list ul { - margin: 0; - padding-left: 1em; -} - -.field-list p { - margin: 0; -} - -dl { - margin-bottom: 15px; - clear: both; -} - -dd p { - margin-top: 0px; -} - -dd ul, dd table { - margin-bottom: 10px; -} - -dd { - margin-top: 3px; - margin-bottom: 10px; - margin-left: 30px; -} - -.refcount { - color: #060; -} - -dt:target, -.highlight { - background-color: #fbe54e; -} - -dl.glossary dt { - font-weight: bold; - font-size: 1.1em; -} - -th { - text-align: left; - padding-right: 5px; -} - -pre { - padding: 5px; - background-color: #efc; - color: 
#333; - border: 1px solid #ac9; - border-left: none; - border-right: none; - overflow: auto; -} - -td.linenos pre { - padding: 5px 0px; - border: 0; - background-color: transparent; - color: #aaa; -} - -table.highlighttable { - margin-left: 0.5em; -} - -table.highlighttable td { - padding: 0 0.5em 0 0.5em; -} - -tt { - background-color: #ecf0f3; - padding: 0 1px 0 1px; - font-size: 0.95em; -} - -tt.descname { - background-color: transparent; - font-weight: bold; - font-size: 1.2em; -} - -tt.descclassname { - background-color: transparent; -} - -tt.xref, a tt { - background-color: transparent; - font-weight: bold; -} - -.footnote:target { background-color: #ffa } - -h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt { - background-color: transparent; -} - -.optional { - font-size: 1.3em; -} - -.versionmodified { - font-style: italic; -} - -form.comment { - margin: 0; - padding: 10px 30px 10px 30px; - background-color: #eee; -} - -form.comment h3 { - background-color: #326591; - color: white; - margin: -10px -30px 10px -30px; - padding: 5px; - font-size: 1.4em; -} - -form.comment input, -form.comment textarea { - border: 1px solid #ccc; - padding: 2px; - font-family: sans-serif; - font-size: 100%; -} - -form.comment input[type="text"] { - width: 240px; -} - -form.comment textarea { - width: 100%; - height: 200px; - margin-bottom: 10px; -} - -.system-message { - background-color: #fda; - padding: 5px; - border: 3px solid red; -} - -img.math { - vertical-align: middle; -} - -div.math p { - text-align: center; -} - -span.eqno { - float: right; -} - -img.logo { - border: 0; -} - -/* :::: PRINT :::: */ -@media print { - div.document, - div.documentwrapper, - div.bodywrapper { - margin: 0; - width : 100%; - } - - div.sphinxsidebar, - div.related, - div.footer, - div#comments div.new-comment-box, - #top-link { - display: none; - } -} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/static/doctools.js --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/static/doctools.js Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,232 +0,0 @@ -/// XXX: make it cross browser - -/** - * make the code below compatible with browsers without - * an installed firebug like debugger - */ -if (!window.console || !console.firebug) { - var names = ["log", "debug", "info", "warn", "error", "assert", "dir", "dirxml", - "group", "groupEnd", "time", "timeEnd", "count", "trace", "profile", "profileEnd"]; - window.console = {}; - for (var i = 0; i < names.length; ++i) - window.console[names[i]] = function() {} -} - -/** - * small helper function to urldecode strings - */ -jQuery.urldecode = function(x) { - return decodeURIComponent(x).replace(/\+/g, ' '); -} - -/** - * small helper function to urlencode strings - */ -jQuery.urlencode = encodeURIComponent; - -/** - * This function returns the parsed url parameters of the - * current request. Multiple values per key are supported, - * it will always return arrays of strings for the value parts. 
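For reference, the multi-valued query-string parsing that the deleted jQuery.getQueryParameters helper (described in the comment above and defined just below) implements — repeated keys collected into arrays of decoded string values — can be sketched in a few lines of Python with the standard urllib.parse module. This is only an illustrative comparison; parse_query is a hypothetical helper name and is not part of Helium, Sphinx, or jQuery.

# Sketch: parse a query string into {key: [values]}, decoding percent-escapes
# and '+' as space, mirroring the behaviour of jQuery.getQueryParameters.
from urllib.parse import parse_qs

def parse_query(search):
    # Drop everything up to and including '?', as the JS helper does.
    query = search.partition('?')[2] if '?' in search else search
    # parse_qs always maps each key to a list of string values;
    # keep_blank_values=True also keeps keys given with an empty value.
    return parse_qs(query, keep_blank_values=True)

# Example: parse_query('?highlight=foo&highlight=bar&page=2')
# -> {'highlight': ['foo', 'bar'], 'page': ['2']}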
- */ -jQuery.getQueryParameters = function(s) { - if (typeof s == 'undefined') - s = document.location.search; - var parts = s.substr(s.indexOf('?') + 1).split('&'); - var result = {}; - for (var i = 0; i < parts.length; i++) { - var tmp = parts[i].split('=', 2); - var key = jQuery.urldecode(tmp[0]); - var value = jQuery.urldecode(tmp[1]); - if (key in result) - result[key].push(value); - else - result[key] = [value]; - } - return result; -} - -/** - * small function to check if an array contains - * a given item. - */ -jQuery.contains = function(arr, item) { - for (var i = 0; i < arr.length; i++) { - if (arr[i] == item) - return true; - } - return false; -} - -/** - * highlight a given string on a jquery object by wrapping it in - * span elements with the given class name. - */ -jQuery.fn.highlightText = function(text, className) { - function highlight(node) { - if (node.nodeType == 3) { - var val = node.nodeValue; - var pos = val.toLowerCase().indexOf(text); - if (pos >= 0 && !jQuery.className.has(node.parentNode, className)) { - var span = document.createElement("span"); - span.className = className; - span.appendChild(document.createTextNode(val.substr(pos, text.length))); - node.parentNode.insertBefore(span, node.parentNode.insertBefore( - document.createTextNode(val.substr(pos + text.length)), - node.nextSibling)); - node.nodeValue = val.substr(0, pos); - } - } - else if (!jQuery(node).is("button, select, textarea")) { - jQuery.each(node.childNodes, function() { - highlight(this) - }); - } - } - return this.each(function() { - highlight(this); - }); -} - -/** - * Small JavaScript module for the documentation. - */ -var Documentation = { - - init : function() { - this.fixFirefoxAnchorBug(); - this.highlightSearchWords(); - this.initModIndex(); - }, - - /** - * i18n support - */ - TRANSLATIONS : {}, - PLURAL_EXPR : function(n) { return n == 1 ? 0 : 1; }, - LOCALE : 'unknown', - - // gettext and ngettext don't access this so that the functions - // can savely bound to a different name (_ = Documentation.gettext) - gettext : function(string) { - var translated = Documentation.TRANSLATIONS[string]; - if (typeof translated == 'undefined') - return string; - return (typeof translated == 'string') ? translated : translated[0]; - }, - - ngettext : function(singular, plural, n) { - var translated = Documentation.TRANSLATIONS[singular]; - if (typeof translated == 'undefined') - return (n == 1) ? singular : plural; - return translated[Documentation.PLURALEXPR(n)]; - }, - - addTranslations : function(catalog) { - for (var key in catalog.messages) - this.TRANSLATIONS[key] = catalog.messages[key]; - this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')'); - this.LOCALE = catalog.locale; - }, - - /** - * add context elements like header anchor links - */ - addContextElements : function() { - $('div[@id] > :header:first').each(function() { - $('\u00B6'). - attr('href', '#' + this.id). - attr('title', _('Permalink to this headline')). - appendTo(this); - }); - $('dt[@id]').each(function() { - $('\u00B6'). - attr('href', '#' + this.id). - attr('title', _('Permalink to this definition')). 
- appendTo(this); - }); - }, - - /** - * workaround a firefox stupidity - */ - fixFirefoxAnchorBug : function() { - if (document.location.hash && $.browser.mozilla) - window.setTimeout(function() { - document.location.href += ''; - }, 10); - }, - - /** - * highlight the search words provided in the url in the text - */ - highlightSearchWords : function() { - var params = $.getQueryParameters(); - var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : []; - if (terms.length) { - var body = $('div.body'); - window.setTimeout(function() { - $.each(terms, function() { - body.highlightText(this.toLowerCase(), 'highlight'); - }); - }, 10); - $('
  • ') - .appendTo($('.sidebar .this-page-menu')); - } - }, - - /** - * init the modindex toggle buttons - */ - initModIndex : function() { - var togglers = $('img.toggler').click(function() { - var src = $(this).attr('src'); - var idnum = $(this).attr('id').substr(7); - console.log($('tr.cg-' + idnum).toggle()); - if (src.substr(-9) == 'minus.png') - $(this).attr('src', src.substr(0, src.length-9) + 'plus.png'); - else - $(this).attr('src', src.substr(0, src.length-8) + 'minus.png'); - }).css('display', ''); - if (DOCUMENTATION_OPTIONS.COLLAPSE_MODINDEX) { - togglers.click(); - } - }, - - /** - * helper function to hide the search marks again - */ - hideSearchWords : function() { - $('.sidebar .this-page-menu li.highlight-link').fadeOut(300); - $('span.highlight').removeClass('highlight'); - }, - - /** - * make the url absolute - */ - makeURL : function(relativeURL) { - return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL; - }, - - /** - * get the current relative url - */ - getCurrentURL : function() { - var path = document.location.pathname; - var parts = path.split(/\//); - $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() { - if (this == '..') - parts.pop(); - }); - var url = parts.join('/'); - return path.substring(url.lastIndexOf('/') + 1, path.length - 1); - } -}; - -// quick alias for translations -_ = Documentation.gettext; - -$(document).ready(function() { - Documentation.init(); -}); diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/static/file.png Binary file buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/static/file.png has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/static/jquery.js --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/static/jquery.js Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,32 +0,0 @@ -/* - * jQuery 1.2.6 - New Wave Javascript - * - * Copyright (c) 2008 John Resig (jquery.com) - * Dual licensed under the MIT (MIT-LICENSE.txt) - * and GPL (GPL-LICENSE.txt) licenses. 
- * - * $Date: 2008-05-24 14:22:17 -0400 (Sat, 24 May 2008) $ - * $Rev: 5685 $ - */ -(function(){var _jQuery=window.jQuery,_$=window.$;var jQuery=window.jQuery=window.$=function(selector,context){return new jQuery.fn.init(selector,context);};var quickExpr=/^[^<]*(<(.|\s)+>)[^>]*$|^#(\w+)$/,isSimple=/^.[^:#\[\.]*$/,undefined;jQuery.fn=jQuery.prototype={init:function(selector,context){selector=selector||document;if(selector.nodeType){this[0]=selector;this.length=1;return this;}if(typeof selector=="string"){var match=quickExpr.exec(selector);if(match&&(match[1]||!context)){if(match[1])selector=jQuery.clean([match[1]],context);else{var elem=document.getElementById(match[3]);if(elem){if(elem.id!=match[3])return jQuery().find(selector);return jQuery(elem);}selector=[];}}else -return jQuery(context).find(selector);}else if(jQuery.isFunction(selector))return jQuery(document)[jQuery.fn.ready?"ready":"load"](selector);return this.setArray(jQuery.makeArray(selector));},jquery:"1.2.6",size:function(){return this.length;},length:0,get:function(num){return num==undefined?jQuery.makeArray(this):this[num];},pushStack:function(elems){var ret=jQuery(elems);ret.prevObject=this;return ret;},setArray:function(elems){this.length=0;Array.prototype.push.apply(this,elems);return this;},each:function(callback,args){return jQuery.each(this,callback,args);},index:function(elem){var ret=-1;return jQuery.inArray(elem&&elem.jquery?elem[0]:elem,this);},attr:function(name,value,type){var options=name;if(name.constructor==String)if(value===undefined)return this[0]&&jQuery[type||"attr"](this[0],name);else{options={};options[name]=value;}return this.each(function(i){for(name in options)jQuery.attr(type?this.style:this,name,jQuery.prop(this,options[name],type,i,name));});},css:function(key,value){if((key=='width'||key=='height')&&parseFloat(value)<0)value=undefined;return this.attr(key,value,"curCSS");},text:function(text){if(typeof text!="object"&&text!=null)return this.empty().append((this[0]&&this[0].ownerDocument||document).createTextNode(text));var ret="";jQuery.each(text||this,function(){jQuery.each(this.childNodes,function(){if(this.nodeType!=8)ret+=this.nodeType!=1?this.nodeValue:jQuery.fn.text([this]);});});return ret;},wrapAll:function(html){if(this[0])jQuery(html,this[0].ownerDocument).clone().insertBefore(this[0]).map(function(){var elem=this;while(elem.firstChild)elem=elem.firstChild;return elem;}).append(this);return this;},wrapInner:function(html){return this.each(function(){jQuery(this).contents().wrapAll(html);});},wrap:function(html){return this.each(function(){jQuery(this).wrapAll(html);});},append:function(){return this.domManip(arguments,true,false,function(elem){if(this.nodeType==1)this.appendChild(elem);});},prepend:function(){return this.domManip(arguments,true,true,function(elem){if(this.nodeType==1)this.insertBefore(elem,this.firstChild);});},before:function(){return this.domManip(arguments,false,false,function(elem){this.parentNode.insertBefore(elem,this);});},after:function(){return this.domManip(arguments,false,true,function(elem){this.parentNode.insertBefore(elem,this.nextSibling);});},end:function(){return this.prevObject||jQuery([]);},find:function(selector){var elems=jQuery.map(this,function(elem){return jQuery.find(selector,elem);});return this.pushStack(/[^+>] [^+>]/.test(selector)||selector.indexOf("..")>-1?jQuery.unique(elems):elems);},clone:function(events){var ret=this.map(function(){if(jQuery.browser.msie&&!jQuery.isXMLDoc(this)){var 
clone=this.cloneNode(true),container=document.createElement("div");container.appendChild(clone);return jQuery.clean([container.innerHTML])[0];}else -return this.cloneNode(true);});var clone=ret.find("*").andSelf().each(function(){if(this[expando]!=undefined)this[expando]=null;});if(events===true)this.find("*").andSelf().each(function(i){if(this.nodeType==3)return;var events=jQuery.data(this,"events");for(var type in events)for(var handler in events[type])jQuery.event.add(clone[i],type,events[type][handler],events[type][handler].data);});return ret;},filter:function(selector){return this.pushStack(jQuery.isFunction(selector)&&jQuery.grep(this,function(elem,i){return selector.call(elem,i);})||jQuery.multiFilter(selector,this));},not:function(selector){if(selector.constructor==String)if(isSimple.test(selector))return this.pushStack(jQuery.multiFilter(selector,this,true));else -selector=jQuery.multiFilter(selector,this);var isArrayLike=selector.length&&selector[selector.length-1]!==undefined&&!selector.nodeType;return this.filter(function(){return isArrayLike?jQuery.inArray(this,selector)<0:this!=selector;});},add:function(selector){return this.pushStack(jQuery.unique(jQuery.merge(this.get(),typeof selector=='string'?jQuery(selector):jQuery.makeArray(selector))));},is:function(selector){return!!selector&&jQuery.multiFilter(selector,this).length>0;},hasClass:function(selector){return this.is("."+selector);},val:function(value){if(value==undefined){if(this.length){var elem=this[0];if(jQuery.nodeName(elem,"select")){var index=elem.selectedIndex,values=[],options=elem.options,one=elem.type=="select-one";if(index<0)return null;for(var i=one?index:0,max=one?index+1:options.length;i=0||jQuery.inArray(this.name,value)>=0);else if(jQuery.nodeName(this,"select")){var values=jQuery.makeArray(value);jQuery("option",this).each(function(){this.selected=(jQuery.inArray(this.value,values)>=0||jQuery.inArray(this.text,values)>=0);});if(!values.length)this.selectedIndex=-1;}else -this.value=value;});},html:function(value){return value==undefined?(this[0]?this[0].innerHTML:null):this.empty().append(value);},replaceWith:function(value){return this.after(value).remove();},eq:function(i){return this.slice(i,i+1);},slice:function(){return this.pushStack(Array.prototype.slice.apply(this,arguments));},map:function(callback){return this.pushStack(jQuery.map(this,function(elem,i){return callback.call(elem,i,elem);}));},andSelf:function(){return this.add(this.prevObject);},data:function(key,value){var parts=key.split(".");parts[1]=parts[1]?"."+parts[1]:"";if(value===undefined){var data=this.triggerHandler("getData"+parts[1]+"!",[parts[0]]);if(data===undefined&&this.length)data=jQuery.data(this[0],key);return data===undefined&&parts[1]?this.data(parts[0]):data;}else -return this.trigger("setData"+parts[1]+"!",[parts[0],value]).each(function(){jQuery.data(this,key,value);});},removeData:function(key){return this.each(function(){jQuery.removeData(this,key);});},domManip:function(args,table,reverse,callback){var clone=this.length>1,elems;return this.each(function(){if(!elems){elems=jQuery.clean(args,this.ownerDocument);if(reverse)elems.reverse();}var obj=this;if(table&&jQuery.nodeName(this,"table")&&jQuery.nodeName(elems[0],"tr"))obj=this.getElementsByTagName("tbody")[0]||this.appendChild(this.ownerDocument.createElement("tbody"));var scripts=jQuery([]);jQuery.each(elems,function(){var 
elem=clone?jQuery(this).clone(true)[0]:this;if(jQuery.nodeName(elem,"script"))scripts=scripts.add(elem);else{if(elem.nodeType==1)scripts=scripts.add(jQuery("script",elem).remove());callback.call(obj,elem);}});scripts.each(evalScript);});}};jQuery.fn.init.prototype=jQuery.fn;function evalScript(i,elem){if(elem.src)jQuery.ajax({url:elem.src,async:false,dataType:"script"});else -jQuery.globalEval(elem.text||elem.textContent||elem.innerHTML||"");if(elem.parentNode)elem.parentNode.removeChild(elem);}function now(){return+new Date;}jQuery.extend=jQuery.fn.extend=function(){var target=arguments[0]||{},i=1,length=arguments.length,deep=false,options;if(target.constructor==Boolean){deep=target;target=arguments[1]||{};i=2;}if(typeof target!="object"&&typeof target!="function")target={};if(length==i){target=this;--i;}for(;i-1;}},swap:function(elem,options,callback){var old={};for(var name in options){old[name]=elem.style[name];elem.style[name]=options[name];}callback.call(elem);for(var name in options)elem.style[name]=old[name];},css:function(elem,name,force){if(name=="width"||name=="height"){var val,props={position:"absolute",visibility:"hidden",display:"block"},which=name=="width"?["Left","Right"]:["Top","Bottom"];function getWH(){val=name=="width"?elem.offsetWidth:elem.offsetHeight;var padding=0,border=0;jQuery.each(which,function(){padding+=parseFloat(jQuery.curCSS(elem,"padding"+this,true))||0;border+=parseFloat(jQuery.curCSS(elem,"border"+this+"Width",true))||0;});val-=Math.round(padding+border);}if(jQuery(elem).is(":visible"))getWH();else -jQuery.swap(elem,props,getWH);return Math.max(0,val);}return jQuery.curCSS(elem,name,force);},curCSS:function(elem,name,force){var ret,style=elem.style;function color(elem){if(!jQuery.browser.safari)return false;var ret=defaultView.getComputedStyle(elem,null);return!ret||ret.getPropertyValue("color")=="";}if(name=="opacity"&&jQuery.browser.msie){ret=jQuery.attr(style,"opacity");return ret==""?"1":ret;}if(jQuery.browser.opera&&name=="display"){var save=style.outline;style.outline="0 solid black";style.outline=save;}if(name.match(/float/i))name=styleFloat;if(!force&&style&&style[name])ret=style[name];else if(defaultView.getComputedStyle){if(name.match(/float/i))name="float";name=name.replace(/([A-Z])/g,"-$1").toLowerCase();var computedStyle=defaultView.getComputedStyle(elem,null);if(computedStyle&&!color(elem))ret=computedStyle.getPropertyValue(name);else{var swap=[],stack=[],a=elem,i=0;for(;a&&color(a);a=a.parentNode)stack.unshift(a);for(;i]*?)\/>/g,function(all,front,tag){return tag.match(/^(abbr|br|col|img|input|link|meta|param|hr|area|embed)$/i)?all:front+">";});var tags=jQuery.trim(elem).toLowerCase(),div=context.createElement("div");var wrap=!tags.indexOf("",""]||!tags.indexOf("",""]||tags.match(/^<(thead|tbody|tfoot|colg|cap)/)&&[1,"","
    "]||!tags.indexOf("",""]||(!tags.indexOf("",""]||!tags.indexOf("",""]||jQuery.browser.msie&&[1,"div
    ","
    "]||[0,"",""];div.innerHTML=wrap[1]+elem+wrap[2];while(wrap[0]--)div=div.lastChild;if(jQuery.browser.msie){var tbody=!tags.indexOf(""&&tags.indexOf("=0;--j)if(jQuery.nodeName(tbody[j],"tbody")&&!tbody[j].childNodes.length)tbody[j].parentNode.removeChild(tbody[j]);if(/^\s/.test(elem))div.insertBefore(context.createTextNode(elem.match(/^\s*/)[0]),div.firstChild);}elem=jQuery.makeArray(div.childNodes);}if(elem.length===0&&(!jQuery.nodeName(elem,"form")&&!jQuery.nodeName(elem,"select")))return;if(elem[0]==undefined||jQuery.nodeName(elem,"form")||elem.options)ret.push(elem);else -ret=jQuery.merge(ret,elem);});return ret;},attr:function(elem,name,value){if(!elem||elem.nodeType==3||elem.nodeType==8)return undefined;var notxml=!jQuery.isXMLDoc(elem),set=value!==undefined,msie=jQuery.browser.msie;name=notxml&&jQuery.props[name]||name;if(elem.tagName){var special=/href|src|style/.test(name);if(name=="selected"&&jQuery.browser.safari)elem.parentNode.selectedIndex;if(name in elem&¬xml&&!special){if(set){if(name=="type"&&jQuery.nodeName(elem,"input")&&elem.parentNode)throw"type property can't be changed";elem[name]=value;}if(jQuery.nodeName(elem,"form")&&elem.getAttributeNode(name))return elem.getAttributeNode(name).nodeValue;return elem[name];}if(msie&¬xml&&name=="style")return jQuery.attr(elem.style,"cssText",value);if(set)elem.setAttribute(name,""+value);var attr=msie&¬xml&&special?elem.getAttribute(name,2):elem.getAttribute(name);return attr===null?undefined:attr;}if(msie&&name=="opacity"){if(set){elem.zoom=1;elem.filter=(elem.filter||"").replace(/alpha\([^)]*\)/,"")+(parseInt(value)+''=="NaN"?"":"alpha(opacity="+value*100+")");}return elem.filter&&elem.filter.indexOf("opacity=")>=0?(parseFloat(elem.filter.match(/opacity=([^)]*)/)[1])/100)+'':"";}name=name.replace(/-([a-z])/ig,function(all,letter){return letter.toUpperCase();});if(set)elem[name]=value;return elem[name];},trim:function(text){return(text||"").replace(/^\s+|\s+$/g,"");},makeArray:function(array){var ret=[];if(array!=null){var i=array.length;if(i==null||array.split||array.setInterval||array.call)ret[0]=array;else -while(i)ret[--i]=array[i];}return ret;},inArray:function(elem,array){for(var i=0,length=array.length;i*",this).remove();while(this.firstChild)this.removeChild(this.firstChild);}},function(name,fn){jQuery.fn[name]=function(){return this.each(fn,arguments);};});jQuery.each(["Height","Width"],function(i,name){var type=name.toLowerCase();jQuery.fn[type]=function(size){return this[0]==window?jQuery.browser.opera&&document.body["client"+name]||jQuery.browser.safari&&window["inner"+name]||document.compatMode=="CSS1Compat"&&document.documentElement["client"+name]||document.body["client"+name]:this[0]==document?Math.max(Math.max(document.body["scroll"+name],document.documentElement["scroll"+name]),Math.max(document.body["offset"+name],document.documentElement["offset"+name])):size==undefined?(this.length?jQuery.css(this[0],type):null):this.css(type,size.constructor==String?size:size+"px");};});function num(elem,prop){return elem[0]&&parseInt(jQuery.curCSS(elem[0],prop,true),10)||0;}var chars=jQuery.browser.safari&&parseInt(jQuery.browser.version)<417?"(?:[\\w*_-]|\\\\.)":"(?:[\\w\u0128-\uFFFF*_-]|\\\\.)",quickChild=new RegExp("^>\\s*("+chars+"+)"),quickID=new RegExp("^("+chars+"+)(#)("+chars+"+)"),quickClass=new RegExp("^([#.]?)("+chars+"*)");jQuery.extend({expr:{"":function(a,i,m){return m[2]=="*"||jQuery.nodeName(a,m[2]);},"#":function(a,i,m){return a.getAttribute("id")==m[2];},":":{lt:function(a,i,m){return 
im[3]-0;},nth:function(a,i,m){return m[3]-0==i;},eq:function(a,i,m){return m[3]-0==i;},first:function(a,i){return i==0;},last:function(a,i,m,r){return i==r.length-1;},even:function(a,i){return i%2==0;},odd:function(a,i){return i%2;},"first-child":function(a){return a.parentNode.getElementsByTagName("*")[0]==a;},"last-child":function(a){return jQuery.nth(a.parentNode.lastChild,1,"previousSibling")==a;},"only-child":function(a){return!jQuery.nth(a.parentNode.lastChild,2,"previousSibling");},parent:function(a){return a.firstChild;},empty:function(a){return!a.firstChild;},contains:function(a,i,m){return(a.textContent||a.innerText||jQuery(a).text()||"").indexOf(m[3])>=0;},visible:function(a){return"hidden"!=a.type&&jQuery.css(a,"display")!="none"&&jQuery.css(a,"visibility")!="hidden";},hidden:function(a){return"hidden"==a.type||jQuery.css(a,"display")=="none"||jQuery.css(a,"visibility")=="hidden";},enabled:function(a){return!a.disabled;},disabled:function(a){return a.disabled;},checked:function(a){return a.checked;},selected:function(a){return a.selected||jQuery.attr(a,"selected");},text:function(a){return"text"==a.type;},radio:function(a){return"radio"==a.type;},checkbox:function(a){return"checkbox"==a.type;},file:function(a){return"file"==a.type;},password:function(a){return"password"==a.type;},submit:function(a){return"submit"==a.type;},image:function(a){return"image"==a.type;},reset:function(a){return"reset"==a.type;},button:function(a){return"button"==a.type||jQuery.nodeName(a,"button");},input:function(a){return/input|select|textarea|button/i.test(a.nodeName);},has:function(a,i,m){return jQuery.find(m[3],a).length;},header:function(a){return/h\d/i.test(a.nodeName);},animated:function(a){return jQuery.grep(jQuery.timers,function(fn){return a==fn.elem;}).length;}}},parse:[/^(\[) *@?([\w-]+) *([!*$^~=]*) *('?"?)(.*?)\4 *\]/,/^(:)([\w-]+)\("?'?(.*?(\(.*?\))?[^(]*?)"?'?\)/,new RegExp("^([:.#]*)("+chars+"+)")],multiFilter:function(expr,elems,not){var old,cur=[];while(expr&&expr!=old){old=expr;var f=jQuery.filter(expr,elems,not);expr=f.t.replace(/^\s*,\s*/,"");cur=not?elems=f.r:jQuery.merge(cur,f.r);}return cur;},find:function(t,context){if(typeof t!="string")return[t];if(context&&context.nodeType!=1&&context.nodeType!=9)return[];context=context||document;var ret=[context],done=[],last,nodeName;while(t&&last!=t){var r=[];last=t;t=jQuery.trim(t);var foundToken=false,re=quickChild,m=re.exec(t);if(m){nodeName=m[1].toUpperCase();for(var i=0;ret[i];i++)for(var c=ret[i].firstChild;c;c=c.nextSibling)if(c.nodeType==1&&(nodeName=="*"||c.nodeName.toUpperCase()==nodeName))r.push(c);ret=r;t=t.replace(re,"");if(t.indexOf(" ")==0)continue;foundToken=true;}else{re=/^([>+~])\s*(\w*)/i;if((m=re.exec(t))!=null){r=[];var merge={};nodeName=m[2].toUpperCase();m=m[1];for(var j=0,rl=ret.length;j=0;if(!not&&pass||not&&!pass)tmp.push(r[i]);}return tmp;},filter:function(t,r,not){var last;while(t&&t!=last){last=t;var p=jQuery.parse,m;for(var i=0;p[i];i++){m=p[i].exec(t);if(m){t=t.substring(m[0].length);m[2]=m[2].replace(/\\/g,"");break;}}if(!m)break;if(m[1]==":"&&m[2]=="not")r=isSimple.test(m[3])?jQuery.filter(m[3],r,true).r:jQuery(r).not(m[3]);else if(m[1]==".")r=jQuery.classFilter(r,m[2],not);else if(m[1]=="["){var tmp=[],type=m[3];for(var i=0,rl=r.length;i=0)^not)tmp.push(a);}r=tmp;}else if(m[1]==":"&&m[2]=="nth-child"){var merge={},tmp=[],test=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(m[3]=="even"&&"2n"||m[3]=="odd"&&"2n+1"||!/\D/.test(m[3])&&"0n+"+m[3]||m[3]),first=(test[1]+(test[2]||1))-0,last=test[3]-0;for(var 
i=0,rl=r.length;i=0)add=true;if(add^not)tmp.push(node);}r=tmp;}else{var fn=jQuery.expr[m[1]];if(typeof fn=="object")fn=fn[m[2]];if(typeof fn=="string")fn=eval("false||function(a,i){return "+fn+";}");r=jQuery.grep(r,function(elem,i){return fn(elem,i,m,r);},not);}}return{r:r,t:t};},dir:function(elem,dir){var matched=[],cur=elem[dir];while(cur&&cur!=document){if(cur.nodeType==1)matched.push(cur);cur=cur[dir];}return matched;},nth:function(cur,result,dir,elem){result=result||1;var num=0;for(;cur;cur=cur[dir])if(cur.nodeType==1&&++num==result)break;return cur;},sibling:function(n,elem){var r=[];for(;n;n=n.nextSibling){if(n.nodeType==1&&n!=elem)r.push(n);}return r;}});jQuery.event={add:function(elem,types,handler,data){if(elem.nodeType==3||elem.nodeType==8)return;if(jQuery.browser.msie&&elem.setInterval)elem=window;if(!handler.guid)handler.guid=this.guid++;if(data!=undefined){var fn=handler;handler=this.proxy(fn,function(){return fn.apply(this,arguments);});handler.data=data;}var events=jQuery.data(elem,"events")||jQuery.data(elem,"events",{}),handle=jQuery.data(elem,"handle")||jQuery.data(elem,"handle",function(){if(typeof jQuery!="undefined"&&!jQuery.event.triggered)return jQuery.event.handle.apply(arguments.callee.elem,arguments);});handle.elem=elem;jQuery.each(types.split(/\s+/),function(index,type){var parts=type.split(".");type=parts[0];handler.type=parts[1];var handlers=events[type];if(!handlers){handlers=events[type]={};if(!jQuery.event.special[type]||jQuery.event.special[type].setup.call(elem)===false){if(elem.addEventListener)elem.addEventListener(type,handle,false);else if(elem.attachEvent)elem.attachEvent("on"+type,handle);}}handlers[handler.guid]=handler;jQuery.event.global[type]=true;});elem=null;},guid:1,global:{},remove:function(elem,types,handler){if(elem.nodeType==3||elem.nodeType==8)return;var events=jQuery.data(elem,"events"),ret,index;if(events){if(types==undefined||(typeof types=="string"&&types.charAt(0)=="."))for(var type in events)this.remove(elem,type+(types||""));else{if(types.type){handler=types.handler;types=types.type;}jQuery.each(types.split(/\s+/),function(index,type){var parts=type.split(".");type=parts[0];if(events[type]){if(handler)delete events[type][handler.guid];else -for(handler in events[type])if(!parts[1]||events[type][handler].type==parts[1])delete events[type][handler];for(ret in events[type])break;if(!ret){if(!jQuery.event.special[type]||jQuery.event.special[type].teardown.call(elem)===false){if(elem.removeEventListener)elem.removeEventListener(type,jQuery.data(elem,"handle"),false);else if(elem.detachEvent)elem.detachEvent("on"+type,jQuery.data(elem,"handle"));}ret=null;delete events[type];}}});}for(ret in events)break;if(!ret){var handle=jQuery.data(elem,"handle");if(handle)handle.elem=null;jQuery.removeData(elem,"events");jQuery.removeData(elem,"handle");}}},trigger:function(type,data,elem,donative,extra){data=jQuery.makeArray(data);if(type.indexOf("!")>=0){type=type.slice(0,-1);var exclusive=true;}if(!elem){if(this.global[type])jQuery("*").add([window,document]).trigger(type,data);}else{if(elem.nodeType==3||elem.nodeType==8)return undefined;var val,ret,fn=jQuery.isFunction(elem[type]||null),event=!data[0]||!data[0].preventDefault;if(event){data.unshift({type:type,target:elem,preventDefault:function(){},stopPropagation:function(){},timeStamp:now()});data[0][expando]=true;}data[0].type=type;if(exclusive)data[0].exclusive=true;var 
handle=jQuery.data(elem,"handle");if(handle)val=handle.apply(elem,data);if((!fn||(jQuery.nodeName(elem,'a')&&type=="click"))&&elem["on"+type]&&elem["on"+type].apply(elem,data)===false)val=false;if(event)data.shift();if(extra&&jQuery.isFunction(extra)){ret=extra.apply(elem,val==null?data:data.concat(val));if(ret!==undefined)val=ret;}if(fn&&donative!==false&&val!==false&&!(jQuery.nodeName(elem,'a')&&type=="click")){this.triggered=true;try{elem[type]();}catch(e){}}this.triggered=false;}return val;},handle:function(event){var val,ret,namespace,all,handlers;event=arguments[0]=jQuery.event.fix(event||window.event);namespace=event.type.split(".");event.type=namespace[0];namespace=namespace[1];all=!namespace&&!event.exclusive;handlers=(jQuery.data(this,"events")||{})[event.type];for(var j in handlers){var handler=handlers[j];if(all||handler.type==namespace){event.handler=handler;event.data=handler.data;ret=handler.apply(this,arguments);if(val!==false)val=ret;if(ret===false){event.preventDefault();event.stopPropagation();}}}return val;},fix:function(event){if(event[expando]==true)return event;var originalEvent=event;event={originalEvent:originalEvent};var props="altKey attrChange attrName bubbles button cancelable charCode clientX clientY ctrlKey currentTarget data detail eventPhase fromElement handler keyCode metaKey newValue originalTarget pageX pageY prevValue relatedNode relatedTarget screenX screenY shiftKey srcElement target timeStamp toElement type view wheelDelta which".split(" ");for(var i=props.length;i;i--)event[props[i]]=originalEvent[props[i]];event[expando]=true;event.preventDefault=function(){if(originalEvent.preventDefault)originalEvent.preventDefault();originalEvent.returnValue=false;};event.stopPropagation=function(){if(originalEvent.stopPropagation)originalEvent.stopPropagation();originalEvent.cancelBubble=true;};event.timeStamp=event.timeStamp||now();if(!event.target)event.target=event.srcElement||document;if(event.target.nodeType==3)event.target=event.target.parentNode;if(!event.relatedTarget&&event.fromElement)event.relatedTarget=event.fromElement==event.target?event.toElement:event.fromElement;if(event.pageX==null&&event.clientX!=null){var doc=document.documentElement,body=document.body;event.pageX=event.clientX+(doc&&doc.scrollLeft||body&&body.scrollLeft||0)-(doc.clientLeft||0);event.pageY=event.clientY+(doc&&doc.scrollTop||body&&body.scrollTop||0)-(doc.clientTop||0);}if(!event.which&&((event.charCode||event.charCode===0)?event.charCode:event.keyCode))event.which=event.charCode||event.keyCode;if(!event.metaKey&&event.ctrlKey)event.metaKey=event.ctrlKey;if(!event.which&&event.button)event.which=(event.button&1?1:(event.button&2?3:(event.button&4?2:0)));return event;},proxy:function(fn,proxy){proxy.guid=fn.guid=fn.guid||proxy.guid||this.guid++;return proxy;},special:{ready:{setup:function(){bindReady();return;},teardown:function(){return;}},mouseenter:{setup:function(){if(jQuery.browser.msie)return false;jQuery(this).bind("mouseover",jQuery.event.special.mouseenter.handler);return true;},teardown:function(){if(jQuery.browser.msie)return false;jQuery(this).unbind("mouseover",jQuery.event.special.mouseenter.handler);return true;},handler:function(event){if(withinElement(event,this))return true;event.type="mouseenter";return jQuery.event.handle.apply(this,arguments);}},mouseleave:{setup:function(){if(jQuery.browser.msie)return false;jQuery(this).bind("mouseout",jQuery.event.special.mouseleave.handler);return true;},teardown:function(){if(jQuery.browser.msie)return 
false;jQuery(this).unbind("mouseout",jQuery.event.special.mouseleave.handler);return true;},handler:function(event){if(withinElement(event,this))return true;event.type="mouseleave";return jQuery.event.handle.apply(this,arguments);}}}};jQuery.fn.extend({bind:function(type,data,fn){return type=="unload"?this.one(type,data,fn):this.each(function(){jQuery.event.add(this,type,fn||data,fn&&data);});},one:function(type,data,fn){var one=jQuery.event.proxy(fn||data,function(event){jQuery(this).unbind(event,one);return(fn||data).apply(this,arguments);});return this.each(function(){jQuery.event.add(this,type,one,fn&&data);});},unbind:function(type,fn){return this.each(function(){jQuery.event.remove(this,type,fn);});},trigger:function(type,data,fn){return this.each(function(){jQuery.event.trigger(type,data,this,true,fn);});},triggerHandler:function(type,data,fn){return this[0]&&jQuery.event.trigger(type,data,this[0],false,fn);},toggle:function(fn){var args=arguments,i=1;while(i=0){var selector=url.slice(off,url.length);url=url.slice(0,off);}callback=callback||function(){};var type="GET";if(params)if(jQuery.isFunction(params)){callback=params;params=null;}else{params=jQuery.param(params);type="POST";}var self=this;jQuery.ajax({url:url,type:type,dataType:"html",data:params,complete:function(res,status){if(status=="success"||status=="notmodified")self.html(selector?jQuery("
    ").append(res.responseText.replace(//g,"")).find(selector):res.responseText);self.each(callback,[res.responseText,status,res]);}});return this;},serialize:function(){return jQuery.param(this.serializeArray());},serializeArray:function(){return this.map(function(){return jQuery.nodeName(this,"form")?jQuery.makeArray(this.elements):this;}).filter(function(){return this.name&&!this.disabled&&(this.checked||/select|textarea/i.test(this.nodeName)||/text|hidden|password/i.test(this.type));}).map(function(i,elem){var val=jQuery(this).val();return val==null?null:val.constructor==Array?jQuery.map(val,function(val,i){return{name:elem.name,value:val};}):{name:elem.name,value:val};}).get();}});jQuery.each("ajaxStart,ajaxStop,ajaxComplete,ajaxError,ajaxSuccess,ajaxSend".split(","),function(i,o){jQuery.fn[o]=function(f){return this.bind(o,f);};});var jsc=now();jQuery.extend({get:function(url,data,callback,type){if(jQuery.isFunction(data)){callback=data;data=null;}return jQuery.ajax({type:"GET",url:url,data:data,success:callback,dataType:type});},getScript:function(url,callback){return jQuery.get(url,null,callback,"script");},getJSON:function(url,data,callback){return jQuery.get(url,data,callback,"json");},post:function(url,data,callback,type){if(jQuery.isFunction(data)){callback=data;data={};}return jQuery.ajax({type:"POST",url:url,data:data,success:callback,dataType:type});},ajaxSetup:function(settings){jQuery.extend(jQuery.ajaxSettings,settings);},ajaxSettings:{url:location.href,global:true,type:"GET",timeout:0,contentType:"application/x-www-form-urlencoded",processData:true,async:true,data:null,username:null,password:null,accepts:{xml:"application/xml, text/xml",html:"text/html",script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},lastModified:{},ajax:function(s){s=jQuery.extend(true,s,jQuery.extend(true,{},jQuery.ajaxSettings,s));var jsonp,jsre=/=\?(&|$)/g,status,data,type=s.type.toUpperCase();if(s.data&&s.processData&&typeof s.data!="string")s.data=jQuery.param(s.data);if(s.dataType=="jsonp"){if(type=="GET"){if(!s.url.match(jsre))s.url+=(s.url.match(/\?/)?"&":"?")+(s.jsonp||"callback")+"=?";}else if(!s.data||!s.data.match(jsre))s.data=(s.data?s.data+"&":"")+(s.jsonp||"callback")+"=?";s.dataType="json";}if(s.dataType=="json"&&(s.data&&s.data.match(jsre)||s.url.match(jsre))){jsonp="jsonp"+jsc++;if(s.data)s.data=(s.data+"").replace(jsre,"="+jsonp+"$1");s.url=s.url.replace(jsre,"="+jsonp+"$1");s.dataType="script";window[jsonp]=function(tmp){data=tmp;success();complete();window[jsonp]=undefined;try{delete window[jsonp];}catch(e){}if(head)head.removeChild(script);};}if(s.dataType=="script"&&s.cache==null)s.cache=false;if(s.cache===false&&type=="GET"){var ts=now();var ret=s.url.replace(/(\?|&)_=.*?(&|$)/,"$1_="+ts+"$2");s.url=ret+((ret==s.url)?(s.url.match(/\?/)?"&":"?")+"_="+ts:"");}if(s.data&&type=="GET"){s.url+=(s.url.match(/\?/)?"&":"?")+s.data;s.data=null;}if(s.global&&!jQuery.active++)jQuery.event.trigger("ajaxStart");var remote=/^(?:\w+:)?\/\/([^\/?#]+)/;if(s.dataType=="script"&&type=="GET"&&remote.test(s.url)&&remote.exec(s.url)[1]!=location.host){var head=document.getElementsByTagName("head")[0];var script=document.createElement("script");script.src=s.url;if(s.scriptCharset)script.charset=s.scriptCharset;if(!jsonp){var 
done=false;script.onload=script.onreadystatechange=function(){if(!done&&(!this.readyState||this.readyState=="loaded"||this.readyState=="complete")){done=true;success();complete();head.removeChild(script);}};}head.appendChild(script);return undefined;}var requestDone=false;var xhr=window.ActiveXObject?new ActiveXObject("Microsoft.XMLHTTP"):new XMLHttpRequest();if(s.username)xhr.open(type,s.url,s.async,s.username,s.password);else -xhr.open(type,s.url,s.async);try{if(s.data)xhr.setRequestHeader("Content-Type",s.contentType);if(s.ifModified)xhr.setRequestHeader("If-Modified-Since",jQuery.lastModified[s.url]||"Thu, 01 Jan 1970 00:00:00 GMT");xhr.setRequestHeader("X-Requested-With","XMLHttpRequest");xhr.setRequestHeader("Accept",s.dataType&&s.accepts[s.dataType]?s.accepts[s.dataType]+", */*":s.accepts._default);}catch(e){}if(s.beforeSend&&s.beforeSend(xhr,s)===false){s.global&&jQuery.active--;xhr.abort();return false;}if(s.global)jQuery.event.trigger("ajaxSend",[xhr,s]);var onreadystatechange=function(isTimeout){if(!requestDone&&xhr&&(xhr.readyState==4||isTimeout=="timeout")){requestDone=true;if(ival){clearInterval(ival);ival=null;}status=isTimeout=="timeout"&&"timeout"||!jQuery.httpSuccess(xhr)&&"error"||s.ifModified&&jQuery.httpNotModified(xhr,s.url)&&"notmodified"||"success";if(status=="success"){try{data=jQuery.httpData(xhr,s.dataType,s.dataFilter);}catch(e){status="parsererror";}}if(status=="success"){var modRes;try{modRes=xhr.getResponseHeader("Last-Modified");}catch(e){}if(s.ifModified&&modRes)jQuery.lastModified[s.url]=modRes;if(!jsonp)success();}else -jQuery.handleError(s,xhr,status);complete();if(s.async)xhr=null;}};if(s.async){var ival=setInterval(onreadystatechange,13);if(s.timeout>0)setTimeout(function(){if(xhr){xhr.abort();if(!requestDone)onreadystatechange("timeout");}},s.timeout);}try{xhr.send(s.data);}catch(e){jQuery.handleError(s,xhr,null,e);}if(!s.async)onreadystatechange();function success(){if(s.success)s.success(data,status);if(s.global)jQuery.event.trigger("ajaxSuccess",[xhr,s]);}function complete(){if(s.complete)s.complete(xhr,status);if(s.global)jQuery.event.trigger("ajaxComplete",[xhr,s]);if(s.global&&!--jQuery.active)jQuery.event.trigger("ajaxStop");}return xhr;},handleError:function(s,xhr,status,e){if(s.error)s.error(xhr,status,e);if(s.global)jQuery.event.trigger("ajaxError",[xhr,s,e]);},active:0,httpSuccess:function(xhr){try{return!xhr.status&&location.protocol=="file:"||(xhr.status>=200&&xhr.status<300)||xhr.status==304||xhr.status==1223||jQuery.browser.safari&&xhr.status==undefined;}catch(e){}return false;},httpNotModified:function(xhr,url){try{var xhrRes=xhr.getResponseHeader("Last-Modified");return xhr.status==304||xhrRes==jQuery.lastModified[url]||jQuery.browser.safari&&xhr.status==undefined;}catch(e){}return false;},httpData:function(xhr,type,filter){var ct=xhr.getResponseHeader("content-type"),xml=type=="xml"||!type&&ct&&ct.indexOf("xml")>=0,data=xml?xhr.responseXML:xhr.responseText;if(xml&&data.documentElement.tagName=="parsererror")throw"parsererror";if(filter)data=filter(data,type);if(type=="script")jQuery.globalEval(data);if(type=="json")data=eval("("+data+")");return data;},param:function(a){var s=[];if(a.constructor==Array||a.jquery)jQuery.each(a,function(){s.push(encodeURIComponent(this.name)+"="+encodeURIComponent(this.value));});else -for(var j in a)if(a[j]&&a[j].constructor==Array)jQuery.each(a[j],function(){s.push(encodeURIComponent(j)+"="+encodeURIComponent(this));});else 
-s.push(encodeURIComponent(j)+"="+encodeURIComponent(jQuery.isFunction(a[j])?a[j]():a[j]));return s.join("&").replace(/%20/g,"+");}});jQuery.fn.extend({show:function(speed,callback){return speed?this.animate({height:"show",width:"show",opacity:"show"},speed,callback):this.filter(":hidden").each(function(){this.style.display=this.oldblock||"";if(jQuery.css(this,"display")=="none"){var elem=jQuery("<"+this.tagName+" />").appendTo("body");this.style.display=elem.css("display");if(this.style.display=="none")this.style.display="block";elem.remove();}}).end();},hide:function(speed,callback){return speed?this.animate({height:"hide",width:"hide",opacity:"hide"},speed,callback):this.filter(":visible").each(function(){this.oldblock=this.oldblock||jQuery.css(this,"display");this.style.display="none";}).end();},_toggle:jQuery.fn.toggle,toggle:function(fn,fn2){return jQuery.isFunction(fn)&&jQuery.isFunction(fn2)?this._toggle.apply(this,arguments):fn?this.animate({height:"toggle",width:"toggle",opacity:"toggle"},fn,fn2):this.each(function(){jQuery(this)[jQuery(this).is(":hidden")?"show":"hide"]();});},slideDown:function(speed,callback){return this.animate({height:"show"},speed,callback);},slideUp:function(speed,callback){return this.animate({height:"hide"},speed,callback);},slideToggle:function(speed,callback){return this.animate({height:"toggle"},speed,callback);},fadeIn:function(speed,callback){return this.animate({opacity:"show"},speed,callback);},fadeOut:function(speed,callback){return this.animate({opacity:"hide"},speed,callback);},fadeTo:function(speed,to,callback){return this.animate({opacity:to},speed,callback);},animate:function(prop,speed,easing,callback){var optall=jQuery.speed(speed,easing,callback);return this[optall.queue===false?"each":"queue"](function(){if(this.nodeType!=1)return false;var opt=jQuery.extend({},optall),p,hidden=jQuery(this).is(":hidden"),self=this;for(p in prop){if(prop[p]=="hide"&&hidden||prop[p]=="show"&&!hidden)return opt.complete.call(this);if(p=="height"||p=="width"){opt.display=jQuery.css(this,"display");opt.overflow=this.style.overflow;}}if(opt.overflow!=null)this.style.overflow="hidden";opt.curAnim=jQuery.extend({},prop);jQuery.each(prop,function(name,val){var e=new jQuery.fx(self,opt,name);if(/toggle|show|hide/.test(val))e[val=="toggle"?hidden?"show":"hide":val](prop);else{var parts=val.toString().match(/^([+-]=)?([\d+-.]+)(.*)$/),start=e.cur(true)||0;if(parts){var end=parseFloat(parts[2]),unit=parts[3]||"px";if(unit!="px"){self.style[name]=(end||1)+unit;start=((end||1)/e.cur(true))*start;self.style[name]=start+unit;}if(parts[1])end=((parts[1]=="-="?-1:1)*end)+start;e.custom(start,end,unit);}else -e.custom(start,val,"");}});return true;});},queue:function(type,fn){if(jQuery.isFunction(type)||(type&&type.constructor==Array)){fn=type;type="fx";}if(!type||(typeof type=="string"&&!fn))return queue(this[0],type);return this.each(function(){if(fn.constructor==Array)queue(this,type,fn);else{queue(this,type).push(fn);if(queue(this,type).length==1)fn.call(this);}});},stop:function(clearQueue,gotoEnd){var timers=jQuery.timers;if(clearQueue)this.queue([]);this.each(function(){for(var i=timers.length-1;i>=0;i--)if(timers[i].elem==this){if(gotoEnd)timers[i](true);timers.splice(i,1);}});if(!gotoEnd)this.dequeue();return this;}});var queue=function(elem,type,array){if(elem){type=type||"fx";var q=jQuery.data(elem,type+"queue");if(!q||array)q=jQuery.data(elem,type+"queue",jQuery.makeArray(array));}return q;};jQuery.fn.dequeue=function(type){type=type||"fx";return 
this.each(function(){var q=queue(this,type);q.shift();if(q.length)q[0].call(this);});};jQuery.extend({speed:function(speed,easing,fn){var opt=speed&&speed.constructor==Object?speed:{complete:fn||!fn&&easing||jQuery.isFunction(speed)&&speed,duration:speed,easing:fn&&easing||easing&&easing.constructor!=Function&&easing};opt.duration=(opt.duration&&opt.duration.constructor==Number?opt.duration:jQuery.fx.speeds[opt.duration])||jQuery.fx.speeds.def;opt.old=opt.complete;opt.complete=function(){if(opt.queue!==false)jQuery(this).dequeue();if(jQuery.isFunction(opt.old))opt.old.call(this);};return opt;},easing:{linear:function(p,n,firstNum,diff){return firstNum+diff*p;},swing:function(p,n,firstNum,diff){return((-Math.cos(p*Math.PI)/2)+0.5)*diff+firstNum;}},timers:[],timerId:null,fx:function(elem,options,prop){this.options=options;this.elem=elem;this.prop=prop;if(!options.orig)options.orig={};}});jQuery.fx.prototype={update:function(){if(this.options.step)this.options.step.call(this.elem,this.now,this);(jQuery.fx.step[this.prop]||jQuery.fx.step._default)(this);if(this.prop=="height"||this.prop=="width")this.elem.style.display="block";},cur:function(force){if(this.elem[this.prop]!=null&&this.elem.style[this.prop]==null)return this.elem[this.prop];var r=parseFloat(jQuery.css(this.elem,this.prop,force));return r&&r>-10000?r:parseFloat(jQuery.curCSS(this.elem,this.prop))||0;},custom:function(from,to,unit){this.startTime=now();this.start=from;this.end=to;this.unit=unit||this.unit||"px";this.now=this.start;this.pos=this.state=0;this.update();var self=this;function t(gotoEnd){return self.step(gotoEnd);}t.elem=this.elem;jQuery.timers.push(t);if(jQuery.timerId==null){jQuery.timerId=setInterval(function(){var timers=jQuery.timers;for(var i=0;ithis.options.duration+this.startTime){this.now=this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;var done=true;for(var i in this.options.curAnim)if(this.options.curAnim[i]!==true)done=false;if(done){if(this.options.display!=null){this.elem.style.overflow=this.options.overflow;this.elem.style.display=this.options.display;if(jQuery.css(this.elem,"display")=="none")this.elem.style.display="block";}if(this.options.hide)this.elem.style.display="none";if(this.options.hide||this.options.show)for(var p in this.options.curAnim)jQuery.attr(this.elem.style,p,this.options.orig[p]);}if(done)this.options.complete.call(this.elem);return false;}else{var n=t-this.startTime;this.state=n/this.options.duration;this.pos=jQuery.easing[this.options.easing||(jQuery.easing.swing?"swing":"linear")](this.state,n,0,1,this.options.duration);this.now=this.start+((this.end-this.start)*this.pos);this.update();}return true;}};jQuery.extend(jQuery.fx,{speeds:{slow:600,fast:200,def:400},step:{scrollLeft:function(fx){fx.elem.scrollLeft=fx.now;},scrollTop:function(fx){fx.elem.scrollTop=fx.now;},opacity:function(fx){jQuery.attr(fx.elem.style,"opacity",fx.now);},_default:function(fx){fx.elem.style[fx.prop]=fx.now+fx.unit;}}});jQuery.fn.offset=function(){var left=0,top=0,elem=this[0],results;if(elem)with(jQuery.browser){var parent=elem.parentNode,offsetChild=elem,offsetParent=elem.offsetParent,doc=elem.ownerDocument,safari2=safari&&parseInt(version)<522&&!/adobeair/i.test(userAgent),css=jQuery.curCSS,fixed=css(elem,"position")=="fixed";if(elem.getBoundingClientRect){var 
box=elem.getBoundingClientRect();add(box.left+Math.max(doc.documentElement.scrollLeft,doc.body.scrollLeft),box.top+Math.max(doc.documentElement.scrollTop,doc.body.scrollTop));add(-doc.documentElement.clientLeft,-doc.documentElement.clientTop);}else{add(elem.offsetLeft,elem.offsetTop);while(offsetParent){add(offsetParent.offsetLeft,offsetParent.offsetTop);if(mozilla&&!/^t(able|d|h)$/i.test(offsetParent.tagName)||safari&&!safari2)border(offsetParent);if(!fixed&&css(offsetParent,"position")=="fixed")fixed=true;offsetChild=/^body$/i.test(offsetParent.tagName)?offsetChild:offsetParent;offsetParent=offsetParent.offsetParent;}while(parent&&parent.tagName&&!/^body|html$/i.test(parent.tagName)){if(!/^inline|table.*$/i.test(css(parent,"display")))add(-parent.scrollLeft,-parent.scrollTop);if(mozilla&&css(parent,"overflow")!="visible")border(parent);parent=parent.parentNode;}if((safari2&&(fixed||css(offsetChild,"position")=="absolute"))||(mozilla&&css(offsetChild,"position")!="absolute"))add(-doc.body.offsetLeft,-doc.body.offsetTop);if(fixed)add(Math.max(doc.documentElement.scrollLeft,doc.body.scrollLeft),Math.max(doc.documentElement.scrollTop,doc.body.scrollTop));}results={top:top,left:left};}function border(elem){add(jQuery.curCSS(elem,"borderLeftWidth",true),jQuery.curCSS(elem,"borderTopWidth",true));}function add(l,t){left+=parseInt(l,10)||0;top+=parseInt(t,10)||0;}return results;};jQuery.fn.extend({position:function(){var left=0,top=0,results;if(this[0]){var offsetParent=this.offsetParent(),offset=this.offset(),parentOffset=/^body|html$/i.test(offsetParent[0].tagName)?{top:0,left:0}:offsetParent.offset();offset.top-=num(this,'marginTop');offset.left-=num(this,'marginLeft');parentOffset.top+=num(offsetParent,'borderTopWidth');parentOffset.left+=num(offsetParent,'borderLeftWidth');results={top:offset.top-parentOffset.top,left:offset.left-parentOffset.left};}return results;},offsetParent:function(){var offsetParent=this[0].offsetParent;while(offsetParent&&(!/^body|html$/i.test(offsetParent.tagName)&&jQuery.css(offsetParent,'position')=='static'))offsetParent=offsetParent.offsetParent;return jQuery(offsetParent);}});jQuery.each(['Left','Top'],function(i,name){var method='scroll'+name;jQuery.fn[method]=function(val){if(!this[0])return;return val!=undefined?this.each(function(){this==window||this==document?window.scrollTo(!i?val:jQuery(window).scrollLeft(),i?val:jQuery(window).scrollTop()):this[method]=val;}):this[0]==window||this[0]==document?self[i?'pageYOffset':'pageXOffset']||jQuery.boxModel&&document.documentElement[method]||document.body[method]:this[0][method];};});jQuery.each(["Height","Width"],function(i,name){var tl=i?"Left":"Top",br=i?"Right":"Bottom";jQuery.fn["inner"+name]=function(){return this[name.toLowerCase()]()+num(this,"padding"+tl)+num(this,"padding"+br);};jQuery.fn["outer"+name]=function(margin){return this["inner"+name]()+num(this,"border"+tl+"Width")+num(this,"border"+br+"Width")+(margin?num(this,"margin"+tl)+num(this,"margin"+br):0);};});})(); \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/static/minus.png Binary file buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/static/minus.png has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/static/navigation.png Binary file buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/static/navigation.png has changed diff -r 
be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/static/plus.png Binary file buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/static/plus.png has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/static/rightsidebar.css --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/static/rightsidebar.css Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,16 +0,0 @@ -/** - * Sphinx Doc Design -- Right Side Bar Overrides - */ - - -div.sphinxsidebar { - float: right; -} - -div.bodywrapper { - margin: 0 230px 0 0; -} - -div.inlinecomments { - right: 250px; -} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/static/searchtools.js --- a/buildframework/helium/external/python/lib/2.5/Sphinx-0.5.1-py2.5.egg/sphinx/static/searchtools.js Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,467 +0,0 @@ -/** - * helper function to return a node containing the - * search summary for a given text. keywords is a list - * of stemmed words, hlwords is the list of normal, unstemmed - * words. the first one is used to find the occurance, the - * latter for highlighting it. - */ - -jQuery.makeSearchSummary = function(text, keywords, hlwords) { - var textLower = text.toLowerCase(); - var start = 0; - $.each(keywords, function() { - var i = textLower.indexOf(this.toLowerCase()); - if (i > -1) - start = i; - }); - start = Math.max(start - 120, 0); - var excerpt = ((start > 0) ? '...' : '') + - $.trim(text.substr(start, 240)) + - ((start + 240 - text.length) ? '...' : ''); - var rv = $('
    ').text(excerpt); - $.each(hlwords, function() { - rv = rv.highlightText(this, 'highlight'); - }); - return rv; -} - -/** - * Porter Stemmer - */ -var PorterStemmer = function() { - - var step2list = { - ational: 'ate', - tional: 'tion', - enci: 'ence', - anci: 'ance', - izer: 'ize', - bli: 'ble', - alli: 'al', - entli: 'ent', - eli: 'e', - ousli: 'ous', - ization: 'ize', - ation: 'ate', - ator: 'ate', - alism: 'al', - iveness: 'ive', - fulness: 'ful', - ousness: 'ous', - aliti: 'al', - iviti: 'ive', - biliti: 'ble', - logi: 'log' - }; - - var step3list = { - icate: 'ic', - ative: '', - alize: 'al', - iciti: 'ic', - ical: 'ic', - ful: '', - ness: '' - }; - - var c = "[^aeiou]"; // consonant - var v = "[aeiouy]"; // vowel - var C = c + "[^aeiouy]*"; // consonant sequence - var V = v + "[aeiou]*"; // vowel sequence - - var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0 - var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 - var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 - var s_v = "^(" + C + ")?" + v; // vowel in stem - - this.stemWord = function (w) { - var stem; - var suffix; - var firstch; - var origword = w; - - if (w.length < 3) - return w; - - var re; - var re2; - var re3; - var re4; - - firstch = w.substr(0,1); - if (firstch == "y") - w = firstch.toUpperCase() + w.substr(1); - - // Step 1a - re = /^(.+?)(ss|i)es$/; - re2 = /^(.+?)([^s])s$/; - - if (re.test(w)) - w = w.replace(re,"$1$2"); - else if (re2.test(w)) - w = w.replace(re2,"$1$2"); - - // Step 1b - re = /^(.+?)eed$/; - re2 = /^(.+?)(ed|ing)$/; - if (re.test(w)) { - var fp = re.exec(w); - re = new RegExp(mgr0); - if (re.test(fp[1])) { - re = /.$/; - w = w.replace(re,""); - } - } - else if (re2.test(w)) { - var fp = re2.exec(w); - stem = fp[1]; - re2 = new RegExp(s_v); - if (re2.test(stem)) { - w = stem; - re2 = /(at|bl|iz)$/; - re3 = new RegExp("([^aeiouylsz])\\1$"); - re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); - if (re2.test(w)) - w = w + "e"; - else if (re3.test(w)) { - re = /.$/; - w = w.replace(re,""); - } - else if (re4.test(w)) - w = w + "e"; - } - } - - // Step 1c - re = /^(.+?)y$/; - if (re.test(w)) { - var fp = re.exec(w); - stem = fp[1]; - re = new RegExp(s_v); - if (re.test(stem)) - w = stem + "i"; - } - - // Step 2 - re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; - if (re.test(w)) { - var fp = re.exec(w); - stem = fp[1]; - suffix = fp[2]; - re = new RegExp(mgr0); - if (re.test(stem)) - w = stem + step2list[suffix]; - } - - // Step 3 - re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; - if (re.test(w)) { - var fp = re.exec(w); - stem = fp[1]; - suffix = fp[2]; - re = new RegExp(mgr0); - if (re.test(stem)) - w = stem + step3list[suffix]; - } - - // Step 4 - re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; - re2 = /^(.+?)(s|t)(ion)$/; - if (re.test(w)) { - var fp = re.exec(w); - stem = fp[1]; - re = new RegExp(mgr1); - if (re.test(stem)) - w = stem; - } - else if (re2.test(w)) { - var fp = re2.exec(w); - stem = fp[1] + fp[2]; - re2 = new RegExp(mgr1); - if (re2.test(stem)) - w = stem; - } - - // Step 5 - re = /^(.+?)e$/; - if (re.test(w)) { - var fp = re.exec(w); - stem = fp[1]; - re = new RegExp(mgr1); - re2 = new RegExp(meq1); - re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); - if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) - w = stem; - } - re = /ll$/; - re2 = new RegExp(mgr1); - if (re.test(w) && 
re2.test(w)) { - re = /.$/; - w = w.replace(re,""); - } - - // and turn initial Y back to y - if (firstch == "y") - w = firstch.toLowerCase() + w.substr(1); - return w; - } -} - - -/** - * Search Module - */ -var Search = { - - _index : null, - _queued_query : null, - _pulse_status : -1, - - init : function() { - var params = $.getQueryParameters(); - if (params.q) { - var query = params.q[0]; - $('input[@name="q"]')[0].value = query; - this.performSearch(query); - } - }, - - /** - * Sets the index - */ - setIndex : function(index) { - var q; - this._index = index; - if ((q = this._queued_query) !== null) { - this._queued_query = null; - Search.query(q); - } - }, - - hasIndex : function() { - return this._index !== null; - }, - - deferQuery : function(query) { - this._queued_query = query; - }, - - stopPulse : function() { - this._pulse_status = 0; - }, - - startPulse : function() { - if (this._pulse_status >= 0) - return; - function pulse() { - Search._pulse_status = (Search._pulse_status + 1) % 4; - var dotString = ''; - for (var i = 0; i < Search._pulse_status; i++) - dotString += '.'; - Search.dots.text(dotString); - if (Search._pulse_status > -1) - window.setTimeout(pulse, 500); - }; - pulse(); - }, - - /** - * perform a search for something - */ - performSearch : function(query) { - // create the required interface elements - this.out = $('#search-results'); - this.title = $('

<h2>' + _('Searching') + '</h2>').appendTo(this.out); - this.dots = $('<span></span>').appendTo(this.title); - this.status = $('<p style="display: none"></p>').appendTo(this.out); - this.output = $('
    \n') - # skip content-type meta tag with interpolated charset value: - self.html_head.extend(self.head[1:]) - self.html_body.extend(self.body_prefix[1:] + self.body_pre_docinfo - + self.docinfo + self.body - + self.body_suffix[:-1]) - assert not self.context, 'len(context) = %s' % len(self.context) - - def visit_emphasis(self, node): - self.body.append('') - - def depart_emphasis(self, node): - self.body.append('') - - def visit_entry(self, node): - atts = {'class': []} - if isinstance(node.parent.parent, nodes.thead): - atts['class'].append('head') - if node.parent.parent.parent.stubs[node.parent.column]: - # "stubs" list is an attribute of the tgroup element - atts['class'].append('stub') - if atts['class']: - tagname = 'th' - atts['class'] = ' '.join(atts['class']) - else: - tagname = 'td' - del atts['class'] - node.parent.column += 1 - if node.has_key('morerows'): - atts['rowspan'] = node['morerows'] + 1 - if node.has_key('morecols'): - atts['colspan'] = node['morecols'] + 1 - node.parent.column += node['morecols'] - self.body.append(self.starttag(node, tagname, '', **atts)) - self.context.append('\n' % tagname.lower()) - if len(node) == 0: # empty cell - self.body.append(' ') - self.set_first_last(node) - - def depart_entry(self, node): - self.body.append(self.context.pop()) - - def visit_enumerated_list(self, node): - """ - The 'start' attribute does not conform to HTML 4.01's strict.dtd, but - CSS1 doesn't help. CSS2 isn't widely enough supported yet to be - usable. - """ - atts = {} - if node.has_key('start'): - atts['start'] = node['start'] - if node.has_key('enumtype'): - atts['class'] = node['enumtype'] - # @@@ To do: prefix, suffix. How? Change prefix/suffix to a - # single "format" attribute? Use CSS2? - old_compact_simple = self.compact_simple - self.context.append((self.compact_simple, self.compact_p)) - self.compact_p = None - self.compact_simple = self.is_compactable(node) - if self.compact_simple and not old_compact_simple: - atts['class'] = (atts.get('class', '') + ' simple').strip() - self.body.append(self.starttag(node, 'ol', **atts)) - - def depart_enumerated_list(self, node): - self.compact_simple, self.compact_p = self.context.pop() - self.body.append('\n') - - def visit_field(self, node): - self.body.append(self.starttag(node, 'tr', '', CLASS='field')) - - def depart_field(self, node): - self.body.append('\n') - - def visit_field_body(self, node): - self.body.append(self.starttag(node, 'td', '', CLASS='field-body')) - self.set_class_on_child(node, 'first', 0) - field = node.parent - if (self.compact_field_list or - isinstance(field.parent, nodes.docinfo) or - field.parent.index(field) == len(field.parent) - 1): - # If we are in a compact list, the docinfo, or if this is - # the last field of the field list, do not add vertical - # space after last element. 
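
All of the visit_*/depart_* methods in the writer above are callbacks of docutils' visitor walk: the parsed document tree invokes them as it is traversed, and this writer's callbacks append HTML fragments to self.body. A minimal, self-contained sketch of the same pattern, where the class name and the sample reST text are illustrative only:

    from docutils import nodes
    from docutils.core import publish_doctree

    class ParagraphCounter(nodes.SparseNodeVisitor):
        """Count paragraphs; node types without a visit_* method are no-ops."""
        def __init__(self, document):
            nodes.SparseNodeVisitor.__init__(self, document)
            self.count = 0

        def visit_paragraph(self, node):
            self.count += 1

    doctree = publish_doctree("First paragraph.\n\nSecond paragraph.\n")
    visitor = ParagraphCounter(doctree)
    doctree.walkabout(visitor)
    print(visitor.count)  # expected output: 2
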
- self.set_class_on_child(node, 'last', -1) - - def depart_field_body(self, node): - self.body.append('\n') - - def visit_field_list(self, node): - self.context.append((self.compact_field_list, self.compact_p)) - self.compact_p = None - if 'compact' in node['classes']: - self.compact_field_list = 1 - elif (self.settings.compact_field_lists - and 'open' not in node['classes']): - self.compact_field_list = 1 - if self.compact_field_list: - for field in node: - field_body = field[-1] - assert isinstance(field_body, nodes.field_body) - children = [n for n in field_body - if not isinstance(n, nodes.Invisible)] - if not (len(children) == 0 or - len(children) == 1 and - isinstance(children[0], nodes.paragraph)): - self.compact_field_list = 0 - break - self.body.append(self.starttag(node, 'table', frame='void', - rules='none', - CLASS='docutils field-list')) - self.body.append('\n' - '\n' - '\n') - - def depart_field_list(self, node): - self.body.append('\n\n') - self.compact_field_list, self.compact_p = self.context.pop() - - def visit_field_name(self, node): - atts = {} - if self.in_docinfo: - atts['class'] = 'docinfo-name' - else: - atts['class'] = 'field-name' - if ( self.settings.field_name_limit - and len(node.astext()) > self.settings.field_name_limit): - atts['colspan'] = 2 - self.context.append('\n ') - else: - self.context.append('') - self.body.append(self.starttag(node, 'th', '', **atts)) - - def depart_field_name(self, node): - self.body.append(':') - self.body.append(self.context.pop()) - - def visit_figure(self, node): - atts = {'class': 'figure'} - if node.get('width'): - atts['style'] = 'width: %spx' % node['width'] - if node.get('align'): - atts['align'] = node['align'] - self.body.append(self.starttag(node, 'div', **atts)) - - def depart_figure(self, node): - self.body.append('\n') - - def visit_footer(self, node): - self.context.append(len(self.body)) - - def depart_footer(self, node): - start = self.context.pop() - footer = [self.starttag(node, 'div', CLASS='footer'), - '\n'] - footer.extend(self.body[start:]) - footer.append('\n\n') - self.footer.extend(footer) - self.body_suffix[:0] = footer - del self.body[start:] - - def visit_footnote(self, node): - self.body.append(self.starttag(node, 'table', - CLASS='docutils footnote', - frame="void", rules="none")) - self.body.append('\n' - '\n' - '') - self.footnote_backrefs(node) - - def footnote_backrefs(self, node): - backlinks = [] - backrefs = node['backrefs'] - if self.settings.footnote_backlinks and backrefs: - if len(backrefs) == 1: - self.context.append('') - self.context.append('') - self.context.append('' - % backrefs[0]) - else: - i = 1 - for backref in backrefs: - backlinks.append('%s' - % (backref, i)) - i += 1 - self.context.append('(%s) ' % ', '.join(backlinks)) - self.context += ['', ''] - else: - self.context.append('') - self.context += ['', ''] - # If the node does not only consist of a label. - if len(node) > 1: - # If there are preceding backlinks, we do not set class - # 'first', because we need to retain the top-margin. 
- if not backlinks: - node[1]['classes'].append('first') - node[-1]['classes'].append('last') - - def depart_footnote(self, node): - self.body.append('\n' - '\n\n') - - def visit_footnote_reference(self, node): - href = '#' + node['refid'] - format = self.settings.footnote_references - if format == 'brackets': - suffix = '[' - self.context.append(']') - else: - assert format == 'superscript' - suffix = '' - self.context.append('') - self.body.append(self.starttag(node, 'a', suffix, - CLASS='footnote-reference', href=href)) - - def depart_footnote_reference(self, node): - self.body.append(self.context.pop() + '') - - def visit_generated(self, node): - pass - - def depart_generated(self, node): - pass - - def visit_header(self, node): - self.context.append(len(self.body)) - - def depart_header(self, node): - start = self.context.pop() - header = [self.starttag(node, 'div', CLASS='header')] - header.extend(self.body[start:]) - header.append('\n
    \n\n') - self.body_prefix.extend(header) - self.header.extend(header) - del self.body[start:] - - def visit_image(self, node): - atts = {} - atts['src'] = node['uri'] - if node.has_key('width'): - atts['width'] = node['width'] - if node.has_key('height'): - atts['height'] = node['height'] - if node.has_key('scale'): - if Image and not (node.has_key('width') - and node.has_key('height')): - try: - im = Image.open(str(atts['src'])) - except (IOError, # Source image can't be found or opened - UnicodeError): # PIL doesn't like Unicode paths. - pass - else: - if not atts.has_key('width'): - atts['width'] = str(im.size[0]) - if not atts.has_key('height'): - atts['height'] = str(im.size[1]) - del im - for att_name in 'width', 'height': - if atts.has_key(att_name): - match = re.match(r'([0-9.]+)(\S*)$', atts[att_name]) - assert match - atts[att_name] = '%s%s' % ( - float(match.group(1)) * (float(node['scale']) / 100), - match.group(2)) - style = [] - for att_name in 'width', 'height': - if atts.has_key(att_name): - if re.match(r'^[0-9.]+$', atts[att_name]): - # Interpret unitless values as pixels. - atts[att_name] += 'px' - style.append('%s: %s;' % (att_name, atts[att_name])) - del atts[att_name] - if style: - atts['style'] = ' '.join(style) - atts['alt'] = node.get('alt', atts['src']) - if (isinstance(node.parent, nodes.TextElement) or - (isinstance(node.parent, nodes.reference) and - not isinstance(node.parent.parent, nodes.TextElement))): - # Inline context or surrounded by .... - suffix = '' - else: - suffix = '\n' - if node.has_key('align'): - if node['align'] == 'center': - # "align" attribute is set in surrounding "div" element. - self.body.append('
<div align="center" class="align-center">') - self.context.append('</div>
    \n') - suffix = '' - else: - # "align" attribute is set in "img" element. - atts['align'] = node['align'] - self.context.append('') - atts['class'] = 'align-%s' % node['align'] - else: - self.context.append('') - self.body.append(self.emptytag(node, 'img', suffix, **atts)) - - def depart_image(self, node): - self.body.append(self.context.pop()) - - def visit_inline(self, node): - self.body.append(self.starttag(node, 'span', '')) - - def depart_inline(self, node): - self.body.append('') - - def visit_label(self, node): - # Context added in footnote_backrefs. - self.body.append(self.starttag(node, 'td', '%s[' % self.context.pop(), - CLASS='label')) - - def depart_label(self, node): - # Context added in footnote_backrefs. - self.body.append(']%s%s' % (self.context.pop(), self.context.pop())) - - def visit_legend(self, node): - self.body.append(self.starttag(node, 'div', CLASS='legend')) - - def depart_legend(self, node): - self.body.append('\n') - - def visit_line(self, node): - self.body.append(self.starttag(node, 'div', suffix='', CLASS='line')) - if not len(node): - self.body.append('
    ') - - def depart_line(self, node): - self.body.append('\n') - - def visit_line_block(self, node): - self.body.append(self.starttag(node, 'div', CLASS='line-block')) - - def depart_line_block(self, node): - self.body.append('\n') - - def visit_list_item(self, node): - self.body.append(self.starttag(node, 'li', '')) - if len(node): - node[0]['classes'].append('first') - - def depart_list_item(self, node): - self.body.append('
  • \n') - - def visit_literal(self, node): - """Process text to prevent tokens from wrapping.""" - self.body.append( - self.starttag(node, 'tt', '', CLASS='docutils literal')) - text = node.astext() - for token in self.words_and_spaces.findall(text): - if token.strip(): - # Protect text like "--an-option" from bad line wrapping: - self.body.append('%s' - % self.encode(token)) - elif token in ('\n', ' '): - # Allow breaks at whitespace: - self.body.append(token) - else: - # Protect runs of multiple spaces; the last space can wrap: - self.body.append(' ' * (len(token) - 1) + ' ') - self.body.append('') - # Content already processed: - raise nodes.SkipNode - - def visit_literal_block(self, node): - self.body.append(self.starttag(node, 'pre', CLASS='literal-block')) - - def depart_literal_block(self, node): - self.body.append('\n\n') - - def visit_meta(self, node): - meta = self.emptytag(node, 'meta', **node.non_default_attributes()) - self.add_meta(meta) - - def depart_meta(self, node): - pass - - def add_meta(self, tag): - self.meta.append(tag) - self.head.append(tag) - - def visit_option(self, node): - if self.context[-1]: - self.body.append(', ') - self.body.append(self.starttag(node, 'span', '', CLASS='option')) - - def depart_option(self, node): - self.body.append('') - self.context[-1] += 1 - - def visit_option_argument(self, node): - self.body.append(node.get('delimiter', ' ')) - self.body.append(self.starttag(node, 'var', '')) - - def depart_option_argument(self, node): - self.body.append('') - - def visit_option_group(self, node): - atts = {} - if ( self.settings.option_limit - and len(node.astext()) > self.settings.option_limit): - atts['colspan'] = 2 - self.context.append('\n ') - else: - self.context.append('') - self.body.append( - self.starttag(node, 'td', CLASS='option-group', **atts)) - self.body.append('') - self.context.append(0) # count number of options - - def depart_option_group(self, node): - self.context.pop() - self.body.append('\n') - self.body.append(self.context.pop()) - - def visit_option_list(self, node): - self.body.append( - self.starttag(node, 'table', CLASS='docutils option-list', - frame="void", rules="none")) - self.body.append('\n' - '\n' - '\n') - - def depart_option_list(self, node): - self.body.append('\n\n') - - def visit_option_list_item(self, node): - self.body.append(self.starttag(node, 'tr', '')) - - def depart_option_list_item(self, node): - self.body.append('\n') - - def visit_option_string(self, node): - pass - - def depart_option_string(self, node): - pass - - def visit_organization(self, node): - self.visit_docinfo_item(node, 'organization') - - def depart_organization(self, node): - self.depart_docinfo_item() - - def should_be_compact_paragraph(self, node): - """ - Determine if the
<p>
    tags around paragraph ``node`` can be omitted. - """ - if (isinstance(node.parent, nodes.document) or - isinstance(node.parent, nodes.compound)): - # Never compact paragraphs in document or compound. - return 0 - for key, value in node.attlist(): - if (node.is_not_default(key) and - not (key == 'classes' and value in - ([], ['first'], ['last'], ['first', 'last']))): - # Attribute which needs to survive. - return 0 - first = isinstance(node.parent[0], nodes.label) # skip label - for child in node.parent.children[first:]: - # only first paragraph can be compact - if isinstance(child, nodes.Invisible): - continue - if child is node: - break - return 0 - parent_length = len([n for n in node.parent if not isinstance( - n, (nodes.Invisible, nodes.label))]) - if ( self.compact_simple - or self.compact_field_list - or self.compact_p and parent_length == 1): - return 1 - return 0 - - def visit_paragraph(self, node): - if self.should_be_compact_paragraph(node): - self.context.append('') - else: - self.body.append(self.starttag(node, 'p', '')) - self.context.append('
</p>
    \n') - - def depart_paragraph(self, node): - self.body.append(self.context.pop()) - - def visit_problematic(self, node): - if node.hasattr('refid'): - self.body.append('' % node['refid']) - self.context.append('') - else: - self.context.append('') - self.body.append(self.starttag(node, 'span', '', CLASS='problematic')) - - def depart_problematic(self, node): - self.body.append('') - self.body.append(self.context.pop()) - - def visit_raw(self, node): - if 'html' in node.get('format', '').split(): - t = isinstance(node.parent, nodes.TextElement) and 'span' or 'div' - if node['classes']: - self.body.append(self.starttag(node, t, suffix='')) - self.body.append(node.astext()) - if node['classes']: - self.body.append('' % t) - # Keep non-HTML raw text out of output: - raise nodes.SkipNode - - def visit_reference(self, node): - atts = {'class': 'reference'} - if node.has_key('refuri'): - atts['href'] = node['refuri'] - if ( self.settings.cloak_email_addresses - and atts['href'].startswith('mailto:')): - atts['href'] = self.cloak_mailto(atts['href']) - self.in_mailto = 1 - atts['class'] += ' external' - else: - assert node.has_key('refid'), \ - 'References must have "refuri" or "refid" attribute.' - atts['href'] = '#' + node['refid'] - atts['class'] += ' internal' - if not isinstance(node.parent, nodes.TextElement): - assert len(node) == 1 and isinstance(node[0], nodes.image) - atts['class'] += ' image-reference' - self.body.append(self.starttag(node, 'a', '', **atts)) - - def depart_reference(self, node): - self.body.append('') - if not isinstance(node.parent, nodes.TextElement): - self.body.append('\n') - self.in_mailto = 0 - - def visit_revision(self, node): - self.visit_docinfo_item(node, 'revision', meta=None) - - def depart_revision(self, node): - self.depart_docinfo_item() - - def visit_row(self, node): - self.body.append(self.starttag(node, 'tr', '')) - node.column = 0 - - def depart_row(self, node): - self.body.append('\n') - - def visit_rubric(self, node): - self.body.append(self.starttag(node, 'p', '', CLASS='rubric')) - - def depart_rubric(self, node): - self.body.append('
</p>
    \n') - - def visit_section(self, node): - self.section_level += 1 - self.body.append( - self.starttag(node, 'div', CLASS='section')) - - def depart_section(self, node): - self.section_level -= 1 - self.body.append('\n') - - def visit_sidebar(self, node): - self.body.append( - self.starttag(node, 'div', CLASS='sidebar')) - self.set_first_last(node) - self.in_sidebar = 1 - - def depart_sidebar(self, node): - self.body.append('\n') - self.in_sidebar = None - - def visit_status(self, node): - self.visit_docinfo_item(node, 'status', meta=None) - - def depart_status(self, node): - self.depart_docinfo_item() - - def visit_strong(self, node): - self.body.append('') - - def depart_strong(self, node): - self.body.append('') - - def visit_subscript(self, node): - self.body.append(self.starttag(node, 'sub', '')) - - def depart_subscript(self, node): - self.body.append('') - - def visit_substitution_definition(self, node): - """Internal only.""" - raise nodes.SkipNode - - def visit_substitution_reference(self, node): - self.unimplemented_visit(node) - - def visit_subtitle(self, node): - if isinstance(node.parent, nodes.sidebar): - self.body.append(self.starttag(node, 'p', '', - CLASS='sidebar-subtitle')) - self.context.append('
</p>
    \n') - elif isinstance(node.parent, nodes.document): - self.body.append(self.starttag(node, 'h2', '', CLASS='subtitle')) - self.context.append('\n') - self.in_document_title = len(self.body) - elif isinstance(node.parent, nodes.section): - tag = 'h%s' % (self.section_level + self.initial_header_level - 1) - self.body.append( - self.starttag(node, tag, '', CLASS='section-subtitle') + - self.starttag({}, 'span', '', CLASS='section-subtitle')) - self.context.append('\n' % tag) - - def depart_subtitle(self, node): - self.body.append(self.context.pop()) - if self.in_document_title: - self.subtitle = self.body[self.in_document_title:-1] - self.in_document_title = 0 - self.body_pre_docinfo.extend(self.body) - self.html_subtitle.extend(self.body) - del self.body[:] - - def visit_superscript(self, node): - self.body.append(self.starttag(node, 'sup', '')) - - def depart_superscript(self, node): - self.body.append('') - - def visit_system_message(self, node): - self.body.append(self.starttag(node, 'div', CLASS='system-message')) - self.body.append('
<p class="system-message-title">
    ') - backref_text = '' - if len(node['backrefs']): - backrefs = node['backrefs'] - if len(backrefs) == 1: - backref_text = ('; backlink' - % backrefs[0]) - else: - i = 1 - backlinks = [] - for backref in backrefs: - backlinks.append('%s' % (backref, i)) - i += 1 - backref_text = ('; backlinks: %s' - % ', '.join(backlinks)) - if node.hasattr('line'): - line = ', line %s' % node['line'] - else: - line = '' - self.body.append('System Message: %s/%s ' - '(%s%s)%s
</p>
    \n' - % (node['type'], node['level'], - self.encode(node['source']), line, backref_text)) - - def depart_system_message(self, node): - self.body.append('\n') - - def visit_table(self, node): - self.body.append( - self.starttag(node, 'table', CLASS='docutils', border="1")) - - def depart_table(self, node): - self.body.append('\n') - - def visit_target(self, node): - if not (node.has_key('refuri') or node.has_key('refid') - or node.has_key('refname')): - self.body.append(self.starttag(node, 'span', '', CLASS='target')) - self.context.append('') - else: - self.context.append('') - - def depart_target(self, node): - self.body.append(self.context.pop()) - - def visit_tbody(self, node): - self.write_colspecs() - self.body.append(self.context.pop()) # '\n' or '' - self.body.append(self.starttag(node, 'tbody', valign='top')) - - def depart_tbody(self, node): - self.body.append('\n') - - def visit_term(self, node): - self.body.append(self.starttag(node, 'dt', '')) - - def depart_term(self, node): - """ - Leave the end tag to `self.visit_definition()`, in case there's a - classifier. - """ - pass - - def visit_tgroup(self, node): - # Mozilla needs : - self.body.append(self.starttag(node, 'colgroup')) - # Appended by thead or tbody: - self.context.append('\n') - node.stubs = [] - - def depart_tgroup(self, node): - pass - - def visit_thead(self, node): - self.write_colspecs() - self.body.append(self.context.pop()) # '\n' - # There may or may not be a ; this is for to use: - self.context.append('') - self.body.append(self.starttag(node, 'thead', valign='bottom')) - - def depart_thead(self, node): - self.body.append('\n') - - def visit_title(self, node): - """Only 6 section levels are supported by HTML.""" - check_id = 0 - close_tag = '
</p>
    \n' - if isinstance(node.parent, nodes.topic): - self.body.append( - self.starttag(node, 'p', '', CLASS='topic-title first')) - elif isinstance(node.parent, nodes.sidebar): - self.body.append( - self.starttag(node, 'p', '', CLASS='sidebar-title')) - elif isinstance(node.parent, nodes.Admonition): - self.body.append( - self.starttag(node, 'p', '', CLASS='admonition-title')) - elif isinstance(node.parent, nodes.table): - self.body.append( - self.starttag(node, 'caption', '')) - close_tag = '\n' - elif isinstance(node.parent, nodes.document): - self.body.append(self.starttag(node, 'h1', '', CLASS='title')) - close_tag = '\n' - self.in_document_title = len(self.body) - else: - assert isinstance(node.parent, nodes.section) - h_level = self.section_level + self.initial_header_level - 1 - atts = {} - if (len(node.parent) >= 2 and - isinstance(node.parent[1], nodes.subtitle)): - atts['CLASS'] = 'with-subtitle' - self.body.append( - self.starttag(node, 'h%s' % h_level, '', **atts)) - atts = {} - if node.hasattr('refid'): - atts['class'] = 'toc-backref' - atts['href'] = '#' + node['refid'] - if atts: - self.body.append(self.starttag({}, 'a', '', **atts)) - close_tag = '\n' % (h_level) - else: - close_tag = '\n' % (h_level) - self.context.append(close_tag) - - def depart_title(self, node): - self.body.append(self.context.pop()) - if self.in_document_title: - self.title = self.body[self.in_document_title:-1] - self.in_document_title = 0 - self.body_pre_docinfo.extend(self.body) - self.html_title.extend(self.body) - del self.body[:] - - def visit_title_reference(self, node): - self.body.append(self.starttag(node, 'cite', '')) - - def depart_title_reference(self, node): - self.body.append('') - - def visit_topic(self, node): - self.body.append(self.starttag(node, 'div', CLASS='topic')) - self.topic_classes = node['classes'] - - def depart_topic(self, node): - self.body.append('\n') - self.topic_classes = [] - - def visit_transition(self, node): - self.body.append(self.emptytag(node, 'hr', CLASS='docutils')) - - def depart_transition(self, node): - pass - - def visit_version(self, node): - self.visit_docinfo_item(node, 'version', meta=None) - - def depart_version(self, node): - self.depart_docinfo_item() - - def unimplemented_visit(self, node): - raise NotImplementedError('visiting unimplemented node type: %s' - % node.__class__.__name__) - - -class SimpleListChecker(nodes.GenericNodeVisitor): - - """ - Raise `nodes.NodeFound` if non-simple list item is encountered. - - Here "simple" means a list item containing nothing other than a single - paragraph, a simple list, or a paragraph followed by a simple list. 
- """ - - def default_visit(self, node): - raise nodes.NodeFound - - def visit_bullet_list(self, node): - pass - - def visit_enumerated_list(self, node): - pass - - def visit_list_item(self, node): - children = [] - for child in node.children: - if not isinstance(child, nodes.Invisible): - children.append(child) - if (children and isinstance(children[0], nodes.paragraph) - and (isinstance(children[-1], nodes.bullet_list) - or isinstance(children[-1], nodes.enumerated_list))): - children.pop() - if len(children) <= 1: - return - else: - raise nodes.NodeFound - - def visit_paragraph(self, node): - raise nodes.SkipNode - - def invisible_visit(self, node): - """Invisible nodes should be ignored.""" - raise nodes.SkipNode - - visit_comment = invisible_visit - visit_substitution_definition = invisible_visit - visit_target = invisible_visit - visit_pending = invisible_visit diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/html4css1/html4css1.css --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/html4css1/html4css1.css Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,273 +0,0 @@ -/* -:Author: David Goodger (goodger@python.org) -:Id: $Id: html4css1.css 5196 2007-06-03 20:25:28Z wiemann $ -:Copyright: This stylesheet has been placed in the public domain. - -Default cascading style sheet for the HTML output of Docutils. - -See http://docutils.sf.net/docs/howto/html-stylesheets.html for how to -customize this style sheet. -*/ - -/* used to remove borders from tables and images */ -.borderless, table.borderless td, table.borderless th { - border: 0 } - -table.borderless td, table.borderless th { - /* Override padding for "table.docutils td" with "! important". - The right padding separates the table cells. */ - padding: 0 0.5em 0 0 ! important } - -.first { - /* Override more specific margin styles with "! important". */ - margin-top: 0 ! important } - -.last, .with-subtitle { - margin-bottom: 0 ! important } - -.hidden { - display: none } - -a.toc-backref { - text-decoration: none ; - color: black } - -blockquote.epigraph { - margin: 2em 5em ; } - -dl.docutils dd { - margin-bottom: 0.5em } - -/* Uncomment (and remove this text!) to get bold-faced definition list terms -dl.docutils dt { - font-weight: bold } -*/ - -div.abstract { - margin: 2em 5em } - -div.abstract p.topic-title { - font-weight: bold ; - text-align: center } - -div.admonition, div.attention, div.caution, div.danger, div.error, -div.hint, div.important, div.note, div.tip, div.warning { - margin: 2em ; - border: medium outset ; - padding: 1em } - -div.admonition p.admonition-title, div.hint p.admonition-title, -div.important p.admonition-title, div.note p.admonition-title, -div.tip p.admonition-title { - font-weight: bold ; - font-family: sans-serif } - -div.attention p.admonition-title, div.caution p.admonition-title, -div.danger p.admonition-title, div.error p.admonition-title, -div.warning p.admonition-title { - color: red ; - font-weight: bold ; - font-family: sans-serif } - -/* Uncomment (and remove this text!) to get reduced vertical space in - compound paragraphs. 
-div.compound .compound-first, div.compound .compound-middle { - margin-bottom: 0.5em } - -div.compound .compound-last, div.compound .compound-middle { - margin-top: 0.5em } -*/ - -div.dedication { - margin: 2em 5em ; - text-align: center ; - font-style: italic } - -div.dedication p.topic-title { - font-weight: bold ; - font-style: normal } - -div.figure { - margin-left: 2em ; - margin-right: 2em } - -div.footer, div.header { - clear: both; - font-size: smaller } - -div.line-block { - display: block ; - margin-top: 1em ; - margin-bottom: 1em } - -div.line-block div.line-block { - margin-top: 0 ; - margin-bottom: 0 ; - margin-left: 1.5em } - -div.sidebar { - margin: 0 0 0.5em 1em ; - border: medium outset ; - padding: 1em ; - background-color: #ffffee ; - width: 40% ; - float: right ; - clear: right } - -div.sidebar p.rubric { - font-family: sans-serif ; - font-size: medium } - -div.system-messages { - margin: 5em } - -div.system-messages h1 { - color: red } - -div.system-message { - border: medium outset ; - padding: 1em } - -div.system-message p.system-message-title { - color: red ; - font-weight: bold } - -div.topic { - margin: 2em } - -h1.section-subtitle, h2.section-subtitle, h3.section-subtitle, -h4.section-subtitle, h5.section-subtitle, h6.section-subtitle { - margin-top: 0.4em } - -h1.title { - text-align: center } - -h2.subtitle { - text-align: center } - -hr.docutils { - width: 75% } - -img.align-left { - clear: left } - -img.align-right { - clear: right } - -ol.simple, ul.simple { - margin-bottom: 1em } - -ol.arabic { - list-style: decimal } - -ol.loweralpha { - list-style: lower-alpha } - -ol.upperalpha { - list-style: upper-alpha } - -ol.lowerroman { - list-style: lower-roman } - -ol.upperroman { - list-style: upper-roman } - -p.attribution { - text-align: right ; - margin-left: 50% } - -p.caption { - font-style: italic } - -p.credits { - font-style: italic ; - font-size: smaller } - -p.label { - white-space: nowrap } - -p.rubric { - font-weight: bold ; - font-size: larger ; - color: maroon ; - text-align: center } - -p.sidebar-title { - font-family: sans-serif ; - font-weight: bold ; - font-size: larger } - -p.sidebar-subtitle { - font-family: sans-serif ; - font-weight: bold } - -p.topic-title { - font-weight: bold } - -pre.address { - margin-bottom: 0 ; - margin-top: 0 ; - font-family: serif ; - font-size: 100% } - -pre.literal-block, pre.doctest-block { - margin-left: 2em ; - margin-right: 2em } - -span.classifier { - font-family: sans-serif ; - font-style: oblique } - -span.classifier-delimiter { - font-family: sans-serif ; - font-weight: bold } - -span.interpreted { - font-family: sans-serif } - -span.option { - white-space: nowrap } - -span.pre { - white-space: pre } - -span.problematic { - color: red } - -span.section-subtitle { - /* font-size relative to parent (h1..h6 element) */ - font-size: 80% } - -table.citation { - border-left: solid 1px gray; - margin-left: 1px } - -table.docinfo { - margin: 2em 4em } - -table.docutils { - margin-top: 0.5em ; - margin-bottom: 0.5em } - -table.footnote { - border-left: solid 1px black; - margin-left: 1px } - -table.docutils td, table.docutils th, -table.docinfo td, table.docinfo th { - padding-left: 0.5em ; - padding-right: 0.5em ; - vertical-align: top } - -table.docutils th.field-name, table.docinfo th.docinfo-name { - font-weight: bold ; - text-align: left ; - white-space: nowrap ; - padding-left: 0 } - -h1 tt.docutils, h2 tt.docutils, h3 tt.docutils, -h4 tt.docutils, h5 tt.docutils, h6 tt.docutils { - font-size: 100% } - 
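
The stylesheet removed above is only the default; a docutils front end can point the HTML writer at a different file through the normal settings machinery. A small sketch, assuming a hypothetical local file my_theme.css:

    from docutils.core import publish_string

    rst = "Title\n=====\n\nSome *emphasised* text.\n"
    html = publish_string(
        source=rst,
        writer_name='html4css1',
        settings_overrides={
            'stylesheet_path': 'my_theme.css',  # hypothetical replacement CSS
            'embed_stylesheet': False,          # link to it instead of inlining
        })
    print(html)
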
-ul.auto-toc { - list-style-type: none } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/html4css1/template.txt --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/html4css1/template.txt Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,8 +0,0 @@ -%(head_prefix)s -%(head)s -%(stylesheet)s -%(body_prefix)s -%(body_pre_docinfo)s -%(docinfo)s -%(body)s -%(body_suffix)s diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/latex2e/__init__.py --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/latex2e/__init__.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,2177 +0,0 @@ -# $Id: __init__.py 5333 2007-07-10 17:31:28Z grubert $ -# Author: Engelbert Gruber -# Copyright: This module has been placed in the public domain. - -""" -LaTeX2e document tree Writer. -""" - -__docformat__ = 'reStructuredText' - -# code contributions from several people included, thanks to all. -# some named: David Abrahams, Julien Letessier, Lele Gaifax, and others. -# -# convention deactivate code by two # e.g. ##. - -import sys -import time -import re -import string -from types import ListType -from docutils import frontend, nodes, languages, writers, utils -from docutils.writers.newlatex2e import unicode_map - -from docutils.transforms.references import DanglingReferencesVisitor - -class Writer(writers.Writer): - - supported = ('latex','latex2e') - """Formats this writer supports.""" - - settings_spec = ( - 'LaTeX-Specific Options', - 'The LaTeX "--output-encoding" default is "latin-1:strict".', - (('Specify documentclass. Default is "article".', - ['--documentclass'], - {'default': 'article', }), - ('Specify document options. Multiple options can be given, ' - 'separated by commas. Default is "10pt,a4paper".', - ['--documentoptions'], - {'default': '10pt,a4paper', }), - ('Use LaTeX footnotes. LaTeX supports only numbered footnotes (does it?). ' - 'Default: no, uses figures.', - ['--use-latex-footnotes'], - {'default': 0, 'action': 'store_true', - 'validator': frontend.validate_boolean}), - ('Format for footnote references: one of "superscript" or ' - '"brackets". Default is "superscript".', - ['--footnote-references'], - {'choices': ['superscript', 'brackets'], 'default': 'superscript', - 'metavar': '', - 'overrides': 'trim_footnote_reference_space'}), - ('Use LaTeX citations. ' - 'Default: no, uses figures which might get mixed with images.', - ['--use-latex-citations'], - {'default': 0, 'action': 'store_true', - 'validator': frontend.validate_boolean}), - ('Format for block quote attributions: one of "dash" (em-dash ' - 'prefix), "parentheses"/"parens", or "none". Default is "dash".', - ['--attribution'], - {'choices': ['dash', 'parentheses', 'parens', 'none'], - 'default': 'dash', 'metavar': ''}), - ('Specify a stylesheet file. The file will be "input" by latex in ' - 'the document header. Default is no stylesheet (""). ' - 'Overrides --stylesheet-path.', - ['--stylesheet'], - {'default': '', 'metavar': '', - 'overrides': 'stylesheet_path'}), - ('Specify a stylesheet file, relative to the current working ' - 'directory. Overrides --stylesheet.', - ['--stylesheet-path'], - {'metavar': '', 'overrides': 'stylesheet'}), - ('Table of contents by docutils (default) or LaTeX. 
LaTeX (writer) ' - 'supports only one ToC per document, but docutils does not know of ' - 'pagenumbers. LaTeX table of contents also means LaTeX generates ' - 'sectionnumbers.', - ['--use-latex-toc'], - {'default': 0, 'action': 'store_true', - 'validator': frontend.validate_boolean}), - ('Add parts on top of the section hierarchy.', - ['--use-part-section'], - {'default': 0, 'action': 'store_true', - 'validator': frontend.validate_boolean}), - ('Let LaTeX print author and date, do not show it in docutils ' - 'document info.', - ['--use-latex-docinfo'], - {'default': 0, 'action': 'store_true', - 'validator': frontend.validate_boolean}), - ('Use LaTeX abstract environment for the documents abstract.' - 'Per default the abstract is an unnumbered section.', - ['--use-latex-abstract'], - {'default': 0, 'action': 'store_true', - 'validator': frontend.validate_boolean}), - ('Color of any hyperlinks embedded in text ' - '(default: "blue", "0" to disable).', - ['--hyperlink-color'], {'default': 'blue'}), - ('Enable compound enumerators for nested enumerated lists ' - '(e.g. "1.2.a.ii"). Default: disabled.', - ['--compound-enumerators'], - {'default': None, 'action': 'store_true', - 'validator': frontend.validate_boolean}), - ('Disable compound enumerators for nested enumerated lists. This is ' - 'the default.', - ['--no-compound-enumerators'], - {'action': 'store_false', 'dest': 'compound_enumerators'}), - ('Enable section ("." subsection ...) prefixes for compound ' - 'enumerators. This has no effect without --compound-enumerators. ' - 'Default: disabled.', - ['--section-prefix-for-enumerators'], - {'default': None, 'action': 'store_true', - 'validator': frontend.validate_boolean}), - ('Disable section prefixes for compound enumerators. ' - 'This is the default.', - ['--no-section-prefix-for-enumerators'], - {'action': 'store_false', 'dest': 'section_prefix_for_enumerators'}), - ('Set the separator between section number and enumerator ' - 'for compound enumerated lists. Default is "-".', - ['--section-enumerator-separator'], - {'default': '-', 'metavar': ''}), - ('When possibile, use verbatim for literal-blocks. ' - 'Default is to always use the mbox environment.', - ['--use-verbatim-when-possible'], - {'default': 0, 'action': 'store_true', - 'validator': frontend.validate_boolean}), - ('Table style. "standard" with horizontal and vertical lines, ' - '"booktabs" (LaTeX booktabs style) only horizontal lines ' - 'above and below the table and below the header or "nolines". ' - 'Default: "standard"', - ['--table-style'], - {'choices': ['standard', 'booktabs','nolines'], 'default': 'standard', - 'metavar': ''}), - ('LaTeX graphicx package option. ' - 'Possible values are "dvips", "pdftex". "auto" includes LaTeX code ' - 'to use "pdftex" if processing with pdf(la)tex and dvips otherwise. ' - 'Default is no option.', - ['--graphicx-option'], - {'default': ''}), - ('LaTeX font encoding. ' - 'Possible values are "T1", "OT1", "" or some other fontenc option. ' - 'The font encoding influences available symbols, e.g. "<<" as one ' - 'character. Default is "" which leads to package "ae" (a T1 ' - 'emulation using CM fonts).', - ['--font-encoding'], - {'default': ''}), - ('Per default the latex-writer puts the reference title into ' - 'hyperreferences. 
Specify "ref*" or "pageref*" to get the section ' - 'number or the page number.', - ['--reference-label'], - {'default': None, }), - ('Specify style and database for bibtex, for example ' - '"--use-bibtex=mystyle,mydb1,mydb2".', - ['--use-bibtex'], - {'default': None, }), - ),) - - settings_defaults = {'output_encoding': 'latin-1'} - - relative_path_settings = ('stylesheet_path',) - - config_section = 'latex2e writer' - config_section_dependencies = ('writers',) - - visitor_attributes = ("head_prefix", "head", - "body_prefix", "body", "body_suffix") - - output = None - """Final translated form of `document`.""" - - def __init__(self): - writers.Writer.__init__(self) - self.translator_class = LaTeXTranslator - - def translate(self): - visitor = self.translator_class(self.document) - self.document.walkabout(visitor) - self.output = visitor.astext() - # copy parts - for attr in self.visitor_attributes: - setattr(self, attr, getattr(visitor, attr)) - - def assemble_parts(self): - writers.Writer.assemble_parts(self) - for part in self.visitor_attributes: - self.parts[part] = ''.join(getattr(self, part)) - - -""" -Notes on LaTeX --------------- - -* LaTeX does not support multiple tocs in one document. - (might be no limitation except for docutils documentation) - - The "minitoc" latex package can produce per-chapter tocs in - book and report document classes. - -* width - - * linewidth - width of a line in the local environment - * textwidth - the width of text on the page - - Maybe always use linewidth ? - - *Bug* inside a minipage a (e.g. Sidebar) the linewidth is - not changed, needs fix in docutils so that tables - are not too wide. - - So we add locallinewidth set it initially and - on entering sidebar and reset on exit. -""" - -class Babel: - """Language specifics for LaTeX.""" - # country code by a.schlock. - # partly manually converted from iso and babel stuff, dialects and some - _ISO639_TO_BABEL = { - 'no': 'norsk', #XXX added by hand ( forget about nynorsk?) - 'gd': 'scottish', #XXX added by hand - 'hu': 'magyar', #XXX added by hand - 'pt': 'portuguese',#XXX added by hand - 'sl': 'slovenian', - 'af': 'afrikaans', - 'bg': 'bulgarian', - 'br': 'breton', - 'ca': 'catalan', - 'cs': 'czech', - 'cy': 'welsh', - 'da': 'danish', - 'fr': 'french', - # french, francais, canadien, acadian - 'de': 'ngerman', #XXX rather than german - # ngerman, naustrian, german, germanb, austrian - 'el': 'greek', - 'en': 'english', - # english, USenglish, american, UKenglish, british, canadian - 'eo': 'esperanto', - 'es': 'spanish', - 'et': 'estonian', - 'eu': 'basque', - 'fi': 'finnish', - 'ga': 'irish', - 'gl': 'galician', - 'he': 'hebrew', - 'hr': 'croatian', - 'hu': 'hungarian', - 'is': 'icelandic', - 'it': 'italian', - 'la': 'latin', - 'nl': 'dutch', - 'pl': 'polish', - 'pt': 'portuguese', - 'ro': 'romanian', - 'ru': 'russian', - 'sk': 'slovak', - 'sr': 'serbian', - 'sv': 'swedish', - 'tr': 'turkish', - 'uk': 'ukrainian' - } - - def __init__(self,lang): - self.language = lang - # pdflatex does not produce double quotes for ngerman in tt. 
- self.double_quote_replacment = None - if re.search('^de',self.language): - #self.quotes = ("\"`", "\"'") - self.quotes = ('{\\glqq}', '{\\grqq}') - self.double_quote_replacment = "{\\dq}" - elif re.search('^it',self.language): - self.quotes = ("``", "''") - self.double_quote_replacment = r'{\char`\"}' - else: - self.quotes = ("``", "''") - self.quote_index = 0 - - def next_quote(self): - q = self.quotes[self.quote_index] - self.quote_index = (self.quote_index+1)%2 - return q - - def quote_quotes(self,text): - t = None - for part in text.split('"'): - if t == None: - t = part - else: - t += self.next_quote() + part - return t - - def double_quotes_in_tt (self,text): - if not self.double_quote_replacment: - return text - return text.replace('"', self.double_quote_replacment) - - def get_language(self): - if self._ISO639_TO_BABEL.has_key(self.language): - return self._ISO639_TO_BABEL[self.language] - else: - # support dialects. - l = self.language.split("_")[0] - if self._ISO639_TO_BABEL.has_key(l): - return self._ISO639_TO_BABEL[l] - return None - - -latex_headings = { - 'optionlist_environment' : [ - '\\newcommand{\\optionlistlabel}[1]{\\bf #1 \\hfill}\n' - '\\newenvironment{optionlist}[1]\n' - '{\\begin{list}{}\n' - ' {\\setlength{\\labelwidth}{#1}\n' - ' \\setlength{\\rightmargin}{1cm}\n' - ' \\setlength{\\leftmargin}{\\rightmargin}\n' - ' \\addtolength{\\leftmargin}{\\labelwidth}\n' - ' \\addtolength{\\leftmargin}{\\labelsep}\n' - ' \\renewcommand{\\makelabel}{\\optionlistlabel}}\n' - '}{\\end{list}}\n', - ], - 'lineblock_environment' : [ - '\\newlength{\\lineblockindentation}\n' - '\\setlength{\\lineblockindentation}{2.5em}\n' - '\\newenvironment{lineblock}[1]\n' - '{\\begin{list}{}\n' - ' {\\setlength{\\partopsep}{\\parskip}\n' - ' \\addtolength{\\partopsep}{\\baselineskip}\n' - ' \\topsep0pt\\itemsep0.15\\baselineskip\\parsep0pt\n' - ' \\leftmargin#1}\n' - ' \\raggedright}\n' - '{\\end{list}}\n' - ], - 'footnote_floats' : [ - '% begin: floats for footnotes tweaking.\n', - '\\setlength{\\floatsep}{0.5em}\n', - '\\setlength{\\textfloatsep}{\\fill}\n', - '\\addtolength{\\textfloatsep}{3em}\n', - '\\renewcommand{\\textfraction}{0.5}\n', - '\\renewcommand{\\topfraction}{0.5}\n', - '\\renewcommand{\\bottomfraction}{0.5}\n', - '\\setcounter{totalnumber}{50}\n', - '\\setcounter{topnumber}{50}\n', - '\\setcounter{bottomnumber}{50}\n', - '% end floats for footnotes\n', - ], - 'some_commands' : [ - '% some commands, that could be overwritten in the style file.\n' - '\\newcommand{\\rubric}[1]' - '{\\subsection*{~\\hfill {\\it #1} \\hfill ~}}\n' - '\\newcommand{\\titlereference}[1]{\\textsl{#1}}\n' - '% end of "some commands"\n', - ] - } - -class DocumentClass: - """Details of a LaTeX document class.""" - - def __init__(self, document_class, with_part=False): - self.document_class = document_class - self._with_part = with_part - - def section(self, level): - """ Return the section name at the given level for the specific - document class. - - Level is 1,2,3..., as level 0 is the title.""" - - sections = [ 'section', 'subsection', 'subsubsection', - 'paragraph', 'subparagraph' ] - if self.document_class in ('book', 'report', 'scrreprt', 'scrbook'): - sections.insert(0, 'chapter') - if self._with_part: - sections.insert(0, 'part') - if level <= len(sections): - return sections[level-1] - else: - return sections[-1] - -class Table: - """ Manage a table while traversing. - Maybe change to a mixin defining the visit/departs, but then - class Table internal variables are in the Translator. 
- - Table style might be - - * standard: horizontal and vertical lines - * booktabs (requires booktabs latex package): only horizontal lines - * nolines, borderless : no lines - """ - def __init__(self,latex_type,table_style): - self._latex_type = latex_type - self._table_style = table_style - self._open = 0 - # miscellaneous attributes - self._attrs = {} - self._col_width = [] - self._rowspan = [] - self.stubs = [] - - def open(self): - self._open = 1 - self._col_specs = [] - self.caption = None - self._attrs = {} - self._in_head = 0 # maybe context with search - def close(self): - self._open = 0 - self._col_specs = None - self.caption = None - self._attrs = {} - self.stubs = [] - def is_open(self): - return self._open - - def set_table_style(self, table_style): - if not table_style in ('standard','booktabs','borderless','nolines'): - return - self._table_style = table_style - - def used_packages(self): - if self._table_style == 'booktabs': - return '\\usepackage{booktabs}\n' - return '' - def get_latex_type(self): - return self._latex_type - - def set(self,attr,value): - self._attrs[attr] = value - def get(self,attr): - if self._attrs.has_key(attr): - return self._attrs[attr] - return None - def get_vertical_bar(self): - if self._table_style == 'standard': - return '|' - return '' - # horizontal lines are drawn below a row, because we. - def get_opening(self): - if self._latex_type == 'longtable': - # otherwise longtable might move before paragraph and subparagraph - prefix = '\\leavevmode\n' - else: - prefix = '' - return '%s\\begin{%s}[c]' % (prefix, self._latex_type) - def get_closing(self): - line = "" - if self._table_style == 'booktabs': - line = '\\bottomrule\n' - elif self._table_style == 'standard': - lines = '\\hline\n' - return '%s\\end{%s}' % (line,self._latex_type) - - def visit_colspec(self, node): - self._col_specs.append(node) - # "stubs" list is an attribute of the tgroup element: - self.stubs.append(node.attributes.get('stub')) - - def get_colspecs(self): - """ - Return column specification for longtable. - - Assumes reST line length being 80 characters. - Table width is hairy. - - === === - ABC DEF - === === - - usually gets to narrow, therefore we add 1 (fiddlefactor). - """ - width = 80 - - total_width = 0.0 - # first see if we get too wide. 
- for node in self._col_specs: - colwidth = float(node['colwidth']+1) / width - total_width += colwidth - self._col_width = [] - self._rowspan = [] - # donot make it full linewidth - factor = 0.93 - if total_width > 1.0: - factor /= total_width - bar = self.get_vertical_bar() - latex_table_spec = "" - for node in self._col_specs: - colwidth = factor * float(node['colwidth']+1) / width - self._col_width.append(colwidth+0.005) - self._rowspan.append(0) - latex_table_spec += "%sp{%.3f\\locallinewidth}" % (bar,colwidth+0.005) - return latex_table_spec+bar - - def get_column_width(self): - """ return columnwidth for current cell (not multicell) - """ - return "%.2f\\locallinewidth" % self._col_width[self._cell_in_row-1] - - def visit_thead(self): - self._in_thead = 1 - if self._table_style == 'standard': - return ['\\hline\n'] - elif self._table_style == 'booktabs': - return ['\\toprule\n'] - return [] - def depart_thead(self): - a = [] - #if self._table_style == 'standard': - # a.append('\\hline\n') - if self._table_style == 'booktabs': - a.append('\\midrule\n') - if self._latex_type == 'longtable': - a.append('\\endhead\n') - # for longtable one could add firsthead, foot and lastfoot - self._in_thead = 0 - return a - def visit_row(self): - self._cell_in_row = 0 - def depart_row(self): - res = [' \\\\\n'] - self._cell_in_row = None # remove cell counter - for i in range(len(self._rowspan)): - if (self._rowspan[i]>0): - self._rowspan[i] -= 1 - - if self._table_style == 'standard': - rowspans = [] - for i in range(len(self._rowspan)): - if (self._rowspan[i]<=0): - rowspans.append(i+1) - if len(rowspans)==len(self._rowspan): - res.append('\\hline\n') - else: - cline = '' - rowspans.reverse() - # TODO merge clines - while 1: - try: - c_start = rowspans.pop() - except: - break - cline += '\\cline{%d-%d}\n' % (c_start,c_start) - res.append(cline) - return res - - def set_rowspan(self,cell,value): - try: - self._rowspan[cell] = value - except: - pass - def get_rowspan(self,cell): - try: - return self._rowspan[cell] - except: - return 0 - def get_entry_number(self): - return self._cell_in_row - def visit_entry(self): - self._cell_in_row += 1 - def is_stub_column(self): - if len(self.stubs) >= self._cell_in_row: - return self.stubs[self._cell_in_row-1] - return False - - -class LaTeXTranslator(nodes.NodeVisitor): - - # When options are given to the documentclass, latex will pass them - # to other packages, as done with babel. - # Dummy settings might be taken from document settings - - # Templates - # --------- - - latex_head = '\\documentclass[%s]{%s}\n' - linking = '\\usepackage[colorlinks=%s,linkcolor=%s,urlcolor=%s]{hyperref}\n' - stylesheet = '\\input{%s}\n' - # add a generated on day , machine by user using docutils version. - generator = '% generated by Docutils \n' - # Config setting defaults - # ----------------------- - - # use latex tableofcontents or let docutils do it. - use_latex_toc = 0 - - # TODO: use mixins for different implementations. - # list environment for docinfo. else tabularx - use_optionlist_for_docinfo = 0 # NOT YET IN USE - - # Use compound enumerations (1.A.1.) - compound_enumerators = 0 - - # If using compound enumerations, include section information. - section_prefix_for_enumerators = 0 - - # This is the character that separates the section ("." subsection ...) - # prefix from the regular list enumerator. 
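
The column-width normalisation in Table.get_colspecs above can be sketched in isolation as plain arithmetic; the function name and the sample widths below are illustrative only:

    def latex_col_widths(colwidths, line_width=80, factor=0.93):
        """Pad each reST column width by one character, scale it against an
        assumed 80-character source line, and rescale the whole row if it
        would exceed the available line width (the 0.93 keeps a margin)."""
        fractions = [float(c + 1) / line_width for c in colwidths]
        total = sum(fractions)
        if total > 1.0:
            factor = factor / total
        return [factor * f + 0.005 for f in fractions]

    # A table whose reST columns are 38, 28 and 30 characters wide would
    # overflow (total > 1.0), so every column is scaled down proportionally.
    print(latex_col_widths([38, 28, 30]))
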
- section_enumerator_separator = '-' - - # default link color - hyperlink_color = "blue" - - def __init__(self, document): - nodes.NodeVisitor.__init__(self, document) - self.settings = settings = document.settings - self.latex_encoding = self.to_latex_encoding(settings.output_encoding) - self.use_latex_toc = settings.use_latex_toc - self.use_latex_docinfo = settings.use_latex_docinfo - self.use_latex_footnotes = settings.use_latex_footnotes - self._use_latex_citations = settings.use_latex_citations - self._reference_label = settings.reference_label - self.hyperlink_color = settings.hyperlink_color - self.compound_enumerators = settings.compound_enumerators - self.font_encoding = settings.font_encoding - self.section_prefix_for_enumerators = ( - settings.section_prefix_for_enumerators) - self.section_enumerator_separator = ( - settings.section_enumerator_separator.replace('_', '\\_')) - if self.hyperlink_color == '0': - self.hyperlink_color = 'black' - self.colorlinks = 'false' - else: - self.colorlinks = 'true' - - if self.settings.use_bibtex: - self.bibtex = self.settings.use_bibtex.split(",",1) - # TODO avoid errors on not declared citations. - else: - self.bibtex = None - # language: labels, bibliographic_fields, and author_separators. - # to allow writing labes for specific languages. - self.language = languages.get_language(settings.language_code) - self.babel = Babel(settings.language_code) - self.author_separator = self.language.author_separators[0] - self.d_options = self.settings.documentoptions - if self.babel.get_language(): - self.d_options += ',%s' % self.babel.get_language() - - self.d_class = DocumentClass(settings.documentclass, - settings.use_part_section) - # object for a table while proccessing. - self.table_stack = [] - self.active_table = Table('longtable',settings.table_style) - - # HACK. Should have more sophisticated typearea handling. - if settings.documentclass.find('scr') == -1: - self.typearea = '\\usepackage[DIV12]{typearea}\n' - else: - if self.d_options.find('DIV') == -1 and self.d_options.find('BCOR') == -1: - self.typearea = '\\typearea{12}\n' - else: - self.typearea = '' - - if self.font_encoding == 'OT1': - fontenc_header = '' - elif self.font_encoding == '': - fontenc_header = '\\usepackage{ae}\n\\usepackage{aeguill}\n' - else: - fontenc_header = '\\usepackage[%s]{fontenc}\n' % (self.font_encoding,) - if self.latex_encoding.startswith('utf8'): - input_encoding = '\\usepackage{ucs}\n\\usepackage[utf8x]{inputenc}\n' - else: - input_encoding = '\\usepackage[%s]{inputenc}\n' % self.latex_encoding - if self.settings.graphicx_option == '': - self.graphicx_package = '\\usepackage{graphicx}\n' - elif self.settings.graphicx_option.lower() == 'auto': - self.graphicx_package = '\n'.join( - ('%Check if we are compiling under latex or pdflatex', - '\\ifx\\pdftexversion\\undefined', - ' \\usepackage{graphicx}', - '\\else', - ' \\usepackage[pdftex]{graphicx}', - '\\fi\n')) - else: - self.graphicx_package = ( - '\\usepackage[%s]{graphicx}\n' % self.settings.graphicx_option) - - self.head_prefix = [ - self.latex_head % (self.d_options,self.settings.documentclass), - '\\usepackage{babel}\n', # language is in documents settings. - fontenc_header, - '\\usepackage{shortvrb}\n', # allows verb in footnotes. - input_encoding, - # * tabularx: for docinfo, automatic width of columns, always on one page. - '\\usepackage{tabularx}\n', - '\\usepackage{longtable}\n', - self.active_table.used_packages(), - # possible other packages. 
- # * fancyhdr - # * ltxtable is a combination of tabularx and longtable (pagebreaks). - # but ?? - # - # extra space between text in tables and the line above them - '\\setlength{\\extrarowheight}{2pt}\n', - '\\usepackage{amsmath}\n', # what fore amsmath. - self.graphicx_package, - '\\usepackage{color}\n', - '\\usepackage{multirow}\n', - '\\usepackage{ifthen}\n', # before hyperref! - self.linking % (self.colorlinks, self.hyperlink_color, self.hyperlink_color), - self.typearea, - self.generator, - # latex lengths - '\\newlength{\\admonitionwidth}\n', - '\\setlength{\\admonitionwidth}{0.9\\textwidth}\n' - # width for docinfo tablewidth - '\\newlength{\\docinfowidth}\n', - '\\setlength{\\docinfowidth}{0.9\\textwidth}\n' - # linewidth of current environment, so tables are not wider - # than the sidebar: using locallinewidth seems to defer evaluation - # of linewidth, this is fixing it. - '\\newlength{\\locallinewidth}\n', - # will be set later. - ] - self.head_prefix.extend( latex_headings['optionlist_environment'] ) - self.head_prefix.extend( latex_headings['lineblock_environment'] ) - self.head_prefix.extend( latex_headings['footnote_floats'] ) - self.head_prefix.extend( latex_headings['some_commands'] ) - ## stylesheet is last: so it might be possible to overwrite defaults. - stylesheet = utils.get_stylesheet_reference(settings) - if stylesheet: - settings.record_dependencies.add(stylesheet) - self.head_prefix.append(self.stylesheet % (stylesheet)) - - if self.linking: # and maybe check for pdf - self.pdfinfo = [ ] - self.pdfauthor = None - # pdftitle, pdfsubject, pdfauthor, pdfkeywords, - # pdfcreator, pdfproducer - else: - self.pdfinfo = None - # NOTE: Latex wants a date and an author, rst puts this into - # docinfo, so normally we do not want latex author/date handling. - # latex article has its own handling of date and author, deactivate. - # self.astext() adds \title{...} \author{...} \date{...}, even if the - # "..." are empty strings. - self.head = [ ] - # separate title, so we can appen subtitle. - self.title = '' - # if use_latex_docinfo: collects lists of author/organization/contact/address lines - self.author_stack = [] - self.date = '' - - self.body_prefix = ['\\raggedbottom\n'] - self.body = [] - self.body_suffix = ['\n'] - self.section_level = 0 - self.context = [] - self.topic_classes = [] - # column specification for tables - self.table_caption = None - - # Flags to encode - # --------------- - # verbatim: to tell encode not to encode. - self.verbatim = 0 - # insert_newline: to tell encode to replace blanks by "~". - self.insert_none_breaking_blanks = 0 - # insert_newline: to tell encode to add latex newline. - self.insert_newline = 0 - # mbox_newline: to tell encode to add mbox and newline. - self.mbox_newline = 0 - # inside citation reference labels underscores dont need to be escaped. - self.inside_citation_reference_label = 0 - - # Stack of section counters so that we don't have to use_latex_toc. - # This will grow and shrink as processing occurs. - # Initialized for potential first-level sections. - self._section_number = [0] - - # The current stack of enumerations so that we can expand - # them into a compound enumeration. - self._enumeration_counters = [] - - # The maximum number of enumeration counters we've used. - # If we go beyond this number, we need to create a new - # counter; otherwise, just reuse an old one. - self._max_enumeration_counters = 0 - - self._bibitems = [] - - # docinfo. - self.docinfo = None - # inside literal block: no quote mangling. 
- self.literal_block = 0 - self.literal_block_stack = [] - self.literal = 0 - # true when encoding in math mode - self.mathmode = 0 - - def to_latex_encoding(self,docutils_encoding): - """ - Translate docutils encoding name into latex's. - - Default fallback method is remove "-" and "_" chars from docutils_encoding. - - """ - tr = { "iso-8859-1": "latin1", # west european - "iso-8859-2": "latin2", # east european - "iso-8859-3": "latin3", # esperanto, maltese - "iso-8859-4": "latin4", # north european,scandinavian, baltic - "iso-8859-5": "iso88595", # cyrillic (ISO) - "iso-8859-9": "latin5", # turkish - "iso-8859-15": "latin9", # latin9, update to latin1. - "mac_cyrillic": "maccyr", # cyrillic (on Mac) - "windows-1251": "cp1251", # cyrillic (on Windows) - "koi8-r": "koi8-r", # cyrillic (Russian) - "koi8-u": "koi8-u", # cyrillic (Ukrainian) - "windows-1250": "cp1250", # - "windows-1252": "cp1252", # - "us-ascii": "ascii", # ASCII (US) - # unmatched encodings - #"": "applemac", - #"": "ansinew", # windows 3.1 ansi - #"": "ascii", # ASCII encoding for the range 32--127. - #"": "cp437", # dos latine us - #"": "cp850", # dos latin 1 - #"": "cp852", # dos latin 2 - #"": "decmulti", - #"": "latin10", - #"iso-8859-6": "" # arabic - #"iso-8859-7": "" # greek - #"iso-8859-8": "" # hebrew - #"iso-8859-10": "" # latin6, more complete iso-8859-4 - } - if tr.has_key(docutils_encoding.lower()): - return tr[docutils_encoding.lower()] - # convert: latin-1 and utf-8 and similar things - return docutils_encoding.replace("_", "").replace("-", "").lower() - - def language_label(self, docutil_label): - return self.language.labels[docutil_label] - - latex_equivalents = { - u'\u00A0' : '~', - u'\u2013' : '{--}', - u'\u2014' : '{---}', - u'\u2018' : '`', - u'\u2019' : '\'', - u'\u201A' : ',', - u'\u201C' : '``', - u'\u201D' : '\'\'', - u'\u201E' : ',,', - u'\u2020' : '{\\dag}', - u'\u2021' : '{\\ddag}', - u'\u2026' : '{\\dots}', - u'\u2122' : '{\\texttrademark}', - u'\u21d4' : '{$\\Leftrightarrow$}', - # greek alphabet ? - } - - def unicode_to_latex(self,text): - # see LaTeX codec - # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/252124 - # Only some special chracters are translated, for documents with many - # utf-8 chars one should use the LaTeX unicode package. - for uchar in self.latex_equivalents.keys(): - text = text.replace(uchar,self.latex_equivalents[uchar]) - return text - - def ensure_math(self, text): - if not self.__dict__.has_key('ensure_math_re'): - chars = { - # lnot,pm,twosuperior,threesuperior,mu,onesuperior,times,div - 'latin1' : '\xac\xb1\xb2\xb3\xb5\xb9\xd7\xf7' , - # also latin5 and latin9 - } - self.ensure_math_re = re.compile('([%s])' % chars['latin1']) - text = self.ensure_math_re.sub(r'\\ensuremath{\1}', text) - return text - - def encode(self, text): - """ - Encode special characters (``# $ % & ~ _ ^ \ { }``) in `text` & return - """ - # Escaping with a backslash does not help with backslashes, ~ and ^. - - # < > are only available in math-mode or tt font. (really ?) - # $ starts math- mode. - # AND quotes - if self.verbatim: - return text - # compile the regexps once. do it here so one can see them. - # - # first the braces. - if not self.__dict__.has_key('encode_re_braces'): - self.encode_re_braces = re.compile(r'([{}])') - text = self.encode_re_braces.sub(r'{\\\1}',text) - if not self.__dict__.has_key('encode_re_bslash'): - # find backslash: except in the form '{\{}' or '{\}}'. 
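In essence, the encoding translation above is a table lookup with a normalising fallback, and the Unicode handling is a plain character-for-macro substitution. A minimal standalone sketch of that behaviour (the names docutils_to_latex_encoding and LATEX_EQUIVALENTS below are illustrative, not identifiers from the removed writer, and the tables are abbreviated):

    # Translate a docutils encoding name to a LaTeX inputenc name; unknown
    # names fall back to stripping "-"/"_" and lowercasing ("UTF-8" -> "utf8").
    DOCUTILS_TO_LATEX = {"iso-8859-1": "latin1", "us-ascii": "ascii"}

    def docutils_to_latex_encoding(name):
        key = name.lower()
        return DOCUTILS_TO_LATEX.get(key, key.replace("_", "").replace("-", ""))

    # Replace a few Unicode punctuation characters with LaTeX macros.
    LATEX_EQUIVALENTS = {u'\u2013': '{--}', u'\u2014': '{---}', u'\u2026': '{\\dots}'}

    def unicode_to_latex(text):
        for uchar, macro in LATEX_EQUIVALENTS.items():
            text = text.replace(uchar, macro)
        return text

    # e.g. docutils_to_latex_encoding("ISO-8859-1") == "latin1"
    #      docutils_to_latex_encoding("UTF-8") == "utf8"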
- self.encode_re_bslash = re.compile(r'(?", '{\\textgreater}') - # then - text = text.replace("&", '{\\&}') - # the ^: - # * verb|^| does not work in mbox. - # * mathmode has wedge. hat{~} would also work. - # text = text.replace("^", '{\\ensuremath{^\\wedge}}') - text = text.replace("^", '{\\textasciicircum}') - text = text.replace("%", '{\\%}') - text = text.replace("#", '{\\#}') - text = text.replace("~", '{\\textasciitilde}') - # Separate compound characters, e.g. "--" to "-{}-". (The - # actual separation is done later; see below.) - separate_chars = '-' - if self.literal_block or self.literal: - # In monospace-font, we also separate ",,", "``" and "''" - # and some other characters which can't occur in - # non-literal text. - separate_chars += ',`\'"<>' - # pdflatex does not produce doublequotes for ngerman. - text = self.babel.double_quotes_in_tt(text) - if self.font_encoding == 'OT1': - # We're using OT1 font-encoding and have to replace - # underscore by underlined blank, because this has - # correct width. - text = text.replace('_', '{\\underline{ }}') - # And the tt-backslash doesn't work in OT1, so we use - # a mirrored slash. - text = text.replace('\\textbackslash', '\\reflectbox{/}') - else: - text = text.replace('_', '{\\_}') - else: - text = self.babel.quote_quotes(text) - if not self.inside_citation_reference_label: - text = text.replace("_", '{\\_}') - for char in separate_chars * 2: - # Do it twice ("* 2") becaues otherwise we would replace - # "---" by "-{}--". - text = text.replace(char + char, char + '{}' + char) - if self.insert_newline or self.literal_block: - # Insert a blank before the newline, to avoid - # ! LaTeX Error: There's no line here to end. - text = text.replace("\n", '~\\\\\n') - elif self.mbox_newline: - if self.literal_block: - closings = "}" * len(self.literal_block_stack) - openings = "".join(self.literal_block_stack) - else: - closings = "" - openings = "" - text = text.replace("\n", "%s}\\\\\n\\mbox{%s" % (closings,openings)) - text = text.replace('[', '{[}').replace(']', '{]}') - if self.insert_none_breaking_blanks: - text = text.replace(' ', '~') - if self.latex_encoding != 'utf8': - text = self.unicode_to_latex(text) - text = self.ensure_math(text) - return text - - def attval(self, text, - whitespace=re.compile('[\n\r\t\v\f]')): - """Cleanse, encode, and return attribute value text.""" - return self.encode(whitespace.sub(' ', text)) - - def astext(self): - if self.pdfinfo is not None and self.pdfauthor: - self.pdfinfo.append('pdfauthor={%s}' % self.pdfauthor) - if self.pdfinfo: - pdfinfo = '\\hypersetup{\n' + ',\n'.join(self.pdfinfo) + '\n}\n' - else: - pdfinfo = '' - head = '\\title{%s}\n\\author{%s}\n\\date{%s}\n' % \ - (self.title, - ' \\and\n'.join(['~\\\\\n'.join(author_lines) - for author_lines in self.author_stack]), - self.date) - return ''.join(self.head_prefix + [head] + self.head + [pdfinfo] - + self.body_prefix + self.body + self.body_suffix) - - def visit_Text(self, node): - self.body.append(self.encode(node.astext())) - - def depart_Text(self, node): - pass - - def visit_address(self, node): - self.visit_docinfo_item(node, 'address') - - def depart_address(self, node): - self.depart_docinfo_item(node) - - def visit_admonition(self, node, name=''): - self.body.append('\\begin{center}\\begin{sffamily}\n') - self.body.append('\\fbox{\\parbox{\\admonitionwidth}{\n') - if name: - self.body.append('\\textbf{\\large '+ self.language.labels[name] + '}\n'); - self.body.append('\\vspace{2mm}\n') - - - def depart_admonition(self, 
node=None): - self.body.append('}}\n') # end parbox fbox - self.body.append('\\end{sffamily}\n\\end{center}\n'); - - def visit_attention(self, node): - self.visit_admonition(node, 'attention') - - def depart_attention(self, node): - self.depart_admonition() - - def visit_author(self, node): - self.visit_docinfo_item(node, 'author') - - def depart_author(self, node): - self.depart_docinfo_item(node) - - def visit_authors(self, node): - # not used: visit_author is called anyway for each author. - pass - - def depart_authors(self, node): - pass - - def visit_block_quote(self, node): - self.body.append( '\\begin{quote}\n') - - def depart_block_quote(self, node): - self.body.append( '\\end{quote}\n') - - def visit_bullet_list(self, node): - if 'contents' in self.topic_classes: - if self.use_latex_toc: - raise nodes.SkipNode - self.body.append( '\\begin{list}{}{}\n' ) - else: - self.body.append( '\\begin{itemize}\n' ) - - def depart_bullet_list(self, node): - if 'contents' in self.topic_classes: - self.body.append( '\\end{list}\n' ) - else: - self.body.append( '\\end{itemize}\n' ) - - # Imperfect superscript/subscript handling: mathmode italicizes - # all letters by default. - def visit_superscript(self, node): - self.body.append('$^{') - self.mathmode = 1 - - def depart_superscript(self, node): - self.body.append('}$') - self.mathmode = 0 - - def visit_subscript(self, node): - self.body.append('$_{') - self.mathmode = 1 - - def depart_subscript(self, node): - self.body.append('}$') - self.mathmode = 0 - - def visit_caption(self, node): - self.body.append( '\\caption{' ) - - def depart_caption(self, node): - self.body.append('}') - - def visit_caution(self, node): - self.visit_admonition(node, 'caution') - - def depart_caution(self, node): - self.depart_admonition() - - def visit_title_reference(self, node): - self.body.append( '\\titlereference{' ) - - def depart_title_reference(self, node): - self.body.append( '}' ) - - def visit_citation(self, node): - # TODO maybe use cite bibitems - if self._use_latex_citations: - self.context.append(len(self.body)) - else: - self.body.append('\\begin{figure}[b]') - for id in node['ids']: - self.body.append('\\hypertarget{%s}' % id) - - def depart_citation(self, node): - if self._use_latex_citations: - size = self.context.pop() - label = self.body[size] - text = ''.join(self.body[size+1:]) - del self.body[size:] - self._bibitems.append([label, text]) - else: - self.body.append('\\end{figure}\n') - - def visit_citation_reference(self, node): - if self._use_latex_citations: - self.body.append('\\cite{') - self.inside_citation_reference_label = 1 - else: - href = '' - if node.has_key('refid'): - href = node['refid'] - elif node.has_key('refname'): - href = self.document.nameids[node['refname']] - self.body.append('[\\hyperlink{%s}{' % href) - - def depart_citation_reference(self, node): - if self._use_latex_citations: - self.body.append('}') - self.inside_citation_reference_label = 0 - else: - self.body.append('}]') - - def visit_classifier(self, node): - self.body.append( '(\\textbf{' ) - - def depart_classifier(self, node): - self.body.append( '})\n' ) - - def visit_colspec(self, node): - self.active_table.visit_colspec(node) - - def depart_colspec(self, node): - pass - - def visit_comment(self, node): - # Escape end of line by a new comment start in comment text. 
- self.body.append('%% %s \n' % node.astext().replace('\n', '\n% ')) - raise nodes.SkipNode - - def visit_compound(self, node): - pass - - def depart_compound(self, node): - pass - - def visit_contact(self, node): - self.visit_docinfo_item(node, 'contact') - - def depart_contact(self, node): - self.depart_docinfo_item(node) - - def visit_container(self, node): - pass - - def depart_container(self, node): - pass - - def visit_copyright(self, node): - self.visit_docinfo_item(node, 'copyright') - - def depart_copyright(self, node): - self.depart_docinfo_item(node) - - def visit_danger(self, node): - self.visit_admonition(node, 'danger') - - def depart_danger(self, node): - self.depart_admonition() - - def visit_date(self, node): - self.visit_docinfo_item(node, 'date') - - def depart_date(self, node): - self.depart_docinfo_item(node) - - def visit_decoration(self, node): - pass - - def depart_decoration(self, node): - pass - - def visit_definition(self, node): - pass - - def depart_definition(self, node): - self.body.append('\n') - - def visit_definition_list(self, node): - self.body.append( '\\begin{description}\n' ) - - def depart_definition_list(self, node): - self.body.append( '\\end{description}\n' ) - - def visit_definition_list_item(self, node): - pass - - def depart_definition_list_item(self, node): - pass - - def visit_description(self, node): - self.body.append( ' ' ) - - def depart_description(self, node): - pass - - def visit_docinfo(self, node): - self.docinfo = [] - self.docinfo.append('%' + '_'*75 + '\n') - self.docinfo.append('\\begin{center}\n') - self.docinfo.append('\\begin{tabularx}{\\docinfowidth}{lX}\n') - - def depart_docinfo(self, node): - self.docinfo.append('\\end{tabularx}\n') - self.docinfo.append('\\end{center}\n') - self.body = self.docinfo + self.body - # clear docinfo, so field names are no longer appended. - self.docinfo = None - - def visit_docinfo_item(self, node, name): - if name == 'author': - if not self.pdfinfo == None: - if not self.pdfauthor: - self.pdfauthor = self.attval(node.astext()) - else: - self.pdfauthor += self.author_separator + self.attval(node.astext()) - if self.use_latex_docinfo: - if name in ('author', 'organization', 'contact', 'address'): - # We attach these to the last author. If any of them precedes - # the first author, put them in a separate "author" group (for - # no better semantics). 
- if name == 'author' or not self.author_stack: - self.author_stack.append([]) - if name == 'address': # newlines are meaningful - self.insert_newline = 1 - text = self.encode(node.astext()) - self.insert_newline = 0 - else: - text = self.attval(node.astext()) - self.author_stack[-1].append(text) - raise nodes.SkipNode - elif name == 'date': - self.date = self.attval(node.astext()) - raise nodes.SkipNode - self.docinfo.append('\\textbf{%s}: &\n\t' % self.language_label(name)) - if name == 'address': - self.insert_newline = 1 - self.docinfo.append('{\\raggedright\n') - self.context.append(' } \\\\\n') - else: - self.context.append(' \\\\\n') - self.context.append(self.docinfo) - self.context.append(len(self.body)) - - def depart_docinfo_item(self, node): - size = self.context.pop() - dest = self.context.pop() - tail = self.context.pop() - tail = self.body[size:] + [tail] - del self.body[size:] - dest.extend(tail) - # for address we did set insert_newline - self.insert_newline = 0 - - def visit_doctest_block(self, node): - self.body.append( '\\begin{verbatim}' ) - self.verbatim = 1 - - def depart_doctest_block(self, node): - self.body.append( '\\end{verbatim}\n' ) - self.verbatim = 0 - - def visit_document(self, node): - self.body_prefix.append('\\begin{document}\n') - # titled document? - if self.use_latex_docinfo or len(node) and isinstance(node[0], nodes.title): - self.body_prefix.append('\\maketitle\n') - # alternative use titlepage environment. - # \begin{titlepage} - # ... - self.body.append('\n\\setlength{\\locallinewidth}{\\linewidth}\n') - - def depart_document(self, node): - # TODO insertion point of bibliography should none automatic. - if self._use_latex_citations and len(self._bibitems)>0: - if not self.bibtex: - widest_label = "" - for bi in self._bibitems: - if len(widest_label) self._max_enumeration_counters: - self._max_enumeration_counters = len(self._enumeration_counters) - self.body.append('\\newcounter{%s}\n' % counter_name) - else: - self.body.append('\\setcounter{%s}{0}\n' % counter_name) - - self.body.append('\\begin{list}{%s\\%s{%s}%s}\n' % \ - (enum_prefix,enum_type,counter_name,enum_suffix)) - self.body.append('{\n') - self.body.append('\\usecounter{%s}\n' % counter_name) - # set start after usecounter, because it initializes to zero. - if node.has_key('start'): - self.body.append('\\addtocounter{%s}{%d}\n' \ - % (counter_name,node['start']-1)) - ## set rightmargin equal to leftmargin - self.body.append('\\setlength{\\rightmargin}{\\leftmargin}\n') - self.body.append('}\n') - - def depart_enumerated_list(self, node): - self.body.append('\\end{list}\n') - self._enumeration_counters.pop() - - def visit_error(self, node): - self.visit_admonition(node, 'error') - - def depart_error(self, node): - self.depart_admonition() - - def visit_field(self, node): - # real output is done in siblings: _argument, _body, _name - pass - - def depart_field(self, node): - self.body.append('\n') - ##self.body.append('%[depart_field]\n') - - def visit_field_argument(self, node): - self.body.append('%[visit_field_argument]\n') - - def depart_field_argument(self, node): - self.body.append('%[depart_field_argument]\n') - - def visit_field_body(self, node): - # BUG by attach as text we loose references. 
- if self.docinfo: - self.docinfo.append('%s \\\\\n' % self.encode(node.astext())) - raise nodes.SkipNode - # BUG: what happens if not docinfo - - def depart_field_body(self, node): - self.body.append( '\n' ) - - def visit_field_list(self, node): - if not self.docinfo: - self.body.append('\\begin{quote}\n') - self.body.append('\\begin{description}\n') - - def depart_field_list(self, node): - if not self.docinfo: - self.body.append('\\end{description}\n') - self.body.append('\\end{quote}\n') - - def visit_field_name(self, node): - # BUG this duplicates docinfo_item - if self.docinfo: - self.docinfo.append('\\textbf{%s}: &\n\t' % self.encode(node.astext())) - raise nodes.SkipNode - else: - self.body.append('\\item [') - - def depart_field_name(self, node): - if not self.docinfo: - self.body.append(':]') - - def visit_figure(self, node): - if (not node.attributes.has_key('align') or - node.attributes['align'] == 'center'): - # centering does not add vertical space like center. - align = '\n\\centering' - align_end = '' - else: - # TODO non vertical space for other alignments. - align = '\\begin{flush%s}' % node.attributes['align'] - align_end = '\\end{flush%s}' % node.attributes['align'] - self.body.append( '\\begin{figure}[htbp]%s\n' % align ) - self.context.append( '%s\\end{figure}\n' % align_end ) - - def depart_figure(self, node): - self.body.append( self.context.pop() ) - - def visit_footer(self, node): - self.context.append(len(self.body)) - - def depart_footer(self, node): - start = self.context.pop() - footer = (['\n\\begin{center}\small\n'] - + self.body[start:] + ['\n\\end{center}\n']) - self.body_suffix[:0] = footer - del self.body[start:] - - def visit_footnote(self, node): - if self.use_latex_footnotes: - num,text = node.astext().split(None,1) - num = self.encode(num.strip()) - self.body.append('\\footnotetext['+num+']') - self.body.append('{') - else: - self.body.append('\\begin{figure}[b]') - for id in node['ids']: - self.body.append('\\hypertarget{%s}' % id) - - def depart_footnote(self, node): - if self.use_latex_footnotes: - self.body.append('}\n') - else: - self.body.append('\\end{figure}\n') - - def visit_footnote_reference(self, node): - if self.use_latex_footnotes: - self.body.append("\\footnotemark["+self.encode(node.astext())+"]") - raise nodes.SkipNode - href = '' - if node.has_key('refid'): - href = node['refid'] - elif node.has_key('refname'): - href = self.document.nameids[node['refname']] - format = self.settings.footnote_references - if format == 'brackets': - suffix = '[' - self.context.append(']') - elif format == 'superscript': - suffix = '\\raisebox{.5em}[0em]{\\scriptsize' - self.context.append('}') - else: # shouldn't happen - raise AssertionError('Illegal footnote reference format.') - self.body.append('%s\\hyperlink{%s}{' % (suffix,href)) - - def depart_footnote_reference(self, node): - if self.use_latex_footnotes: - return - self.body.append('}%s' % self.context.pop()) - - # footnote/citation label - def label_delim(self, node, bracket, superscript): - if isinstance(node.parent, nodes.footnote): - if self.use_latex_footnotes: - raise nodes.SkipNode - if self.settings.footnote_references == 'brackets': - self.body.append(bracket) - else: - self.body.append(superscript) - else: - assert isinstance(node.parent, nodes.citation) - if not self._use_latex_citations: - self.body.append(bracket) - - def visit_label(self, node): - self.label_delim(node, '[', '$^{') - - def depart_label(self, node): - self.label_delim(node, ']', '}$') - - # elements generated by 
the framework e.g. section numbers. - def visit_generated(self, node): - pass - - def depart_generated(self, node): - pass - - def visit_header(self, node): - self.context.append(len(self.body)) - - def depart_header(self, node): - start = self.context.pop() - self.body_prefix.append('\n\\verb|begin_header|\n') - self.body_prefix.extend(self.body[start:]) - self.body_prefix.append('\n\\verb|end_header|\n') - del self.body[start:] - - def visit_hint(self, node): - self.visit_admonition(node, 'hint') - - def depart_hint(self, node): - self.depart_admonition() - - def latex_image_length(self, width_str): - match = re.match('(\d*\.?\d*)\s*(\S*)', width_str) - if not match: - # fallback - return width_str - res = width_str - amount, unit = match.groups()[:2] - if unit == "px": - # LaTeX does not know pixels but points - res = "%spt" % amount - elif unit == "%": - res = "%.3f\\linewidth" % (float(amount)/100.0) - return res - - def visit_image(self, node): - attrs = node.attributes - # Add image URI to dependency list, assuming that it's - # referring to a local file. - self.settings.record_dependencies.add(attrs['uri']) - pre = [] # in reverse order - post = [] - include_graphics_options = [] - inline = isinstance(node.parent, nodes.TextElement) - if attrs.has_key('scale'): - # Could also be done with ``scale`` option to - # ``\includegraphics``; doing it this way for consistency. - pre.append('\\scalebox{%f}{' % (attrs['scale'] / 100.0,)) - post.append('}') - if attrs.has_key('width'): - include_graphics_options.append('width=%s' % ( - self.latex_image_length(attrs['width']), )) - if attrs.has_key('height'): - include_graphics_options.append('height=%s' % ( - self.latex_image_length(attrs['height']), )) - if attrs.has_key('align'): - align_prepost = { - # By default latex aligns the top of an image. - (1, 'top'): ('', ''), - (1, 'middle'): ('\\raisebox{-0.5\\height}{', '}'), - (1, 'bottom'): ('\\raisebox{-\\height}{', '}'), - (0, 'center'): ('{\\hfill', '\\hfill}'), - # These 2 don't exactly do the right thing. The image should - # be floated alongside the paragraph. See - # http://www.w3.org/TR/html4/struct/objects.html#adef-align-IMG - (0, 'left'): ('{', '\\hfill}'), - (0, 'right'): ('{\\hfill', '}'),} - try: - pre.append(align_prepost[inline, attrs['align']][0]) - post.append(align_prepost[inline, attrs['align']][1]) - except KeyError: - pass # XXX complain here? - if not inline: - pre.append('\n') - post.append('\n') - pre.reverse() - self.body.extend( pre ) - options = '' - if len(include_graphics_options)>0: - options = '[%s]' % (','.join(include_graphics_options)) - self.body.append( '\\includegraphics%s{%s}' % ( - options, attrs['uri'] ) ) - self.body.extend( post ) - - def depart_image(self, node): - pass - - def visit_important(self, node): - self.visit_admonition(node, 'important') - - def depart_important(self, node): - self.depart_admonition() - - def visit_interpreted(self, node): - # @@@ Incomplete, pending a proper implementation on the - # Parser/Reader end. 
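The length conversion used for the image width/height options above reduces to two cases: pixel values are rewritten as points and percentages become fractions of \linewidth; anything else is passed through untouched. A minimal standalone sketch of that conversion (the function name image_length_to_latex is illustrative):

    import re

    # "75px" -> "75pt", "50%" -> "0.500\linewidth", "3cm" -> "3cm" (unchanged).
    def image_length_to_latex(width_str):
        match = re.match(r'(\d*\.?\d*)\s*(\S*)', width_str)
        if not match:
            return width_str          # fallback: emit the string as given
        amount, unit = match.groups()[:2]
        if unit == "px":
            return "%spt" % amount    # LaTeX knows points, not pixels
        if unit == "%":
            return "%.3f\\linewidth" % (float(amount) / 100.0)
        return width_str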
- self.visit_literal(node) - - def depart_interpreted(self, node): - self.depart_literal(node) - - def visit_legend(self, node): - self.body.append('{\\small ') - - def depart_legend(self, node): - self.body.append('}') - - def visit_line(self, node): - self.body.append('\item[] ') - - def depart_line(self, node): - self.body.append('\n') - - def visit_line_block(self, node): - if isinstance(node.parent, nodes.line_block): - self.body.append('\\item[] \n' - '\\begin{lineblock}{\\lineblockindentation}\n') - else: - self.body.append('\n\\begin{lineblock}{0em}\n') - - def depart_line_block(self, node): - self.body.append('\\end{lineblock}\n') - - def visit_list_item(self, node): - # Append "{}" in case the next character is "[", which would break - # LaTeX's list environment (no numbering and the "[" is not printed). - self.body.append('\\item {} ') - - def depart_list_item(self, node): - self.body.append('\n') - - def visit_literal(self, node): - self.literal = 1 - self.body.append('\\texttt{') - - def depart_literal(self, node): - self.body.append('}') - self.literal = 0 - - def visit_literal_block(self, node): - """ - Render a literal-block. - - Literal blocks are used for "::"-prefixed literal-indented - blocks of text, where the inline markup is not recognized, - but are also the product of the parsed-literal directive, - where the markup is respected. - """ - # In both cases, we want to use a typewriter/monospaced typeface. - # For "real" literal-blocks, we can use \verbatim, while for all - # the others we must use \mbox. - # - # We can distinguish between the two kinds by the number of - # siblings that compose this node: if it is composed by a - # single element, it's surely either a real one or a - # parsed-literal that does not contain any markup. - # - if not self.active_table.is_open(): - # no quote inside tables, to avoid vertical space between - # table border and literal block. - # BUG: fails if normal text preceeds the literal block. - self.body.append('\\begin{quote}') - self.context.append('\\end{quote}\n') - else: - self.body.append('\n') - self.context.append('\n') - if (self.settings.use_verbatim_when_possible and (len(node) == 1) - # in case of a parsed-literal containing just a "**bold**" word: - and isinstance(node[0], nodes.Text)): - self.verbatim = 1 - self.body.append('\\begin{verbatim}\n') - else: - self.literal_block = 1 - self.insert_none_breaking_blanks = 1 - self.body.append('{\\ttfamily \\raggedright \\noindent\n') - # * obey..: is from julien and never worked for me (grubert). - # self.body.append('{\\obeylines\\obeyspaces\\ttfamily\n') - - def depart_literal_block(self, node): - if self.verbatim: - self.body.append('\n\\end{verbatim}\n') - self.verbatim = 0 - else: - self.body.append('\n}') - self.insert_none_breaking_blanks = 0 - self.literal_block = 0 - # obey end: self.body.append('}\n') - self.body.append(self.context.pop()) - - def visit_meta(self, node): - self.body.append('[visit_meta]\n') - # BUG maybe set keywords for pdf - ##self.head.append(self.starttag(node, 'meta', **node.attributes)) - - def depart_meta(self, node): - self.body.append('[depart_meta]\n') - - def visit_note(self, node): - self.visit_admonition(node, 'note') - - def depart_note(self, node): - self.depart_admonition() - - def visit_option(self, node): - if self.context[-1]: - # this is not the first option - self.body.append(', ') - - def depart_option(self, node): - # flag tha the first option is done. 
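The verbatim-or-not decision documented in visit_literal_block above hinges on one test: a block whose only child is a plain Text node carries no inline markup and can safely go into a verbatim environment, while a parsed-literal containing markup has to fall back to the {\ttfamily ...} form. A minimal sketch of that predicate (use_verbatim is an illustrative name, not part of the writer):

    from docutils import nodes

    # True when a literal block can be rendered with \begin{verbatim}: the
    # "use verbatim when possible" setting is on and the block consists of a
    # single plain Text child (i.e. no nested inline markup nodes).
    def use_verbatim(node, use_verbatim_when_possible=True):
        return (use_verbatim_when_possible
                and len(node) == 1
                and isinstance(node[0], nodes.Text))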
- self.context[-1] += 1 - - def visit_option_argument(self, node): - """The delimiter betweeen an option and its argument.""" - self.body.append(node.get('delimiter', ' ')) - - def depart_option_argument(self, node): - pass - - def visit_option_group(self, node): - self.body.append('\\item [') - # flag for first option - self.context.append(0) - - def depart_option_group(self, node): - self.context.pop() # the flag - self.body.append('] ') - - def visit_option_list(self, node): - self.body.append('\\begin{optionlist}{3cm}\n') - - def depart_option_list(self, node): - self.body.append('\\end{optionlist}\n') - - def visit_option_list_item(self, node): - pass - - def depart_option_list_item(self, node): - pass - - def visit_option_string(self, node): - ##self.body.append(self.starttag(node, 'span', '', CLASS='option')) - pass - - def depart_option_string(self, node): - ##self.body.append('') - pass - - def visit_organization(self, node): - self.visit_docinfo_item(node, 'organization') - - def depart_organization(self, node): - self.depart_docinfo_item(node) - - def visit_paragraph(self, node): - index = node.parent.index(node) - if not ('contents' in self.topic_classes or - (isinstance(node.parent, nodes.compound) and - index > 0 and - not isinstance(node.parent[index - 1], nodes.paragraph) and - not isinstance(node.parent[index - 1], nodes.compound))): - self.body.append('\n') - - def depart_paragraph(self, node): - self.body.append('\n') - - def visit_problematic(self, node): - self.body.append('{\\color{red}\\bfseries{}') - - def depart_problematic(self, node): - self.body.append('}') - - def visit_raw(self, node): - if 'latex' in node.get('format', '').split(): - self.body.append(node.astext()) - raise nodes.SkipNode - - def visit_reference(self, node): - # BUG: hash_char "#" is trouble some in LaTeX. - # mbox and other environment do not like the '#'. 
- hash_char = '\\#' - if node.has_key('refuri'): - href = node['refuri'].replace('#',hash_char) - elif node.has_key('refid'): - href = hash_char + node['refid'] - elif node.has_key('refname'): - href = hash_char + self.document.nameids[node['refname']] - else: - raise AssertionError('Unknown reference.') - self.body.append('\\href{%s}{' % href) - if self._reference_label and not node.has_key('refuri'): - self.body.append('\\%s{%s}}' % (self._reference_label, - href.replace(hash_char, ''))) - raise nodes.SkipNode - - def depart_reference(self, node): - self.body.append('}') - - def visit_revision(self, node): - self.visit_docinfo_item(node, 'revision') - - def depart_revision(self, node): - self.depart_docinfo_item(node) - - def visit_section(self, node): - self.section_level += 1 - # Initialize counter for potential subsections: - self._section_number.append(0) - # Counter for this section's level (initialized by parent section): - self._section_number[self.section_level - 1] += 1 - - def depart_section(self, node): - # Remove counter for potential subsections: - self._section_number.pop() - self.section_level -= 1 - - def visit_sidebar(self, node): - # BUG: this is just a hack to make sidebars render something - self.body.append('\n\\setlength{\\locallinewidth}{0.9\\admonitionwidth}\n') - self.body.append('\\begin{center}\\begin{sffamily}\n') - self.body.append('\\fbox{\\colorbox[gray]{0.80}{\\parbox{\\admonitionwidth}{\n') - - def depart_sidebar(self, node): - self.body.append('}}}\n') # end parbox colorbox fbox - self.body.append('\\end{sffamily}\n\\end{center}\n'); - self.body.append('\n\\setlength{\\locallinewidth}{\\linewidth}\n') - - - attribution_formats = {'dash': ('---', ''), - 'parentheses': ('(', ')'), - 'parens': ('(', ')'), - 'none': ('', '')} - - def visit_attribution(self, node): - prefix, suffix = self.attribution_formats[self.settings.attribution] - self.body.append('\n\\begin{flushright}\n') - self.body.append(prefix) - self.context.append(suffix) - - def depart_attribution(self, node): - self.body.append(self.context.pop() + '\n') - self.body.append('\\end{flushright}\n') - - def visit_status(self, node): - self.visit_docinfo_item(node, 'status') - - def depart_status(self, node): - self.depart_docinfo_item(node) - - def visit_strong(self, node): - self.body.append('\\textbf{') - self.literal_block_stack.append('\\textbf{') - - def depart_strong(self, node): - self.body.append('}') - self.literal_block_stack.pop() - - def visit_substitution_definition(self, node): - raise nodes.SkipNode - - def visit_substitution_reference(self, node): - self.unimplemented_visit(node) - - def visit_subtitle(self, node): - if isinstance(node.parent, nodes.sidebar): - self.body.append('~\\\\\n\\textbf{') - self.context.append('}\n\\smallskip\n') - elif isinstance(node.parent, nodes.document): - self.title = self.title + \ - '\\\\\n\\large{%s}\n' % self.encode(node.astext()) - raise nodes.SkipNode - elif isinstance(node.parent, nodes.section): - self.body.append('\\textbf{') - self.context.append('}\\vspace{0.2cm}\n\n\\noindent ') - - def depart_subtitle(self, node): - self.body.append(self.context.pop()) - - def visit_system_message(self, node): - pass - - def depart_system_message(self, node): - self.body.append('\n') - - def visit_table(self, node): - if self.active_table.is_open(): - self.table_stack.append(self.active_table) - # nesting longtable does not work (e.g. 
2007-04-18) - self.active_table = Table('tabular',self.settings.table_style) - self.active_table.open() - for cl in node['classes']: - self.active_table.set_table_style(cl) - self.body.append('\n' + self.active_table.get_opening()) - - def depart_table(self, node): - self.body.append(self.active_table.get_closing() + '\n') - self.active_table.close() - if len(self.table_stack)>0: - self.active_table = self.table_stack.pop() - else: - self.active_table.set_table_style(self.settings.table_style) - - def visit_target(self, node): - # BUG: why not (refuri or refid or refname) means not footnote ? - if not (node.has_key('refuri') or node.has_key('refid') - or node.has_key('refname')): - for id in node['ids']: - self.body.append('\\hypertarget{%s}{' % id) - self.context.append('}' * len(node['ids'])) - elif node.get("refid"): - self.body.append('\\hypertarget{%s}{' % node.get("refid")) - self.context.append('}') - else: - self.context.append('') - - def depart_target(self, node): - self.body.append(self.context.pop()) - - def visit_tbody(self, node): - # BUG write preamble if not yet done (colspecs not []) - # for tables without heads. - if not self.active_table.get('preamble written'): - self.visit_thead(None) - # self.depart_thead(None) - - def depart_tbody(self, node): - pass - - def visit_term(self, node): - self.body.append('\\item[{') - - def depart_term(self, node): - # definition list term. - # \leavevmode results in a line break if the term is followed by a item list. - self.body.append('}] \leavevmode ') - - def visit_tgroup(self, node): - #self.body.append(self.starttag(node, 'colgroup')) - #self.context.append('\n') - pass - - def depart_tgroup(self, node): - pass - - def visit_thead(self, node): - self.body.append('{%s}\n' % self.active_table.get_colspecs()) - if self.active_table.caption: - self.body.append('\\caption{%s}\\\\\n' % self.active_table.caption) - self.active_table.set('preamble written',1) - # TODO longtable supports firsthead and lastfoot too. - self.body.extend(self.active_table.visit_thead()) - - def depart_thead(self, node): - # the table header written should be on every page - # => \endhead - self.body.extend(self.active_table.depart_thead()) - # and the firsthead => \endfirsthead - # BUG i want a "continued from previous page" on every not - # firsthead, but then we need the header twice. - # - # there is a \endfoot and \endlastfoot too. - # but we need the number of columns to - # self.body.append('\\multicolumn{%d}{c}{"..."}\n' % number_of_columns) - # self.body.append('\\hline\n\\endfoot\n') - # self.body.append('\\hline\n') - # self.body.append('\\endlastfoot\n') - - def visit_tip(self, node): - self.visit_admonition(node, 'tip') - - def depart_tip(self, node): - self.depart_admonition() - - def bookmark(self, node): - """Append latex href and pdfbookmarks for titles. - """ - if node.parent['ids']: - for id in node.parent['ids']: - self.body.append('\\hypertarget{%s}{}\n' % id) - if not self.use_latex_toc: - # BUG level depends on style. pdflatex allows level 0 to 3 - # ToC would be the only on level 0 so i choose to decrement the rest. - # "Table of contents" bookmark to see the ToC. To avoid this - # we set all zeroes to one. 
- l = self.section_level - if l>0: - l = l-1 - # pdftex does not like "_" subscripts in titles - text = self.encode(node.astext()) - for id in node.parent['ids']: - self.body.append('\\pdfbookmark[%d]{%s}{%s}\n' % \ - (l, text, id)) - - def visit_title(self, node): - """Section and other titles.""" - - if isinstance(node.parent, nodes.topic): - # the table of contents. - self.bookmark(node) - if ('contents' in self.topic_classes - and self.use_latex_toc): - self.body.append('\\renewcommand{\\contentsname}{') - self.context.append('}\n\\tableofcontents\n\n\\bigskip\n') - elif ('abstract' in self.topic_classes - and self.settings.use_latex_abstract): - raise nodes.SkipNode - else: # or section titles before the table of contents. - # BUG: latex chokes on center environment with - # "perhaps a missing item", therefore we use hfill. - self.body.append('\\subsubsection*{~\\hfill ') - # the closing brace for subsection. - self.context.append('\\hfill ~}\n') - # TODO: for admonition titles before the first section - # either specify every possible node or ... ? - elif isinstance(node.parent, nodes.sidebar) \ - or isinstance(node.parent, nodes.admonition): - self.body.append('\\textbf{\\large ') - self.context.append('}\n\\smallskip\n') - elif isinstance(node.parent, nodes.table): - # caption must be written after column spec - self.active_table.caption = self.encode(node.astext()) - raise nodes.SkipNode - elif self.section_level == 0: - # document title - self.title = self.encode(node.astext()) - if not self.pdfinfo == None: - self.pdfinfo.append( 'pdftitle={%s}' % self.encode(node.astext()) ) - raise nodes.SkipNode - else: - self.body.append('\n\n') - self.body.append('%' + '_' * 75) - self.body.append('\n\n') - self.bookmark(node) - - if self.use_latex_toc: - section_star = "" - else: - section_star = "*" - - section_name = self.d_class.section(self.section_level) - self.body.append('\\%s%s{' % (section_name, section_star)) - # MAYBE postfix paragraph and subparagraph with \leavemode to - # ensure floatables stay in the section and text starts on a new line. 
- self.context.append('}\n') - - def depart_title(self, node): - self.body.append(self.context.pop()) - for id in node.parent['ids']: - self.body.append('\\label{%s}\n' % id) - - def visit_topic(self, node): - self.topic_classes = node['classes'] - if ('abstract' in self.topic_classes - and self.settings.use_latex_abstract): - self.body.append('\\begin{abstract}\n') - - def depart_topic(self, node): - if ('abstract' in self.topic_classes - and self.settings.use_latex_abstract): - self.body.append('\\end{abstract}\n') - self.topic_classes = [] - if 'contents' in node['classes'] and self.use_latex_toc: - pass - else: - self.body.append('\n') - - def visit_inline(self, node): # titlereference - classes = node.get('classes', ['Unknown', ]) - for cls in classes: - self.body.append( '\\docutilsrole%s{' % cls) - self.context.append('}'*len(classes)) - - def depart_inline(self, node): - self.body.append(self.context.pop()) - - def visit_rubric(self, node): - self.body.append('\\rubric{') - self.context.append('}\n') - - def depart_rubric(self, node): - self.body.append(self.context.pop()) - - def visit_transition(self, node): - self.body.append('\n\n') - self.body.append('%' + '_' * 75) - self.body.append('\n\\hspace*{\\fill}\\hrulefill\\hspace*{\\fill}') - self.body.append('\n\n') - - def depart_transition(self, node): - pass - - def visit_version(self, node): - self.visit_docinfo_item(node, 'version') - - def depart_version(self, node): - self.depart_docinfo_item(node) - - def visit_warning(self, node): - self.visit_admonition(node, 'warning') - - def depart_warning(self, node): - self.depart_admonition() - - def unimplemented_visit(self, node): - raise NotImplementedError('visiting unimplemented node type: %s' - % node.__class__.__name__) - -# def unknown_visit(self, node): -# def default_visit(self, node): - -# vim: set ts=4 et ai : diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/latex2e/latex2e.tex --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/latex2e/latex2e.tex Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,74 +0,0 @@ -% latex include file for docutils latex writer -% -------------------------------------------- -% -% CVS: $Id: latex2e.tex 4163 2005-12-09 04:21:34Z goodger $ -% -% This is included at the end of the latex header in the generated file, -% to allow overwriting defaults, although this could get hairy. -% Generated files should process well standalone too, LaTeX might give a -% message about a missing file. - -% donot indent first line of paragraph. -\setlength{\parindent}{0pt} -\setlength{\parskip}{5pt plus 2pt minus 1pt} - -% sloppy -% ------ -% Less strict (opposite to default fussy) space size between words. Therefore -% less hyphenation. -\sloppy - -% fonts -% ----- -% times for pdf generation, gives smaller pdf files. -% -% But in standard postscript fonts: courier and times/helvetica do not fit. -% Maybe use pslatex. -\usepackage{times} - -% pagestyle -% --------- -% headings might put section titles in the page heading, but not if -% the table of contents is done by docutils. -% If pagestyle{headings} is used, \geometry{headheight=10pt,headsep=1pt} -% should be set too. -%\pagestyle{plain} -% -% or use fancyhdr (untested !) 
-%\usepackage{fancyhdr} -%\pagestyle{fancy} -%\addtolength{\headheight}{\\baselineskip} -%\renewcommand{\sectionmark}[1]{\markboth{#1}{}} -%\renewcommand{\subsectionmark}[1]{\markright{#1}} -%\fancyhf{} -%\fancyhead[LE,RO]{\\bfseries\\textsf{\Large\\thepage}} -%\fancyhead[LO]{\\textsf{\\footnotesize\\rightmark}} -%\fancyhead[RE]{\\textsc{\\textsf{\\footnotesize\leftmark}}} -%\\fancyfoot[LE,RO]{\\bfseries\\textsf{\scriptsize Docutils}} -%\fancyfoot[RE,LO]{\\textsf{\scriptsize\\today}} - -% geometry -% -------- -% = papersizes and margins -%\geometry{a4paper,twoside,tmargin=1.5cm, -% headheight=1cm,headsep=0.75cm} - -% Do section number display -% ------------------------- -%\makeatletter -%\def\@seccntformat#1{} -%\makeatother -% no numbers in toc -%\renewcommand{\numberline}[1]{} - - -% change maketitle -% ---------------- -%\renewcommand{\maketitle}{ -% \begin{titlepage} -% \begin{center} -% \textsf{TITLE \@title} \\ -% Date: \today -% \end{center} -% \end{titlepage} -%} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/newlatex2e/__init__.py --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/newlatex2e/__init__.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,825 +0,0 @@ -# $Id: __init__.py 5174 2007-05-31 00:01:52Z wiemann $ -# Author: Lea Wiemann -# Copyright: This module has been placed in the public domain. - -""" -LaTeX2e document tree Writer. -""" - -# Thanks to Engelbert Gruber and various contributors for the original -# LaTeX writer, some code and many ideas of which have been used for -# this writer. - -__docformat__ = 'reStructuredText' - - -import re -import os.path -from types import ListType - -import docutils -from docutils import nodes, writers, utils -from docutils.writers.newlatex2e import unicode_map -from docutils.transforms import writer_aux - - -class Writer(writers.Writer): - - supported = ('newlatex', 'newlatex2e') - """Formats this writer supports.""" - - default_stylesheet = 'base.tex' - - default_stylesheet_path = utils.relative_path( - os.path.join(os.getcwd(), 'dummy'), - os.path.join(os.path.dirname(__file__), default_stylesheet)) - - settings_spec = ( - 'LaTeX-Specific Options', - 'Note that this LaTeX writer is still EXPERIMENTAL and not ' - 'feature-complete. ', - (('Specify a stylesheet file. The path is used verbatim to include ' - 'the file. Overrides --stylesheet-path.', - ['--stylesheet'], - {'default': '', 'metavar': '', - 'overrides': 'stylesheet_path'}), - ('Specify a stylesheet file, relative to the current working ' - 'directory. Overrides --stylesheet. Default: "%s"' - % default_stylesheet_path, - ['--stylesheet-path'], - {'metavar': '', 'overrides': 'stylesheet', - 'default': default_stylesheet_path}), - ('Specify a user stylesheet file. See --stylesheet.', - ['--user-stylesheet'], - {'default': '', 'metavar': '', - 'overrides': 'user_stylesheet_path'}), - ('Specify a user stylesheet file. See --stylesheet-path.', - ['--user-stylesheet-path'], - {'metavar': '', 'overrides': 'user_stylesheet'}) - ),) - - settings_defaults = { - # Many Unicode characters are provided by unicode_map.py, so - # we can default to latin-1. - 'output_encoding': 'latin-1', - 'output_encoding_error_handler': 'strict', - # Since we are using superscript footnotes, it is necessary to - # trim whitespace in front of footnote references. 
- 'trim_footnote_reference_space': 1, - # Currently unsupported: - 'docinfo_xform': 0, - # During development: - 'traceback': 1 - } - - relative_path_settings = ('stylesheet_path', 'user_stylesheet_path') - - config_section = 'newlatex2e writer' - config_section_dependencies = ('writers',) - - output = None - """Final translated form of `document`.""" - - def get_transforms(self): - return writers.Writer.get_transforms(self) + [ - writer_aux.Compound, writer_aux.Admonitions] - - def __init__(self): - writers.Writer.__init__(self) - self.translator_class = LaTeXTranslator - - def translate(self): - visitor = self.translator_class(self.document) - self.document.walkabout(visitor) - assert not visitor.context, 'context not empty: %s' % visitor.context - self.output = visitor.astext() - self.head = visitor.header - self.body = visitor.body - - -class LaTeXException(Exception): - """ - Exception base class to for exceptions which influence the - automatic generation of LaTeX code. - """ - - -class SkipAttrParentLaTeX(LaTeXException): - """ - Do not generate ``\DECattr`` and ``\renewcommand{\DEVparent}{...}`` for this - node. - - To be raised from ``before_...`` methods. - """ - - -class SkipParentLaTeX(LaTeXException): - """ - Do not generate ``\renewcommand{\DEVparent}{...}`` for this node. - - To be raised from ``before_...`` methods. - """ - - -class LaTeXTranslator(nodes.SparseNodeVisitor): - - # Country code by a.schlock. - # Partly manually converted from iso and babel stuff. - iso639_to_babel = { - 'no': 'norsk', # added by hand - 'gd': 'scottish', # added by hand - 'sl': 'slovenian', - 'af': 'afrikaans', - 'bg': 'bulgarian', - 'br': 'breton', - 'ca': 'catalan', - 'cs': 'czech', - 'cy': 'welsh', - 'da': 'danish', - 'fr': 'french', - # french, francais, canadien, acadian - 'de': 'ngerman', - # ngerman, naustrian, german, germanb, austrian - 'el': 'greek', - 'en': 'english', - # english, USenglish, american, UKenglish, british, canadian - 'eo': 'esperanto', - 'es': 'spanish', - 'et': 'estonian', - 'eu': 'basque', - 'fi': 'finnish', - 'ga': 'irish', - 'gl': 'galician', - 'he': 'hebrew', - 'hr': 'croatian', - 'hu': 'hungarian', - 'is': 'icelandic', - 'it': 'italian', - 'la': 'latin', - 'nl': 'dutch', - 'pl': 'polish', - 'pt': 'portuguese', - 'ro': 'romanian', - 'ru': 'russian', - 'sk': 'slovak', - 'sr': 'serbian', - 'sv': 'swedish', - 'tr': 'turkish', - 'uk': 'ukrainian' - } - - # Start with left double quote. - left_quote = 1 - - def __init__(self, document): - nodes.NodeVisitor.__init__(self, document) - self.settings = document.settings - self.header = [] - self.body = [] - self.context = [] - self.stylesheet_path = utils.get_stylesheet_reference( - self.settings, os.path.join(os.getcwd(), 'dummy')) - if self.stylesheet_path: - self.settings.record_dependencies.add(self.stylesheet_path) - # This ugly hack will be cleaned up when refactoring the - # stylesheet mess. - self.settings.stylesheet = self.settings.user_stylesheet - self.settings.stylesheet_path = self.settings.user_stylesheet_path - self.user_stylesheet_path = utils.get_stylesheet_reference( - self.settings, os.path.join(os.getcwd(), 'dummy')) - if self.user_stylesheet_path: - self.settings.record_dependencies.add(self.user_stylesheet_path) - self.write_header() - - def write_header(self): - a = self.header.append - a('%% Generated by Docutils %s .' 
- % docutils.__version__) - a('') - a('% Docutils settings:') - lang = self.settings.language_code or '' - a(r'\providecommand{\DEVlanguageiso}{%s}' % lang) - a(r'\providecommand{\DEVlanguagebabel}{%s}' % self.iso639_to_babel.get( - lang, self.iso639_to_babel.get(lang.split('_')[0], ''))) - a('') - if self.user_stylesheet_path: - a('% User stylesheet:') - a(r'\input{%s}' % self.user_stylesheet_path) - a('% Docutils stylesheet:') - a(r'\input{%s}' % self.stylesheet_path) - a('') - a('% Default definitions for Docutils nodes:') - for node_name in nodes.node_class_names: - a(r'\providecommand{\DN%s}[1]{#1}' % node_name.replace('_', '')) - a('') - a('% Auxiliary definitions:') - for attr in (r'\DEVparent \DEVattrlen \DEVtitleastext ' - r'\DEVsinglebackref \DEVmultiplebackrefs' - ).split(): - # Later set using \renewcommand. - a(r'\providecommand{%s}{DOCUTILSUNINITIALIZEDVARIABLE}' % attr) - for attr in (r'\DEVparagraphindented \DEVhassubtitle').split(): - # Initialize as boolean variables. - a(r'\providecommand{%s}{false}' % attr) - a('\n\n') - - unicode_map = unicode_map.unicode_map # comprehensive Unicode map - # Fix problems with unimap.py. - unicode_map.update({ - # We have AE or T1 encoding, so "``" etc. work. The macros - # from unimap.py may *not* work. - u'\u201C': '{``}', - u'\u201D': "{''}", - u'\u201E': '{,,}', - }) - - character_map = { - '\\': r'{\textbackslash}', - '{': r'{\{}', - '}': r'{\}}', - '$': r'{\$}', - '&': r'{\&}', - '%': r'{\%}', - '#': r'{\#}', - '[': r'{[}', - ']': r'{]}', - '-': r'{-}', - '`': r'{`}', - "'": r"{'}", - ',': r'{,}', - '"': r'{"}', - '|': r'{\textbar}', - '<': r'{\textless}', - '>': r'{\textgreater}', - '^': r'{\textasciicircum}', - '~': r'{\textasciitilde}', - '_': r'{\DECtextunderscore}', - } - character_map.update(unicode_map) - #character_map.update(special_map) - - # `att_map` is for encoding attributes. According to - # , - # the following characters are special: # $ % & ~ _ ^ \ { } - # These work without special treatment in macro parameters: - # $, &, ~, _, ^ - att_map = {'#': '\\#', - '%': '\\%', - # We cannot do anything about backslashes. - '\\': '', - '{': '\\{', - '}': '\\}', - # The quotation mark may be redefined by babel. - '"': '"{}', - } - att_map.update(unicode_map) - - def encode(self, text, attval=None): - """ - Encode special characters in ``text`` and return it. - - If attval is true, preserve as much as possible verbatim (used - in attribute value encoding). If attval is 'width' or - 'height', `text` is interpreted as a length value. - """ - if attval in ('width', 'height'): - match = re.match(r'([0-9.]+)(\S*)$', text) - assert match, '%s="%s" must be a length' % (attval, text) - value, unit = match.groups() - if unit == '%': - value = str(float(value) / 100) - unit = r'\DECrelativeunit' - elif unit in ('', 'px'): - # If \DECpixelunit is "pt", this gives the same notion - # of pixels as graphicx. This is a bit of a hack. - value = str(float(value) * 0.75) - unit = '\DECpixelunit' - return '%s%s' % (value, unit) - if attval: - get = self.att_map.get - else: - get = self.character_map.get - text = ''.join([get(c, c) for c in text]) - if (self.literal_block or self.inline_literal) and not attval: - # NB: We can have inline literals within literal blocks. - # Shrink '\r\n'. - text = text.replace('\r\n', '\n') - # Convert space. If "{ }~~~~~" is wrapped (at the - # brace-enclosed space "{ }"), the following non-breaking - # spaces ("~~~~") do *not* wind up at the beginning of the - # next line. 
Also note that no hyphenation is done if the - # breaking space ("{ }") comes *after* the non-breaking - # spaces. - if self.literal_block: - # Replace newlines with real newlines. - text = text.replace('\n', '\mbox{}\\\\{}') - replace_fn = self.encode_replace_for_literal_block_spaces - else: - replace_fn = self.encode_replace_for_inline_literal_spaces - text = re.sub(r'\s+', replace_fn, text) - # Protect hyphens; if we don't, line breaks will be - # possible at the hyphens and even the \textnhtt macro - # from the hyphenat package won't change that. - text = text.replace('-', r'\mbox{-}') - text = text.replace("'", r'{\DECtextliteralsinglequote}') - return text - else: - if not attval: - # Replace space with single protected space. - text = re.sub(r'\s+', '{ }', text) - # Replace double quotes with macro calls. - L = [] - for part in text.split(self.character_map['"']): - if L: - # Insert quote. - L.append(self.left_quote and r'{\DECtextleftdblquote}' - or r'{\DECtextrightdblquote}') - self.left_quote = not self.left_quote - L.append(part) - return ''.join(L) - else: - return text - - def encode_replace_for_literal_block_spaces(self, match): - return '~' * len(match.group()) - - def encode_replace_for_inline_literal_spaces(self, match): - return '{ }' + '~' * (len(match.group()) - 1) - - def astext(self): - return '\n'.join(self.header) + (''.join(self.body)) - - def append(self, text, newline='%\n'): - """ - Append text, stripping newlines, producing nice LaTeX code. - """ - lines = [' ' * self.indentation_level + line + newline - for line in text.splitlines(0)] - self.body.append(''.join(lines)) - - def visit_Text(self, node): - self.append(self.encode(node.astext())) - - def depart_Text(self, node): - pass - - def is_indented(self, paragraph): - """Return true if `paragraph` should be first-line-indented.""" - assert isinstance(paragraph, nodes.paragraph) - siblings = [n for n in paragraph.parent if - self.is_visible(n) and not isinstance(n, nodes.Titular)] - index = siblings.index(paragraph) - if ('continued' in paragraph['classes'] or - index > 0 and isinstance(siblings[index-1], nodes.transition)): - return 0 - # Indent all but the first paragraphs. - return index > 0 - - def before_paragraph(self, node): - self.append(r'\renewcommand{\DEVparagraphindented}{%s}' - % (self.is_indented(node) and 'true' or 'false')) - - def before_title(self, node): - self.append(r'\renewcommand{\DEVtitleastext}{%s}' - % self.encode(node.astext())) - self.append(r'\renewcommand{\DEVhassubtitle}{%s}' - % ((len(node.parent) > 2 and - isinstance(node.parent[1], nodes.subtitle)) - and 'true' or 'false')) - - def before_generated(self, node): - if 'sectnum' in node['classes']: - node[0] = node[0].strip() - - literal_block = 0 - - def visit_literal_block(self, node): - self.literal_block = 1 - - def depart_literal_block(self, node): - self.literal_block = 0 - - visit_doctest_block = visit_literal_block - depart_doctest_block = depart_literal_block - - inline_literal = 0 - - def visit_literal(self, node): - self.inline_literal += 1 - - def depart_literal(self, node): - self.inline_literal -= 1 - - def _make_encodable(self, text): - """ - Return text (a unicode object) with all unencodable characters - replaced with '?'. - - Thus, the returned unicode string is guaranteed to be encodable. 
- """ - encoding = self.settings.output_encoding - return text.encode(encoding, 'replace').decode(encoding) - - def visit_comment(self, node): - """ - Insert the comment unchanged into the document, replacing - unencodable characters with '?'. - - (This is done in order not to fail if comments contain unencodable - characters, because our default encoding is not UTF-8.) - """ - self.append('\n'.join(['% ' + self._make_encodable(line) for line - in node.astext().splitlines(0)]), newline='\n') - raise nodes.SkipChildren - - def before_topic(self, node): - if 'contents' in node['classes']: - for bullet_list in list(node.traverse(nodes.bullet_list)): - p = bullet_list.parent - if isinstance(p, nodes.list_item): - p.parent.insert(p.parent.index(p) + 1, bullet_list) - del p[1] - for paragraph in node.traverse(nodes.paragraph): - paragraph.attributes.update(paragraph[0].attributes) - paragraph[:] = paragraph[0] - paragraph.parent['tocrefid'] = paragraph['refid'] - node['contents'] = 1 - else: - node['contents'] = 0 - - bullet_list_level = 0 - - def visit_bullet_list(self, node): - self.append(r'\DECsetbullet{\labelitem%s}' % - ['i', 'ii', 'iii', 'iv'][min(self.bullet_list_level, 3)]) - self.bullet_list_level += 1 - - def depart_bullet_list(self, node): - self.bullet_list_level -= 1 - - enum_styles = {'arabic': 'arabic', 'loweralpha': 'alph', 'upperalpha': - 'Alph', 'lowerroman': 'roman', 'upperroman': 'Roman'} - - enum_counter = 0 - - def visit_enumerated_list(self, node): - # We create our own enumeration list environment. This allows - # to set the style and starting value and unlimited nesting. - # Maybe the actual creation (\DEC) can be moved to the - # stylesheet? - self.enum_counter += 1 - enum_prefix = self.encode(node['prefix']) - enum_suffix = self.encode(node['suffix']) - enum_type = '\\' + self.enum_styles.get(node['enumtype'], r'arabic') - start = node.get('start', 1) - 1 - counter = 'Denumcounter%d' % self.enum_counter - self.append(r'\DECmakeenumeratedlist{%s}{%s}{%s}{%s}{%s}{' - % (enum_prefix, enum_type, enum_suffix, counter, start)) - # for Emacs: } - - def depart_enumerated_list(self, node): - self.append('}') # for Emacs: { - - def before_list_item(self, node): - # XXX needs cleanup. - if (len(node) and (isinstance(node[-1], nodes.TextElement) or - isinstance(node[-1], nodes.Text)) and - node.parent.index(node) == len(node.parent) - 1): - node['lastitem'] = 'true' - - before_line = before_list_item - - def before_raw(self, node): - if 'latex' in node.get('format', '').split(): - # We're inserting the text in before_raw and thus outside - # of \DN... and \DECattr in order to make grouping with - # curly brackets work. - self.append(node.astext()) - raise nodes.SkipChildren - - def process_backlinks(self, node, type): - """ - Add LaTeX handling code for backlinks of footnote or citation - node `node`. `type` is either 'footnote' or 'citation'. - """ - self.append(r'\renewcommand{\DEVsinglebackref}{}') - self.append(r'\renewcommand{\DEVmultiplebackrefs}{}') - if len(node['backrefs']) > 1: - refs = [] - for i in range(len(node['backrefs'])): - # \DECmulticitationbacklink or \DECmultifootnotebacklink. 
- refs.append(r'\DECmulti%sbacklink{%s}{%s}' - % (type, node['backrefs'][i], i + 1)) - self.append(r'\renewcommand{\DEVmultiplebackrefs}{(%s){ }}' - % ', '.join(refs)) - elif len(node['backrefs']) == 1: - self.append(r'\renewcommand{\DEVsinglebackref}{%s}' - % node['backrefs'][0]) - - def visit_footnote(self, node): - self.process_backlinks(node, 'footnote') - - def visit_citation(self, node): - self.process_backlinks(node, 'citation') - - def before_table(self, node): - # A table contains exactly one tgroup. See before_tgroup. - pass - - def before_tgroup(self, node): - widths = [] - total_width = 0 - for i in range(int(node['cols'])): - assert isinstance(node[i], nodes.colspec) - widths.append(int(node[i]['colwidth']) + 1) - total_width += widths[-1] - del node[:len(widths)] - tablespec = '|' - for w in widths: - # 0.93 is probably wrong in many cases. XXX Find a - # solution which works *always*. - tablespec += r'p{%s\textwidth}|' % (0.93 * w / - max(total_width, 60)) - self.append(r'\DECmaketable{%s}{' % tablespec) - self.context.append('}') - raise SkipAttrParentLaTeX - - def depart_tgroup(self, node): - self.append(self.context.pop()) - - def before_row(self, node): - raise SkipAttrParentLaTeX - - def before_thead(self, node): - raise SkipAttrParentLaTeX - - def before_tbody(self, node): - raise SkipAttrParentLaTeX - - def is_simply_entry(self, node): - return (len(node) == 1 and isinstance(node[0], nodes.paragraph) or - len(node) == 0) - - def before_entry(self, node): - is_leftmost = 0 - if node.hasattr('morerows'): - self.document.reporter.severe('Rowspans are not supported.') - # Todo: Add empty cells below rowspanning cell and issue - # warning instead of severe. - if node.hasattr('morecols'): - # The author got a headache trying to implement - # multicolumn support. - if not self.is_simply_entry(node): - self.document.reporter.severe( - 'Colspanning table cells may only contain one paragraph.') - # Todo: Same as above. - # The number of columns this entry spans (as a string). - colspan = int(node['morecols']) + 1 - del node['morecols'] - else: - colspan = 1 - # Macro to call -- \DECcolspan or \DECcolspanleft. - macro_name = r'\DECcolspan' - if node.parent.index(node) == 0: - # Leftmost column. - macro_name += 'left' - is_leftmost = 1 - if colspan > 1: - self.append('%s{%s}{' % (macro_name, colspan)) - self.context.append('}') - else: - # Do not add a multicolumn with colspan 1 beacuse we need - # at least one non-multicolumn cell per column to get the - # desired column widths, and we can only do colspans with - # cells consisting of only one paragraph. - if not is_leftmost: - self.append(r'\DECsubsequententry{') - self.context.append('}') - else: - self.context.append('') - if isinstance(node.parent.parent, nodes.thead): - node['tableheaderentry'] = 'true' - - # Don't add \renewcommand{\DEVparent}{...} because there must - # not be any non-expandable commands in front of \multicolumn. - raise SkipParentLaTeX - - def depart_entry(self, node): - self.append(self.context.pop()) - - def before_substitution_definition(self, node): - raise nodes.SkipNode - - indentation_level = 0 - - def node_name(self, node): - return node.__class__.__name__.replace('_', '') - - # Attribute propagation order. - attribute_order = ['align', 'classes', 'ids'] - - def attribute_cmp(self, a1, a2): - """ - Compare attribute names `a1` and `a2`. Used in - propagate_attributes to determine propagation order. - - See built-in function `cmp` for return value. 
- """ - if a1 in self.attribute_order and a2 in self.attribute_order: - return cmp(self.attribute_order.index(a1), - self.attribute_order.index(a2)) - if (a1 in self.attribute_order) != (a2 in self.attribute_order): - # Attributes not in self.attribute_order come last. - return a1 in self.attribute_order and -1 or 1 - else: - return cmp(a1, a2) - - def propagate_attributes(self, node): - # Propagate attributes using \DECattr macros. - node_name = self.node_name(node) - attlist = [] - if isinstance(node, nodes.Element): - attlist = node.attlist() - attlist.sort(lambda pair1, pair2: self.attribute_cmp(pair1[0], - pair2[0])) - # `numatts` may be greater than len(attlist) due to list - # attributes. - numatts = 0 - pass_contents = self.pass_contents(node) - for key, value in attlist: - if isinstance(value, ListType): - self.append(r'\renewcommand{\DEVattrlen}{%s}' % len(value)) - for i in range(len(value)): - self.append(r'\DECattr{%s}{%s}{%s}{%s}{' % - (i+1, key, self.encode(value[i], attval=key), - node_name)) - if not pass_contents: - self.append('}') - numatts += len(value) - else: - self.append(r'\DECattr{}{%s}{%s}{%s}{' % - (key, self.encode(unicode(value), attval=key), - node_name)) - if not pass_contents: - self.append('}') - numatts += 1 - if pass_contents: - self.context.append('}' * numatts) # for Emacs: { - else: - self.context.append('') - - def visit_docinfo(self, node): - raise NotImplementedError('Docinfo not yet implemented.') - - def visit_document(self, node): - document = node - # Move IDs into TextElements. This won't work for images. - # Need to review this. - for node in document.traverse(nodes.Element): - if node.has_key('ids') and not isinstance(node, - nodes.TextElement): - next_text_element = node.next_node(nodes.TextElement) - if next_text_element: - next_text_element['ids'].extend(node['ids']) - node['ids'] = [] - - def pass_contents(self, node): - r""" - Return True if the node contents should be passed in - \DN{} and \DECattr{}{}{}{}{}. - Return False if the node contents should be passed in - \DECvisit \DECdepart, and no - attribute handler should be called. - """ - # Passing the whole document or whole sections as parameters - # to \DN... or \DECattr causes LaTeX to run out of memory. - return not isinstance(node, (nodes.document, nodes.section)) - - def dispatch_visit(self, node): - skip_attr = skip_parent = 0 - # TreePruningException to be propagated. - tree_pruning_exception = None - if hasattr(self, 'before_' + node.__class__.__name__): - try: - getattr(self, 'before_' + node.__class__.__name__)(node) - except SkipParentLaTeX: - skip_parent = 1 - except SkipAttrParentLaTeX: - skip_attr = 1 - skip_parent = 1 - except nodes.SkipNode: - raise - except (nodes.SkipChildren, nodes.SkipSiblings), instance: - tree_pruning_exception = instance - except nodes.SkipDeparture: - raise NotImplementedError( - 'SkipDeparture not usable in LaTeX writer') - - if not isinstance(node, nodes.Text): - node_name = self.node_name(node) - # attribute_deleters will be appended to self.context. - attribute_deleters = [] - if not skip_parent and not isinstance(node, nodes.document): - self.append(r'\renewcommand{\DEVparent}{%s}' - % self.node_name(node.parent)) - for name, value in node.attlist(): - if not isinstance(value, ListType) and not ':' in name: - # For non-list and non-special (like - # 'xml:preserve') attributes, set - # \DEVcurrentNA to the - # attribute value, so that the value of the - # attribute is available in the node handler - # and all children. 
- macro = r'\DEVcurrentN%sA%s' % (node_name, name) - self.append(r'\def%s{%s}' % ( - macro, self.encode(unicode(value), attval=name))) - # Make the attribute undefined afterwards. - attribute_deleters.append(r'\let%s=\relax' % macro) - self.context.append('\n'.join(attribute_deleters)) - if self.pass_contents(node): - # Call \DN{}. - self.append(r'\DN%s{' % node_name) - self.context.append('}') - else: - # Call \DECvisit - # \DECdepart. (Maybe we should use LaTeX - # environments for this?) - self.append(r'\DECvisit%s' % node_name) - self.context.append(r'\DECdepart%s' % node_name) - self.indentation_level += 1 - if not skip_attr: - self.propagate_attributes(node) - else: - self.context.append('') - - if (isinstance(node, nodes.TextElement) and - not isinstance(node.parent, nodes.TextElement)): - # Reset current quote to left. - self.left_quote = 1 - - # Call visit_... method. - try: - nodes.SparseNodeVisitor.dispatch_visit(self, node) - except LaTeXException: - raise NotImplementedError( - 'visit_... methods must not raise LaTeXExceptions') - - if tree_pruning_exception: - # Propagate TreePruningException raised in before_... method. - raise tree_pruning_exception - - def is_invisible(self, node): - # Return true if node is invisible or moved away in the LaTeX - # rendering. - return (not isinstance(node, nodes.Text) and - (isinstance(node, nodes.Invisible) or - isinstance(node, nodes.footnote) or - isinstance(node, nodes.citation) or - # Assume raw nodes to be invisible. - isinstance(node, nodes.raw) or - # Floating image or figure. - node.get('align') in ('left', 'right'))) - - def is_visible(self, node): - return not self.is_invisible(node) - - def needs_space(self, node): - """Two nodes for which `needs_space` is true need auxiliary space.""" - # Return true if node is a visible block-level element. - return ((isinstance(node, nodes.Body) or - isinstance(node, nodes.topic)) and - not (self.is_invisible(node) or - isinstance(node.parent, nodes.TextElement))) - - def always_needs_space(self, node): - """ - Always add space around nodes for which `always_needs_space()` - is true, regardless of whether the other node needs space as - well. (E.g. transition next to section.) - """ - return isinstance(node, nodes.transition) - - def dispatch_departure(self, node): - # Call departure method. - nodes.SparseNodeVisitor.dispatch_departure(self, node) - - if not isinstance(node, nodes.Text): - # Close attribute and node handler call (\DN...{...}). - self.indentation_level -= 1 - self.append(self.context.pop() + self.context.pop()) - # Delete \DECcurrentN... attribute macros. - self.append(self.context.pop()) - # Get next sibling. - next_node = node.next_node( - ascend=0, siblings=1, descend=0, - condition=self.is_visible) - # Insert space if necessary. - if (self.needs_space(node) and self.needs_space(next_node) or - self.always_needs_space(node) or - self.always_needs_space(next_node)): - if isinstance(node, nodes.paragraph) and isinstance(next_node, nodes.paragraph): - # Space between paragraphs. - self.append(r'\DECparagraphspace') - else: - # One of the elements is not a paragraph. 
- self.append(r'\DECauxiliaryspace') diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/newlatex2e/base.tex --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/newlatex2e/base.tex Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1180 +0,0 @@ -% System stylesheet for the new LaTeX writer, newlatex2e. - -% Major parts of the rendering are done in this stylesheet and not in the -% Python module. - -% For development notes, see notes.txt. - -% User documentation (in the stylesheet for now; that may change though): - -% Naming conventions: -% All uppercase letters in macro names have a specific meaning. -% \D...: All macros introduced by the Docutils LaTeX writer start with "D". -% \DS: Setup function (called at the bottom of this stylesheet). -% \DN{}: Handler for Docutils document tree node `node`; called by -% the Python module. -% \DEV: External variable, set by the Python module. -% \DEC: External command. It is called by the Python module and must be -% defined in this stylesheet. -% \DNA{}{}{}{}{}: -% Attribute handler for `attribute` set on nodes of type `nodename`. -% See below for a discussion of attribute handlers. -% \DA{}{}{}{}{}: -% Attribute handler for all `attribute`. Called only when no specific -% \DNA handler is defined. -% \DNC{}: -% Handler for `class`, when set on nodes of type `nodename`. -% \DC{}: -% Handler for `class`. Called only when no specific \DNC -% handler is defined. -% \D: Generic variable or function. - -% Attribute handlers: -% TODO - -% --------------------------------------------------------------------------- - -% Having to intersperse code with \makeatletter-\makeatother pairs is very -% annoying, so we call \makeatletter at the top and \makeatother at the -% bottom. Just be aware that you cannot use "@" as a text character inside -% this stylesheet. -\makeatletter - -% Print-mode (as opposed to online mode e.g. with Adobe Reader). -% This causes for example blue hyperlinks. -\providecommand{\Dprinting}{false} - -% \DSearly is called right after \documentclass. -\providecommand{\DSearly}{} -% \DSlate is called at the end of the stylesheet (right before the document -% tree). -\providecommand{\DSlate}{} - -% Use the KOMA script article class. -\providecommand{\Ddocumentclass}{scrartcl} -\providecommand{\Ddocumentoptions}{a4paper} -\providecommand{\DSdocumentclass}{ - \documentclass[\Ddocumentoptions]{\Ddocumentclass} } - -% Todo: This should be movable to the bottom, but it isn't as long as -% we use \usepackage commands at the top level of this stylesheet -% (which we shouldn't). -\DSdocumentclass - -\providecommand{\DSpackages}{ - % Load miscellaneous packages. - % Note 1: Many of the packages loaded here are used throughout this stylesheet. - % If one of these packages does not work on your system or in your scenario, - % please let us know, so we can consider making the package optional. - % Note 2: It would appear cleaner to load packages where they are used. - % However, since using a wrong package loading order can lead to *very* - % subtle bugs, we centralize the loading of most packages here. - \DSfontencoding % load font encoding packages - \DSlanguage % load babel - % Using \ifthenelse conditionals. - \usepackage{ifthen} % before hyperref (really!) - % There is not support for *not* using hyperref because it's used in many - % places. If this is a problem (e.g. 
because hyperref doesn't work on your - % system), please let us know. - \usepackage[colorlinks=false,pdfborder={0 0 0}]{hyperref} - % Get color, e.g. for links and system messages. - \usepackage{color} - % Get \textnhtt macro (non-hyphenating type writer). - \usepackage{hyphenat} - % For sidebars. - \usepackage{picins} - % We use longtable to create tables. - \usepackage{longtable} - % Images. - \usepackage{graphicx} - % These packages might be useful (some just add magic pixie dust), so - % evaluate them: - %\usepackage{fixmath} - %\usepackage{amsmath} - % Add some missing symbols like \textonehalf. - \usepackage{textcomp} -} - -\providecommand{\DSfontencoding}{ - % Set up font encoding. Called by \DSpackages. - % AE is a T1 emulation. It provides mostly the same characters and - % features as T1-encoded fonts but doesn't use bitmap fonts (which are - % unsuitable for online reading and subtle for printers). - \usepackage{ae} - % Provide the characters not contained in AE from EC bitmap fonts. - \usepackage{aecompl} - % Guillemets ("<<", ">>") in AE. - \usepackage{aeguill} -} - -\providecommand{\DSsymbols}{% - % Fix up symbols. - % The Euro symbol in Computer Modern looks, um, funny. Let's get a - % proper Euro symbol. - \usepackage{eurosym}% - \renewcommand{\texteuro}{\euro}% -} - -% Taken from -% -% and modified. Used with permission. -\providecommand{\Dprovidelength}[2]{% - \begingroup% - \escapechar\m@ne% - \xdef\@gtempa{{\string#1}}% - \endgroup% - \expandafter\@ifundefined\@gtempa% - {\newlength{#1}\setlength{#1}{#2}}% - {}% -} - -\providecommand{\Dprovidecounter}[2]{% - % Like \newcounter except that it doesn't crash if the counter - % already exists. - \@ifundefined{c@#1}{\newcounter{#1}\setcounter{#1}{#2}}{} -} - -\Dprovidelength{\Dboxparindent}{\parindent} - -\providecommand{\Dmakebox}[1]{% - % Make a centered, frameless box. Useful e.g. for block quotes. - % Do not use minipages here, but create pseudo-lists to allow - % page-breaking. (Don't use KOMA-script's addmargin environment - % because it messes up bullet lists.) - \Dmakelistenvironment{}{}{% - \setlength{\parskip}{0pt}% - \setlength{\parindent}{\Dboxparindent}% - \item{#1}% - }% -} - -\providecommand{\Dmakefbox}[1]{% - % Make a centered, framed box. Useful e.g. for admonitions. - \vspace{0.4\baselineskip}% - \begin{center}% - \fbox{% - \begin{minipage}[t]{0.9\linewidth}% - \setlength{\parindent}{\Dboxparindent}% - #1% - \end{minipage}% - }% - \end{center}% - \vspace{0.4\baselineskip}% -} - -% We do not currently recognize the difference between an end-sentence and a -% mid-sentence period (". " vs. ". " in plain text). So \frenchspacing is -% appropriate. -\providecommand{\DSfrenchspacing}{\frenchspacing} - - -\Dprovidelength{\Dblocklevelvspace}{% - % Space between block-level elements other than paragraphs. - 0.7\baselineskip plus 0.3\baselineskip minus 0.2\baselineskip% -} -\providecommand{\DECauxiliaryspace}{% - \ifthenelse{\equal{\Dneedvspace}{true}}{\vspace{\Dblocklevelvspace}}{}% - \par\noindent% -} -\providecommand{\DECparagraphspace}{\par} -\providecommand{\Dneedvspace}{true} - -\providecommand{\DSlanguage}{% - % Set up babel. - \usepackage[\DEVlanguagebabel]{babel} -} - -\providecommand{\Difdefined}[3]{\@ifundefined{#1}{#3}{#2}} - -% Handler for 'classes' attribute (called for each class attribute). -\providecommand{\DAclasses}[5]{% - % Dispatch to \DNC. - \Difdefined{DN#4C#3}{% - % Pass only contents, nothing else! - \csname DN#4C#3\endcsname{#5}% - }{% - % Otherwise, dispatch to \DC. 
- \Difdefined{DC#3}{% - \csname DC#3\endcsname{#5}% - }{% - #5% - }% - }% -} - -\providecommand{\DECattr}[5]{% - % Global attribute dispatcher, called inside the document tree. - % Parameters: - % 1. Attribute number. - % 2. Attribute name. - % 3. Attribute value. - % 4. Node name. - % 5. Node contents. - \Difdefined{DN#4A#2}{% - % Dispatch to \DNA. - \csname DN#4A#2\endcsname{#1}{#2}{#3}{#4}{#5}% - }{\Difdefined{DA#2}{% - % Otherwise dispatch to \DA. - \csname DA#2\endcsname{#1}{#2}{#3}{#4}{#5}% - }{% - % Otherwise simply run the contents without calling a handler. - #5% - }}% -} - -% ---------- Link handling ---------- -% Targets and references. - -\providecommand{\Draisedlink}[1]{% - % Anchors are placed on the base line by default. This is a bad thing for - % inline context, so we raise the anchor (normally by \baselineskip). - \Hy@raisedlink{#1}% -} - -% References. -% We're assuming here that the "refid" and "refuri" attributes occur -% only in inline context (in TextElements). -\providecommand{\DArefid}[5]{% - \ifthenelse{\equal{#4}{reference}}{% - \Dexplicitreference{\##3}{#5}% - }{% - % If this is not a target node (targets with refids are - % uninteresting and should be silently dropped). - \ifthenelse{\not\equal{#4}{target}}{% - % If this is a footnote reference, call special macro. - \ifthenelse{\equal{#4}{footnotereference}}{% - \Dimplicitfootnotereference{\##3}{#5}% - }{% - \ifthenelse{\equal{#4}{citationreference}}{% - \Dimplicitcitationreference{\##3}{#5}% - }{% - \Dimplicitreference{\##3}{#5}% - }% - }% - }{}% - }% -} -\providecommand{\DArefuri}[5]{% - \ifthenelse{\equal{#4}{target}}{% - % The node name is 'target', so this is a hyperlink target, like this: - % .. _mytarget: URI - % Hyperlink targets are ignored because they are invisible. - }{% - % If a non-target node has a refuri attribute, it must be an explicit URI - % reference (i.e. node name is 'reference'). - \Durireference{#3}{#5}% - }% -} -% Targets. -\providecommand{\DAids}[5]{% - \label{#3}% - \ifthenelse{\equal{#4}{footnotereference}}{% - {% - \renewcommand{\HyperRaiseLinkDefault}{% - % Dirty hack to make backrefs to footnote references work. - % For some reason, \baselineskip is 0pt in fn references. - 0.5\Doriginalbaselineskip% - }% - \Draisedlink{\hypertarget{#3}{}}#5% - }% - }{% - \Draisedlink{\hypertarget{#3}{}}#5% - }% -} -\providecommand{\Dimplicitreference}[2]{% - % Create implicit reference to ID. Implicit references occur - % e.g. in TOC-backlinks of section titles. Parameters: - % 1. Target. - % 2. Link text. - \href{#1}{#2}% -} -\providecommand{\Dimplicitfootnotereference}[2]{% - % Ditto, but for the special case of footnotes. - % We want them to be rendered like explicit references. - \Dexplicitreference{#1}{#2}% -} -\providecommand{\Dimplicitcitationreference}[2]{% - % Ditto for citation references. - \Dimplicitfootnotereference{#1}{#2}% -} -\providecommand{\Dcolorexplicitreference}{% - \ifthenelse{\equal{\Dprinting}{true}}{\color{black}}{\color{blue}}% -} -\providecommand{\Dexplicitreference}[2]{% - % Create explicit reference to ID, e.g. created with "foo_". - % Parameters: - % 1. Target. - % 2. Link text. - \href{#1}{{\Dcolorexplicitreference#2}}% -} -\providecommand{\Dcolorurireference}{\Dcolorexplicitreference} -\providecommand{\Durireference}[2]{% - % Create reference to URI. Parameters: - % 1. Target. - % 2. Link text. - \href{#1}{{\Dcolorurireference#2}}% -} - -\Dprovidecounter{Dpdfbookmarkid}{0}% -\providecommand{\Dpdfbookmark}[1]{% - % Temporarily decrement Desctionlevel counter. 
- \addtocounter{Dsectionlevel}{-1}% - %\typeout{\arabic{Dsectionlevel}}% - %\typeout{#1}% - %\typeout{docutils\roman{Dpdfbookmarkid}}% - %\typeout{}% - \pdfbookmark[\arabic{Dsectionlevel}]{#1}{docutils\arabic{Dpdfbookmarkid}}% - \addtocounter{Dsectionlevel}{1}% - \addtocounter{Dpdfbookmarkid}{1}% -} -% ---------- End of Link Handling ---------- - -\providecommand{\DNparagraph}[1]{% - \ifthenelse{\equal{\DEVparagraphindented}{true}}{\indent}{\noindent}% - #1% -} -\providecommand{\Dformatboxtitle}[1]{{\Large\textbf{#1}}} -\providecommand{\Dformatboxsubtitle}[1]{{\large\textbf{#1}}} -\providecommand{\Dtopictitle}[1]{% - \Difinsidetoc{\vspace{1em}\par}{}% - \noindent\Dformatboxtitle{#1}% - \ifthenelse{\equal{\DEVhassubtitle}{false}}{\vspace{1em}}{\vspace{0.5em}}% - \par% -} -\providecommand{\Dadmonitiontitle}[1]{% - \Dtopictitle{#1}% -} -\providecommand{\Dtopicsubtitle}[1]{% - \noindent\Dformatboxsubtitle{#1}% - \vspace{1em}% - \par% -} -\providecommand{\Dsidebartitle}[1]{\Dtopictitle{#1}} -\providecommand{\Dsidebarsubtitle}[1]{\Dtopicsubtitle{#1}} -\providecommand{\Ddocumenttitle}[1]{% - \begin{center}{\Huge#1}\end{center}% - \ifthenelse{\equal{\DEVhassubtitle}{true}}{\vspace{0.1cm}}{\vspace{1cm}}% -} -\providecommand{\Ddocumentsubtitle}[1]{% - \begin{center}{\huge#1}\end{center}% - \vspace{1cm}% -} -% Can be overwritten by user stylesheet. -\providecommand{\Dformatsectiontitle}[1]{#1} -\providecommand{\Dformatsectionsubtitle}[1]{\Dformatsectiontitle{#1}} -\providecommand{\Dbookmarksectiontitle}[1]{% - % Return text suitable for use in \section*, \subsection*, etc., - % containing a PDF bookmark. Parameter: The title (as node tree). - \Draisedlink{\Dpdfbookmark{\DEVtitleastext}}% - #1% -} -\providecommand{\Dsectiontitlehook}[1]{#1} -\providecommand{\Dsectiontitle}[1]{% - \Dsectiontitlehook{% - \Ddispatchsectiontitle{\Dbookmarksectiontitle{\Dformatsectiontitle{#1}}}% - }% -} -\providecommand{\Ddispatchsectiontitle}[1]{% - \@ifundefined{Dsectiontitle\roman{Dsectionlevel}}{% - \Ddeepsectiontitle{#1}% - }{% - \csname Dsectiontitle\roman{Dsectionlevel}\endcsname{#1}% - }% -} -\providecommand{\Ddispatchsectionsubtitle}[1]{% - \Ddispatchsectiontitle{#1}% -} -\providecommand{\Dsectiontitlei}[1]{\section*{#1}} -\providecommand{\Dsectiontitleii}[1]{\subsection*{#1}} -\providecommand{\Ddeepsectiontitle}[1]{% - % Anything below \subsubsection (like \paragraph or \subparagraph) - % is useless because it uses the same font. The only way to - % (visually) distinguish such deeply nested sections is to use - % section numbering. - \subsubsection*{#1}% -} -\providecommand{\Dsectionsubtitlehook}[1]{#1} -\Dprovidelength{\Dsectionsubtitleraisedistance}{0.7em} -\providecommand{\Dsectionsubtitlescaling}{0.85} -\providecommand{\Dsectionsubtitle}[1]{% - \Dsectionsubtitlehook{% - % Move the subtitle nearer to the title. - \vspace{-\Dsectionsubtitleraisedistance}% - % Don't create a PDF bookmark. - \Ddispatchsectionsubtitle{% - \Dformatsectionsubtitle{\scalebox{\Dsectionsubtitlescaling}{#1}}% - }% - }% -} -\providecommand{\DNtitle}[1]{% - % Dispatch to \Dtitle. - \csname D\DEVparent title\endcsname{#1}% -} -\providecommand{\DNsubtitle}[1]{% - % Dispatch to \Dsubtitle. 
- \csname D\DEVparent subtitle\endcsname{#1}% -} - -\providecommand{\DNliteralblock}[1]{% - \Dmakelistenvironment{}{% - \ifthenelse{\equal{\Dinsidetabular}{true}}{% - \setlength{\leftmargin}{0pt}% - }{}% - \setlength{\rightmargin}{0pt}% - }{% - \raggedright\item\noindent\nohyphens{\textnhtt{#1\Dfinalstrut}}% - }% -} -\providecommand{\DNdoctestblock}[1]{\DNliteralblock{#1}} -\providecommand{\DNliteral}[1]{\textnhtt{#1}} -\providecommand{\DNemphasis}[1]{\emph{#1}} -\providecommand{\DNstrong}[1]{\textbf{#1}} -\providecommand{\DECvisitdocument}{\begin{document}\noindent} -\providecommand{\DECdepartdocument}{\end{document}} -\providecommand{\DNtopic}[1]{% - \ifthenelse{\equal{\DEVcurrentNtopicAcontents}{1}}{% - \addtocounter{Dtoclevel}{1}% - \par\noindent% - #1% - \addtocounter{Dtoclevel}{-1}% - }{% - \par\noindent% - \Dmakebox{#1}% - }% -} -\providecommand{\DNadmonition}[1]{% - \DNtopic{#1}% -} -\providecommand{\Dformatrubric}[1]{\textbf{#1}} -\Dprovidelength{\Dprerubricspace}{0.3em} -\providecommand{\DNrubric}[1]{% - \vspace{\Dprerubricspace}\par\noindent\Dformatrubric{#1}\par% -} - -\providecommand{\Dbullet}{} -\providecommand{\DECsetbullet}[1]{\renewcommand{\Dbullet}{#1}} -\providecommand{\DNbulletlist}[1]{% - \Difinsidetoc{% - \Dtocbulletlist{#1}% - }{% - \Dmakelistenvironment{\Dbullet}{}{#1}% - }% -} -% Todo: So what on earth is @pnumwidth? -\renewcommand{\@pnumwidth}{2.2em} -\providecommand{\DNlistitem}[1]{% - \Difinsidetoc{% - \ifthenelse{\equal{\theDtoclevel}{1}\and\equal{\Dlocaltoc}{false}}{% - {% - \par\addvspace{1em}\noindent% - \sectfont% - #1\hfill\pageref{\DEVcurrentNlistitemAtocrefid}% - }% - }{% - \@dottedtocline{0}{\Dtocindent}{0em}{#1}{% - \pageref{\DEVcurrentNlistitemAtocrefid}% - }% - }% - }{% - \item{#1}% - }% -} -\providecommand{\DNenumeratedlist}[1]{#1} -\Dprovidecounter{Dsectionlevel}{0} -\providecommand{\Dvisitsectionhook}{} -\providecommand{\Ddepartsectionhook}{} -\providecommand{\DECvisitsection}{% - \addtocounter{Dsectionlevel}{1}% - \Dvisitsectionhook% -} -\providecommand{\DECdepartsection}{% - \Ddepartsectionhook% - \addtocounter{Dsectionlevel}{-1}% -} - -% Using \_ will cause hyphenation after _ even in \textnhtt-typewriter -% because the hyphenat package redefines \_. So we use -% \textunderscore here. -\providecommand{\DECtextunderscore}{\textunderscore} - -\providecommand{\Dtextinlineliteralfirstspace}{{ }} -\providecommand{\Dtextinlineliteralsecondspace}{{~}} - -\Dprovidelength{\Dlistspacing}{0.8\baselineskip} - -\providecommand{\Dsetlistrightmargin}{% - \ifthenelse{\lengthtest{\linewidth>12em}}{% - % Equal margins. - \setlength{\rightmargin}{\leftmargin}% - }{% - % If the line is narrower than 10em, we don't remove any further - % space from the right. - \setlength{\rightmargin}{0pt}% - }% -} -\providecommand{\Dresetlistdepth}{false} -\Dprovidelength{\Doriginallabelsep}{\labelsep} -\providecommand{\Dmakelistenvironment}[3]{% - % Make list environment with support for unlimited nesting and with - % reasonable default lengths. Parameters: - % 1. Label (same as in list environment). - % 2. Spacing (same as in list environment). - % 3. List contents (contents of list environment). - \ifthenelse{\equal{\Dinsidetabular}{true}}{% - % Unfortunately, vertical spacing doesn't work correctly when - % using lists inside tabular environments, so we use a minipage. 
- \begin{minipage}[t]{\linewidth}% - }{}% - {% - \renewcommand{\Dneedvspace}{false}% - % \parsep0.5\baselineskip - \renewcommand{\Dresetlistdepth}{false}% - \ifnum \@listdepth>5% - \protect\renewcommand{\Dresetlistdepth}{true}% - \@listdepth=5% - \fi% - \begin{list}{% - #1% - }{% - \setlength{\itemsep}{0pt}% - \setlength{\partopsep}{0pt}% - \setlength{\topsep}{0pt}% - % List should take 90% of total width. - \setlength{\leftmargin}{0.05\linewidth}% - \ifthenelse{\lengthtest{\leftmargin<1.8em}}{% - \setlength{\leftmargin}{1.8em}% - }{}% - \setlength{\labelsep}{\Doriginallabelsep}% - \Dsetlistrightmargin% - #2% - }{% - #3% - }% - \end{list}% - \ifthenelse{\equal{\Dresetlistdepth}{true}}{\@listdepth=5}{}% - }% - \ifthenelse{\equal{\Dinsidetabular}{true}}{\end{minipage}}{}% -} -\providecommand{\Dfinalstrut}{\@finalstrut\@arstrutbox} -\providecommand{\DAlastitem}[5]{#5\Dfinalstrut} - -\Dprovidelength{\Ditemsep}{0pt} -\providecommand{\DECmakeenumeratedlist}[6]{% - % Make enumerated list. - % Parameters: - % - prefix - % - type (\arabic, \roman, ...) - % - suffix - % - suggested counter name - % - start number - 1 - % - list contents - \newcounter{#4}% - \Dmakelistenvironment{#1#2{#4}#3}{% - % Use as much space as needed for the label. - \setlength{\labelwidth}{10em}% - % Reserve enough space so that the label doesn't go beyond the - % left margin of preceding paragraphs. Like that: - % - % A paragraph. - % - % 1. First item. - \setlength{\leftmargin}{2.5em}% - \Dsetlistrightmargin% - \setlength{\itemsep}{\Ditemsep}% - % Use counter recommended by Python module. - \usecounter{#4}% - % Set start value. - \addtocounter{#4}{#5}% - }{% - % The list contents. - #6% - }% -} - - -% Single quote in literal mode. \textquotesingle from package -% textcomp has wrong width when using package ae, so we use a normal -% single curly quote here. -\providecommand{\DECtextliteralsinglequote}{'} - - -% "Tabular lists" are field lists and options lists (not definition -% lists because there the term always appears on its own line). We'll -% use the terminology of field lists now ("field", "field name", -% "field body"), but the same is also analogously applicable to option -% lists. -% -% We want these lists to be breakable across pages. We cannot -% automatically get the narrowest possible size for the left column -% (i.e. the field names or option groups) because tabularx does not -% support multi-page tables, ltxtable needs to have the table in an -% external file and we don't want to clutter the user's directories -% with auxiliary files created by the filecontents environment, and -% ltablex is not included in teTeX. -% -% Thus we set a fixed length for the left column and use list -% environments. This also has the nice side effect that breaking is -% now possible anywhere, not just between fields. -% -% Note that we are creating a distinct list environment for each -% field. There is no macro for a whole tabular list! 
-\Dprovidelength{\Dtabularlistfieldnamewidth}{6em} -\Dprovidelength{\Dtabularlistfieldnamesep}{0.5em} -\providecommand{\Dinsidetabular}{false} -\providecommand{\Dsavefieldname}{} -\providecommand{\Dsavefieldbody}{} -\Dprovidelength{\Dusedfieldnamewidth}{0pt} -\Dprovidelength{\Drealfieldnamewidth}{0pt} -\providecommand{\Dtabularlistfieldname}[1]{\renewcommand{\Dsavefieldname}{#1}} -\providecommand{\Dtabularlistfieldbody}[1]{\renewcommand{\Dsavefieldbody}{#1}} -\Dprovidelength{\Dparskiptemp}{0pt} -\providecommand{\Dtabularlistfield}[1]{% - {% - % This only saves field name and field body in \Dsavefieldname and - % \Dsavefieldbody, resp. It does not insert any text into the - % document. - #1% - % Recalculate the real field name width everytime we encounter a - % tabular list field because it may have been changed using a - % "raw" node. - \setlength{\Drealfieldnamewidth}{\Dtabularlistfieldnamewidth}% - \addtolength{\Drealfieldnamewidth}{\Dtabularlistfieldnamesep}% - \Dmakelistenvironment{% - \makebox[\Drealfieldnamewidth][l]{\Dsavefieldname}% - }{% - \setlength{\labelwidth}{\Drealfieldnamewidth}% - \setlength{\leftmargin}{\Drealfieldnamewidth}% - \setlength{\rightmargin}{0pt}% - \setlength{\labelsep}{0pt}% - }{% - \item% - \settowidth{\Dusedfieldnamewidth}{\Dsavefieldname}% - \setlength{\Dparskiptemp}{\parskip}% - \ifthenelse{% - \lengthtest{\Dusedfieldnamewidth>\Dtabularlistfieldnamewidth}% - }{% - \mbox{}\par% - \setlength{\parskip}{0pt}% - }{}% - \Dsavefieldbody% - \setlength{\parskip}{\Dparskiptemp}% - %XXX Why did we need this? - %\@finalstrut\@arstrutbox% - }% - \par% - }% -} - -\providecommand{\Dformatfieldname}[1]{\textbf{#1:}} -\providecommand{\DNfieldlist}[1]{#1} -\providecommand{\DNfield}[1]{\Dtabularlistfield{#1}} -\providecommand{\DNfieldname}[1]{% - \Dtabularlistfieldname{% - \Dformatfieldname{#1}% - }% -} -\providecommand{\DNfieldbody}[1]{\Dtabularlistfieldbody{#1}} - -\providecommand{\Dformatoptiongroup}[1]{% - % Format option group, e.g. "-f file, --input file". - \texttt{#1}% -} -\providecommand{\Dformatoption}[1]{% - % Format option, e.g. "-f file". - % Put into mbox to avoid line-breaking at spaces. - \mbox{#1}% -} -\providecommand{\Dformatoptionstring}[1]{% - % Format option string, e.g. "-f". - #1% -} -\providecommand{\Dformatoptionargument}[1]{% - % Format option argument, e.g. "file". - \textsl{#1}% -} -\providecommand{\Dformatoptiondescription}[1]{% - % Format option description, e.g. - % "\DNparagraph{Read input data from file.}" - #1% -} -\providecommand{\DNoptionlist}[1]{#1} -\providecommand{\Doptiongroupjoiner}{,{ }} -\providecommand{\Disfirstoption}{% - % Auxiliary macro indicating if a given option is the first child - % of its option group (if it's not, it has to preceded by - % \Doptiongroupjoiner). - false% -} -\providecommand{\DNoptionlistitem}[1]{% - \Dtabularlistfield{#1}% -} -\providecommand{\DNoptiongroup}[1]{% - \renewcommand{\Disfirstoption}{true}% - \Dtabularlistfieldname{\Dformatoptiongroup{#1}}% -} -\providecommand{\DNoption}[1]{% - % If this is not the first option in this option group, add a - % joiner. 
- \ifthenelse{\equal{\Disfirstoption}{true}}{% - \renewcommand{\Disfirstoption}{false}% - }{% - \Doptiongroupjoiner% - }% - \Dformatoption{#1}% -} -\providecommand{\DNoptionstring}[1]{\Dformatoptionstring{#1}} -\providecommand{\DNoptionargument}[1]{{ }\Dformatoptionargument{#1}} -\providecommand{\DNdescription}[1]{% - \Dtabularlistfieldbody{\Dformatoptiondescription{#1}}% -} - -\providecommand{\DNdefinitionlist}[1]{% - \begin{description}% - \parskip0pt% - #1% - \end{description}% -} -\providecommand{\DNdefinitionlistitem}[1]{% - % LaTeX expects the label in square brackets; we provide an empty - % label. - \item[]#1% -} -\providecommand{\Dformatterm}[1]{#1} -\providecommand{\DNterm}[1]{\hspace{-5pt}\Dformatterm{#1}} -% I'm still not sure what's the best rendering for classifiers. The -% colon syntax is used by reStructuredText, so it's at least WYSIWYG. -% Use slanted text because italic would cause too much emphasis. -\providecommand{\Dformatclassifier}[1]{\textsl{#1}} -\providecommand{\DNclassifier}[1]{~:~\Dformatclassifier{#1}} -\providecommand{\Dformatdefinition}[1]{#1} -\providecommand{\DNdefinition}[1]{\par\Dformatdefinition{#1}} - -\providecommand{\Dlineblockindentation}{2.5em} -\providecommand{\DNlineblock}[1]{% - \Dmakelistenvironment{}{% - \ifthenelse{\equal{\DEVparent}{lineblock}}{% - % Parent is a line block, so indent. - \setlength{\leftmargin}{\Dlineblockindentation}% - }{% - % At top level; don't indent. - \setlength{\leftmargin}{0pt}% - }% - \setlength{\rightmargin}{0pt}% - \setlength{\parsep}{0pt}% - }{% - #1% - }% -} -\providecommand{\DNline}[1]{\item#1} - -\providecommand{\DNtransition}{% - \raisebox{0.25em}{\parbox{\linewidth}{\hspace*{\fill}\hrulefill\hrulefill\hspace*{\fill}}}% -} - -\providecommand{\Dformatblockquote}[1]{% - % Format contents of block quote. - % This occurs in block-level context, so we cannot use \textsl. - {\slshape#1}% -} -\providecommand{\Dformatattribution}[1]{---\textup{#1}} -\providecommand{\DNblockquote}[1]{% - \Dmakebox{% - \Dformatblockquote{#1} - }% -} -\providecommand{\DNattribution}[1]{% - \par% - \begin{flushright}\Dformatattribution{#1}\end{flushright}% -} - - -% Sidebars: -% Vertical and horizontal margins. -\Dprovidelength{\Dsidebarvmargin}{0.5em} -\Dprovidelength{\Dsidebarhmargin}{1em} -% Padding (space between contents and frame). -\Dprovidelength{\Dsidebarpadding}{1em} -% Frame width. -\Dprovidelength{\Dsidebarframewidth}{2\fboxrule} -% Position ("l" or "r"). -\providecommand{\Dsidebarposition}{r} -% Width. -\Dprovidelength{\Dsidebarwidth}{0.45\linewidth} -\providecommand{\DNsidebar}[1]{ - \parpic[\Dsidebarposition]{% - \begin{minipage}[t]{\Dsidebarwidth}% - % Doing this with nested minipages is ugly, but I haven't found - % another way to place vertical space before and after the fbox. - \vspace{\Dsidebarvmargin}% - {% - \setlength{\fboxrule}{\Dsidebarframewidth}% - \setlength{\fboxsep}{\Dsidebarpadding}% - \fbox{% - \begin{minipage}[t]{\linewidth}% - \setlength{\parindent}{\Dboxparindent}% - #1% - \end{minipage}% - }% - }% - \vspace{\Dsidebarvmargin}% - \end{minipage}% - }% -} - - -% Citations and footnotes. -\providecommand{\Dformatfootnote}[1]{% - % Format footnote. - {% - \footnotesize#1% - % \par is necessary for LaTeX to adjust baselineskip to the - % changed font size. - \par% - }% -} -\providecommand{\Dformatcitation}[1]{\Dformatfootnote{#1}} -\Dprovidelength{\Doriginalbaselineskip}{0pt} -\providecommand{\DNfootnotereference}[1]{% - {% - % \baselineskip is 0pt in \textsuperscript, so we save it here. 
- \setlength{\Doriginalbaselineskip}{\baselineskip}% - \textsuperscript{#1}% - }% -} -\providecommand{\DNcitationreference}[1]{{[}#1{]}} -\Dprovidelength{\Dfootnotesep}{3.5pt} -\providecommand{\Dsetfootnotespacing}{% - % Spacing commands executed at the beginning of footnotes. - \setlength{\parindent}{0pt}% - \hspace{1em}% -} -\providecommand{\DNfootnote}[1]{% - % See ltfloat.dtx for details. - {% - \insert\footins{% - % BUG: This is too small if the user adds - % \onehalfspacing or \doublespace. - \vspace{\Dfootnotesep}% - \Dsetfootnotespacing% - \Dformatfootnote{#1}% - }% - }% -} -\providecommand{\DNcitation}[1]{\DNfootnote{#1}} -\providecommand{\Dformatfootnotelabel}[1]{% - % Keep \footnotesize in footnote labels (\textsuperscript would - % reduce the font size even more). - \textsuperscript{\footnotesize#1{ }}% -} -\providecommand{\Dformatcitationlabel}[1]{{[}#1{]}{ }} -\providecommand{\Dformatmultiplebackrefs}[1]{% - % If in printing mode, do not write out multiple backrefs. - \ifthenelse{\equal{\Dprinting}{true}}{}{\textsl{#1}}% -} -\providecommand{\Dthislabel}{} -\providecommand{\DNlabel}[1]{% - % Footnote or citatation label. - \renewcommand{\Dthislabel}{#1}% - \ifthenelse{\not\equal{\DEVsinglebackref}{}}{% - \let\Doriginallabel=\Dthislabel% - \def\Dthislabel{% - \Dsinglefootnotebacklink{\DEVsinglebackref}{\Doriginallabel}% - }% - }{}% - \ifthenelse{\equal{\DEVparent}{footnote}}{% - % Footnote label. - \Dformatfootnotelabel{\Dthislabel}% - }{% - \ifthenelse{\equal{\DEVparent}{citation}}{% - % Citation label. - \Dformatcitationlabel{\Dthislabel}% - }{}% - }% - % If there are multiple backrefs, add them now. - \Dformatmultiplebackrefs{\DEVmultiplebackrefs}% -} -\providecommand{\Dsinglefootnotebacklink}[2]{% - % Create normal backlink of a footnote label. Parameters: - % 1. ID. - % 2. Link text. - % Treat like a footnote reference. - \Dimplicitfootnotereference{\##1}{#2}% -} -\providecommand{\DECmultifootnotebacklink}[2]{% - % Create generated backlink, as in (1, 2). Parameters: - % 1. ID. - % 2. Link text. - % Treat like a footnote reference. - \Dimplicitfootnotereference{\##1}{#2}% -} -\providecommand{\Dsinglecitationbacklink}[2]{\Dsinglefootnotebacklink{#1}{#2}} -\providecommand{\DECmulticitationbacklink}[2]{\DECmultifootnotebacklink{#1}{#2}} - - -\providecommand{\DECmaketable}[2]{% - % Make table. Parameters: - % 1. Table spec (like "|p|p|"). - % 2. Table contents. - {% - \ifthenelse{\equal{\Dinsidetabular}{true}}{% - % Inside longtable; we cannot have nested longtables. - \begin{tabular}{#1}% - \hline% - #2% - \end{tabular}% - }{% - \renewcommand{\Dinsidetabular}{true}% - \begin{longtable}{#1}% - \hline% - #2% - \end{longtable}% - }% - }% -} -\providecommand{\DNthead}[1]{% - #1% - \endhead% -} -\providecommand{\DNrow}[1]{% - #1\tabularnewline% - \hline% -} -\providecommand{\Dinsidemulticolumn}{false} -\providecommand{\Dcompensatingmulticol}[3]{% - \multicolumn{#1}{#2}{% - {% - \renewcommand{\Dinsidemulticolumn}{true}% - % Compensate for weird missing vertical space at top of paragraph. - \raisebox{-2.5pt}{#3}% - }% - }% -} -\providecommand{\DECcolspan}[2]{% - % Take care of the morecols attribute (but incremented by 1). - &% - \Dcompensatingmulticol{#1}{l|}{#2}% -} -\providecommand{\DECcolspanleft}[2]{% - % Like \Dmorecols, but called for the leftmost entries in a table - % row. 
- \Dcompensatingmulticol{#1}{|l|}{#2}% -} -\providecommand{\DECsubsequententry}[1]{% - % -} -\providecommand{\DNentry}[1]{% - % The following sequence adds minimal vertical space above the top - % lines of the first cell paragraph, so that vertical space is - % balanced at the top and bottom of table cells. - \ifthenelse{\equal{\Dinsidemulticolumn}{false}}{% - \vspace{-1em}\vspace{-\parskip}\par% - }{}% - #1% - % No need to add an ampersand ("&"); that's done by \DECsubsequententry. -} -\providecommand{\DAtableheaderentry}[5]{\Dformattableheaderentry{#5}} -\providecommand{\Dformattableheaderentry}[1]{{\bfseries#1}} - - -\providecommand{\DNsystemmessage}[1]{% - {% - \ifthenelse{\equal{\Dprinting}{false}}{\color{red}}{}% - \bfseries% - #1% - }% -} - - -\providecommand{\Dinsidehalign}{false} -\newsavebox{\Dalignedimagebox} -\Dprovidelength{\Dalignedimagewidth}{0pt} -\providecommand{\Dhalign}[2]{% - % Horizontally align the contents to the left or right so that the - % text flows around it. - % Parameters: - % 1. l or r - % 2. Contents. - \renewcommand{\Dinsidehalign}{true}% - % For some obscure reason \parpic consumes some vertical space. - \vspace{-3pt}% - % Now we do something *really* ugly, but this enables us to wrap the - % image in a minipage while still allowing tight frames when - % class=border (see \DNimageCborder). - \sbox{\Dalignedimagebox}{#2}% - \settowidth{\Dalignedimagewidth}{\usebox{\Dalignedimagebox}}% - \parpic[#1]{% - \begin{minipage}[b]{\Dalignedimagewidth}% - % Compensate for previously added space, but not entirely. - \vspace*{2.0pt}% - \vspace*{\Dfloatimagetopmargin}% - \usebox{\Dalignedimagebox}% - \vspace*{1.5pt}% - \vspace*{\Dfloatimagebottommargin}% - \end{minipage}% - }% - \renewcommand{\Dinsidehalign}{false}% -} - - -% Maximum width of an image. -\providecommand{\Dimagemaxwidth}{\linewidth} -\providecommand{\Dfloatimagemaxwidth}{0.5\linewidth} -% Auxiliary variable. -\Dprovidelength{\Dcurrentimagewidth}{0pt} -\providecommand{\DNimageAalign}[5]{% - \ifthenelse{\equal{#3}{left}}{% - \Dhalign{l}{#5}% - }{% - \ifthenelse{\equal{#3}{right}}{% - \Dhalign{r}{#5}% - }{% - \ifthenelse{\equal{#3}{center}}{% - % Text floating around centered figures is a bad idea. Thus - % we use a center environment. Note that no extra space is - % added by the writer, so the space added by the center - % environment is fine. - \begin{center}#5\end{center}% - }{% - #5% - }% - }% - }% -} -% Base path for images. -\providecommand{\Dimagebase}{} -% Auxiliary command. Current image path. -\providecommand{\Dimagepath}{} -\providecommand{\DNimageAuri}[5]{% - % Insert image. We treat the URI like a path here. - \renewcommand{\Dimagepath}{\Dimagebase#3}% - \Difdefined{DcurrentNimageAwidth}{% - \Dwidthimage{\DEVcurrentNimageAwidth}{\Dimagepath}% - }{% - \Dsimpleimage{\Dimagepath}% - }% -} -\Dprovidelength{\Dfloatimagevmargin}{0pt} -\providecommand{\Dfloatimagetopmargin}{\Dfloatimagevmargin} -\providecommand{\Dfloatimagebottommargin}{\Dfloatimagevmargin} -\providecommand{\Dwidthimage}[2]{% - % Image with specified width. - % Parameters: - % 1. Image width. - % 2. Image path. - % Need to make bottom-alignment dependent on align attribute (add - % functional test first). Need to observe height attribute. - %\begin{minipage}[b]{#1}% - \includegraphics[width=#1,height=\textheight,keepaspectratio]{#2}% - %\end{minipage}% -} -\providecommand{\Dcurrentimagemaxwidth}{} -\providecommand{\Dsimpleimage}[1]{% - % Insert image, without much parametrization. 
- \settowidth{\Dcurrentimagewidth}{\includegraphics{#1}}% - \ifthenelse{\equal{\Dinsidehalign}{true}}{% - \renewcommand{\Dcurrentimagemaxwidth}{\Dfloatimagemaxwidth}% - }{% - \renewcommand{\Dcurrentimagemaxwidth}{\Dimagemaxwidth}% - }% - \ifthenelse{\lengthtest{\Dcurrentimagewidth>\Dcurrentimagemaxwidth}}{% - \Dwidthimage{\Dcurrentimagemaxwidth}{#1}% - }{% - \Dwidthimage{\Dcurrentimagewidth}{#1}% - }% -} -\providecommand{\Dwidthimage}[2]{% - % Image with specified width. - % Parameters: - % 1. Image width. - % 2. Image path. - \Dwidthimage{#1}{#2}% -} - -% Figures. -\providecommand{\DNfigureAalign}[5]{% - % Hack to make it work Right Now. - %\def\DEVcurrentNimageAwidth{\DEVcurrentNfigureAwidth}% - % - %\def\DEVcurrentNimageAwidth{\linewidth}% - \DNimageAalign{#1}{#2}{#3}{#4}{% - \begin{minipage}[b]{0.4\linewidth}#5\end{minipage}}% - %\let\DEVcurrentNimageAwidth=\relax% - % - %\let\DEVcurrentNimageAwidth=\relax% -} -\providecommand{\DNcaption}[1]{\par\noindent{\slshape#1}} -\providecommand{\DNlegend}[1]{\DECauxiliaryspace#1} - -\providecommand{\DCborder}[1]{\fbox{#1}} -% No padding between image and border. -\providecommand{\DNimageCborder}[1]{\frame{#1}} - - -% Need to replace with language-specific stuff. Maybe look at -% csquotes.sty and ask the author for permission to use parts of it. -\providecommand{\DECtextleftdblquote}{``} -\providecommand{\DECtextrightdblquote}{''} - -% Table of contents: -\Dprovidelength{\Dtocininitialsectnumwidth}{2.4em} -\Dprovidelength{\Dtocadditionalsectnumwidth}{0.7em} -% Level inside a table of contents. While this is at -1, we are not -% inside a TOC. -\Dprovidecounter{Dtoclevel}{-1}% -\providecommand{\Dlocaltoc}{false}% -\providecommand{\DNtopicClocal}[1]{% - \renewcommand{\Dlocaltoc}{true}% - \addtolength{\Dtocsectnumwidth}{2\Dtocadditionalsectnumwidth}% - \addtolength{\Dtocindent}{-2\Dtocadditionalsectnumwidth}% - #1% - \addtolength{\Dtocindent}{2\Dtocadditionalsectnumwidth}% - \addtolength{\Dtocsectnumwidth}{-2\Dtocadditionalsectnumwidth}% - \renewcommand{\Dlocaltoc}{false}% -} -\Dprovidelength{\Dtocindent}{0pt}% -\Dprovidelength{\Dtocsectnumwidth}{\Dtocininitialsectnumwidth} -% Compensate for one additional TOC indentation space so that the -% top-level is unindented. -\addtolength{\Dtocsectnumwidth}{-\Dtocadditionalsectnumwidth} -\addtolength{\Dtocindent}{-\Dtocsectnumwidth} -\providecommand{\Difinsidetoc}[2]{% - \ifthenelse{\not\equal{\theDtoclevel}{-1}}{#1}{#2}% -} -\providecommand{\DNgeneratedCsectnum}[1]{% - \Difinsidetoc{% - % Section number inside TOC. - \makebox[\Dtocsectnumwidth][l]{#1}% - }{% - % Section number inside section title. - #1\quad% - }% -} -\providecommand{\Dtocbulletlist}[1]{% - \addtocounter{Dtoclevel}{1}% - \addtolength{\Dtocindent}{\Dtocsectnumwidth}% - \addtolength{\Dtocsectnumwidth}{\Dtocadditionalsectnumwidth}% - #1% - \addtolength{\Dtocsectnumwidth}{-\Dtocadditionalsectnumwidth}% - \addtolength{\Dtocindent}{-\Dtocsectnumwidth}% - \addtocounter{Dtoclevel}{-1}% -} - - -% For \DECpixelunit, the length value is pre-multiplied with 0.75, so by -% specifying "pt" we get the same notion of "pixel" as graphicx. -\providecommand{\DECpixelunit}{pt} -% Normally lengths are relative to the current linewidth. -\providecommand{\DECrelativeunit}{\linewidth} - - -% ACTION: These commands actually *do* something. -% Ultimately, everything should be done here, and no active content should be -% above (not even \usepackage). 
- -\DSearly -\DSpackages -\DSfrenchspacing -\DSsymbols -\DSlate - -\makeatother diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/newlatex2e/unicode_map.py --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/newlatex2e/unicode_map.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,2369 +0,0 @@ -# $Id$ -# Author: Lea Wiemann -# Copyright: This file has been placed in the public domain. - -# This is a mapping of Unicode characters to LaTeX equivalents. -# The information has been extracted from -# , written by -# David Carlisle and Sebastian Rahtz. -# -# The extraction has been done by the "create_unimap.py" script -# located at . - -unicode_map = {u'\xa0': '$~$', -u'\xa1': '{\\textexclamdown}', -u'\xa2': '{\\textcent}', -u'\xa3': '{\\textsterling}', -u'\xa4': '{\\textcurrency}', -u'\xa5': '{\\textyen}', -u'\xa6': '{\\textbrokenbar}', -u'\xa7': '{\\textsection}', -u'\xa8': '{\\textasciidieresis}', -u'\xa9': '{\\textcopyright}', -u'\xaa': '{\\textordfeminine}', -u'\xab': '{\\guillemotleft}', -u'\xac': '$\\lnot$', -u'\xad': '$\\-$', -u'\xae': '{\\textregistered}', -u'\xaf': '{\\textasciimacron}', -u'\xb0': '{\\textdegree}', -u'\xb1': '$\\pm$', -u'\xb2': '${^2}$', -u'\xb3': '${^3}$', -u'\xb4': '{\\textasciiacute}', -u'\xb5': '$\\mathrm{\\mu}$', -u'\xb6': '{\\textparagraph}', -u'\xb7': '$\\cdot$', -u'\xb8': '{\\c{}}', -u'\xb9': '${^1}$', -u'\xba': '{\\textordmasculine}', -u'\xbb': '{\\guillemotright}', -u'\xbc': '{\\textonequarter}', -u'\xbd': '{\\textonehalf}', -u'\xbe': '{\\textthreequarters}', -u'\xbf': '{\\textquestiondown}', -u'\xc0': '{\\`{A}}', -u'\xc1': "{\\'{A}}", -u'\xc2': '{\\^{A}}', -u'\xc3': '{\\~{A}}', -u'\xc4': '{\\"{A}}', -u'\xc5': '{\\AA}', -u'\xc6': '{\\AE}', -u'\xc7': '{\\c{C}}', -u'\xc8': '{\\`{E}}', -u'\xc9': "{\\'{E}}", -u'\xca': '{\\^{E}}', -u'\xcb': '{\\"{E}}', -u'\xcc': '{\\`{I}}', -u'\xcd': "{\\'{I}}", -u'\xce': '{\\^{I}}', -u'\xcf': '{\\"{I}}', -u'\xd0': '{\\DH}', -u'\xd1': '{\\~{N}}', -u'\xd2': '{\\`{O}}', -u'\xd3': "{\\'{O}}", -u'\xd4': '{\\^{O}}', -u'\xd5': '{\\~{O}}', -u'\xd6': '{\\"{O}}', -u'\xd7': '{\\texttimes}', -u'\xd8': '{\\O}', -u'\xd9': '{\\`{U}}', -u'\xda': "{\\'{U}}", -u'\xdb': '{\\^{U}}', -u'\xdc': '{\\"{U}}', -u'\xdd': "{\\'{Y}}", -u'\xde': '{\\TH}', -u'\xdf': '{\\ss}', -u'\xe0': '{\\`{a}}', -u'\xe1': "{\\'{a}}", -u'\xe2': '{\\^{a}}', -u'\xe3': '{\\~{a}}', -u'\xe4': '{\\"{a}}', -u'\xe5': '{\\aa}', -u'\xe6': '{\\ae}', -u'\xe7': '{\\c{c}}', -u'\xe8': '{\\`{e}}', -u'\xe9': "{\\'{e}}", -u'\xea': '{\\^{e}}', -u'\xeb': '{\\"{e}}', -u'\xec': '{\\`{\\i}}', -u'\xed': "{\\'{\\i}}", -u'\xee': '{\\^{\\i}}', -u'\xef': '{\\"{\\i}}', -u'\xf0': '{\\dh}', -u'\xf1': '{\\~{n}}', -u'\xf2': '{\\`{o}}', -u'\xf3': "{\\'{o}}", -u'\xf4': '{\\^{o}}', -u'\xf5': '{\\~{o}}', -u'\xf6': '{\\"{o}}', -u'\xf7': '$\\div$', -u'\xf8': '{\\o}', -u'\xf9': '{\\`{u}}', -u'\xfa': "{\\'{u}}", -u'\xfb': '{\\^{u}}', -u'\xfc': '{\\"{u}}', -u'\xfd': "{\\'{y}}", -u'\xfe': '{\\th}', -u'\xff': '{\\"{y}}', -u'\u0100': '{\\={A}}', -u'\u0101': '{\\={a}}', -u'\u0102': '{\\u{A}}', -u'\u0103': '{\\u{a}}', -u'\u0104': '{\\k{A}}', -u'\u0105': '{\\k{a}}', -u'\u0106': "{\\'{C}}", -u'\u0107': "{\\'{c}}", -u'\u0108': '{\\^{C}}', -u'\u0109': '{\\^{c}}', -u'\u010a': '{\\.{C}}', -u'\u010b': '{\\.{c}}', -u'\u010c': '{\\v{C}}', -u'\u010d': '{\\v{c}}', -u'\u010e': '{\\v{D}}', -u'\u010f': '{\\v{d}}', -u'\u0110': '{\\DJ}', -u'\u0111': '{\\dj}', -u'\u0112': '{\\={E}}', 
-u'\u0113': '{\\={e}}', -u'\u0114': '{\\u{E}}', -u'\u0115': '{\\u{e}}', -u'\u0116': '{\\.{E}}', -u'\u0117': '{\\.{e}}', -u'\u0118': '{\\k{E}}', -u'\u0119': '{\\k{e}}', -u'\u011a': '{\\v{E}}', -u'\u011b': '{\\v{e}}', -u'\u011c': '{\\^{G}}', -u'\u011d': '{\\^{g}}', -u'\u011e': '{\\u{G}}', -u'\u011f': '{\\u{g}}', -u'\u0120': '{\\.{G}}', -u'\u0121': '{\\.{g}}', -u'\u0122': '{\\c{G}}', -u'\u0123': '{\\c{g}}', -u'\u0124': '{\\^{H}}', -u'\u0125': '{\\^{h}}', -u'\u0126': '{{\\fontencoding{LELA}\\selectfont\\char40}}', -u'\u0127': '$\\Elzxh$', -u'\u0128': '{\\~{I}}', -u'\u0129': '{\\~{\\i}}', -u'\u012a': '{\\={I}}', -u'\u012b': '{\\={\\i}}', -u'\u012c': '{\\u{I}}', -u'\u012d': '{\\u{\\i}}', -u'\u012e': '{\\k{I}}', -u'\u012f': '{\\k{i}}', -u'\u0130': '{\\.{I}}', -u'\u0131': '{\\i}', -u'\u0132': '{IJ}', -u'\u0133': '{ij}', -u'\u0134': '{\\^{J}}', -u'\u0135': '{\\^{\\j}}', -u'\u0136': '{\\c{K}}', -u'\u0137': '{\\c{k}}', -u'\u0138': '{{\\fontencoding{LELA}\\selectfont\\char91}}', -u'\u0139': "{\\'{L}}", -u'\u013a': "{\\'{l}}", -u'\u013b': '{\\c{L}}', -u'\u013c': '{\\c{l}}', -u'\u013d': '{\\v{L}}', -u'\u013e': '{\\v{l}}', -u'\u013f': '{{\\fontencoding{LELA}\\selectfont\\char201}}', -u'\u0140': '{{\\fontencoding{LELA}\\selectfont\\char202}}', -u'\u0141': '{\\L}', -u'\u0142': '{\\l}', -u'\u0143': "{\\'{N}}", -u'\u0144': "{\\'{n}}", -u'\u0145': '{\\c{N}}', -u'\u0146': '{\\c{n}}', -u'\u0147': '{\\v{N}}', -u'\u0148': '{\\v{n}}', -u'\u0149': "{'n}", -u'\u014a': '{\\NG}', -u'\u014b': '{\\ng}', -u'\u014c': '{\\={O}}', -u'\u014d': '{\\={o}}', -u'\u014e': '{\\u{O}}', -u'\u014f': '{\\u{o}}', -u'\u0150': '{\\H{O}}', -u'\u0151': '{\\H{o}}', -u'\u0152': '{\\OE}', -u'\u0153': '{\\oe}', -u'\u0154': "{\\'{R}}", -u'\u0155': "{\\'{r}}", -u'\u0156': '{\\c{R}}', -u'\u0157': '{\\c{r}}', -u'\u0158': '{\\v{R}}', -u'\u0159': '{\\v{r}}', -u'\u015a': "{\\'{S}}", -u'\u015b': "{\\'{s}}", -u'\u015c': '{\\^{S}}', -u'\u015d': '{\\^{s}}', -u'\u015e': '{\\c{S}}', -u'\u015f': '{\\c{s}}', -u'\u0160': '{\\v{S}}', -u'\u0161': '{\\v{s}}', -u'\u0162': '{\\c{T}}', -u'\u0163': '{\\c{t}}', -u'\u0164': '{\\v{T}}', -u'\u0165': '{\\v{t}}', -u'\u0166': '{{\\fontencoding{LELA}\\selectfont\\char47}}', -u'\u0167': '{{\\fontencoding{LELA}\\selectfont\\char63}}', -u'\u0168': '{\\~{U}}', -u'\u0169': '{\\~{u}}', -u'\u016a': '{\\={U}}', -u'\u016b': '{\\={u}}', -u'\u016c': '{\\u{U}}', -u'\u016d': '{\\u{u}}', -u'\u016e': '{\\r{U}}', -u'\u016f': '{\\r{u}}', -u'\u0170': '{\\H{U}}', -u'\u0171': '{\\H{u}}', -u'\u0172': '{\\k{U}}', -u'\u0173': '{\\k{u}}', -u'\u0174': '{\\^{W}}', -u'\u0175': '{\\^{w}}', -u'\u0176': '{\\^{Y}}', -u'\u0177': '{\\^{y}}', -u'\u0178': '{\\"{Y}}', -u'\u0179': "{\\'{Z}}", -u'\u017a': "{\\'{z}}", -u'\u017b': '{\\.{Z}}', -u'\u017c': '{\\.{z}}', -u'\u017d': '{\\v{Z}}', -u'\u017e': '{\\v{z}}', -u'\u0192': '$f$', -u'\u0195': '{\\texthvlig}', -u'\u019e': '{\\textnrleg}', -u'\u01aa': '$\\eth$', -u'\u01ba': '{{\\fontencoding{LELA}\\selectfont\\char195}}', -u'\u01c2': '{\\textdoublepipe}', -u'\u01f5': "{\\'{g}}", -u'\u0250': '$\\Elztrna$', -u'\u0252': '$\\Elztrnsa$', -u'\u0254': '$\\Elzopeno$', -u'\u0256': '$\\Elzrtld$', -u'\u0258': '{{\\fontencoding{LEIP}\\selectfont\\char61}}', -u'\u0259': '$\\Elzschwa$', -u'\u025b': '$\\varepsilon$', -u'\u0261': '{g}', -u'\u0263': '$\\Elzpgamma$', -u'\u0264': '$\\Elzpbgam$', -u'\u0265': '$\\Elztrnh$', -u'\u026c': '$\\Elzbtdl$', -u'\u026d': '$\\Elzrtll$', -u'\u026f': '$\\Elztrnm$', -u'\u0270': '$\\Elztrnmlr$', -u'\u0271': '$\\Elzltlmr$', -u'\u0272': '{\\Elzltln}', -u'\u0273': '$\\Elzrtln$', -u'\u0277': 
'$\\Elzclomeg$', -u'\u0278': '{\\textphi}', -u'\u0279': '$\\Elztrnr$', -u'\u027a': '$\\Elztrnrl$', -u'\u027b': '$\\Elzrttrnr$', -u'\u027c': '$\\Elzrl$', -u'\u027d': '$\\Elzrtlr$', -u'\u027e': '$\\Elzfhr$', -u'\u027f': '{{\\fontencoding{LEIP}\\selectfont\\char202}}', -u'\u0282': '$\\Elzrtls$', -u'\u0283': '$\\Elzesh$', -u'\u0287': '$\\Elztrnt$', -u'\u0288': '$\\Elzrtlt$', -u'\u028a': '$\\Elzpupsil$', -u'\u028b': '$\\Elzpscrv$', -u'\u028c': '$\\Elzinvv$', -u'\u028d': '$\\Elzinvw$', -u'\u028e': '$\\Elztrny$', -u'\u0290': '$\\Elzrtlz$', -u'\u0292': '$\\Elzyogh$', -u'\u0294': '$\\Elzglst$', -u'\u0295': '$\\Elzreglst$', -u'\u0296': '$\\Elzinglst$', -u'\u029e': '{\\textturnk}', -u'\u02a4': '$\\Elzdyogh$', -u'\u02a7': '$\\Elztesh$', -u'\u02bc': "{'}", -u'\u02c7': '{\\textasciicaron}', -u'\u02c8': '$\\Elzverts$', -u'\u02cc': '$\\Elzverti$', -u'\u02d0': '$\\Elzlmrk$', -u'\u02d1': '$\\Elzhlmrk$', -u'\u02d2': '$\\Elzsbrhr$', -u'\u02d3': '$\\Elzsblhr$', -u'\u02d4': '$\\Elzrais$', -u'\u02d5': '$\\Elzlow$', -u'\u02d8': '{\\textasciibreve}', -u'\u02d9': '{\\textperiodcentered}', -u'\u02da': '{\\r{}}', -u'\u02db': '{\\k{}}', -u'\u02dc': '{\\texttildelow}', -u'\u02dd': '{\\H{}}', -u'\u02e5': '{\\tone{55}}', -u'\u02e6': '{\\tone{44}}', -u'\u02e7': '{\\tone{33}}', -u'\u02e8': '{\\tone{22}}', -u'\u02e9': '{\\tone{11}}', -u'\u0300': '{\\`}', -u'\u0301': "{\\'}", -u'\u0302': '{\\^}', -u'\u0303': '{\\~}', -u'\u0304': '{\\=}', -u'\u0306': '{\\u}', -u'\u0307': '{\\.}', -u'\u0308': '{\\"}', -u'\u030a': '{\\r}', -u'\u030b': '{\\H}', -u'\u030c': '{\\v}', -u'\u030f': '{\\cyrchar\\C}', -u'\u0311': '{{\\fontencoding{LECO}\\selectfont\\char177}}', -u'\u0318': '{{\\fontencoding{LECO}\\selectfont\\char184}}', -u'\u0319': '{{\\fontencoding{LECO}\\selectfont\\char185}}', -u'\u0321': '$\\Elzpalh$', -u'\u0322': '{\\Elzrh}', -u'\u0327': '{\\c}', -u'\u0328': '{\\k}', -u'\u032a': '$\\Elzsbbrg$', -u'\u032b': '{{\\fontencoding{LECO}\\selectfont\\char203}}', -u'\u032f': '{{\\fontencoding{LECO}\\selectfont\\char207}}', -u'\u0335': '{\\Elzxl}', -u'\u0336': '{\\Elzbar}', -u'\u0337': '{{\\fontencoding{LECO}\\selectfont\\char215}}', -u'\u0338': '{{\\fontencoding{LECO}\\selectfont\\char216}}', -u'\u033a': '{{\\fontencoding{LECO}\\selectfont\\char218}}', -u'\u033b': '{{\\fontencoding{LECO}\\selectfont\\char219}}', -u'\u033c': '{{\\fontencoding{LECO}\\selectfont\\char220}}', -u'\u033d': '{{\\fontencoding{LECO}\\selectfont\\char221}}', -u'\u0361': '{{\\fontencoding{LECO}\\selectfont\\char225}}', -u'\u0386': "{\\'{A}}", -u'\u0388': "{\\'{E}}", -u'\u0389': "{\\'{H}}", -u'\u038a': "{\\'{}{I}}", -u'\u038c': "{\\'{}O}", -u'\u038e': "$\\mathrm{'Y}$", -u'\u038f': "$\\mathrm{'\\Omega}$", -u'\u0390': '$\\acute{\\ddot{\\iota}}$', -u'\u0391': '$\\Alpha$', -u'\u0392': '$\\Beta$', -u'\u0393': '$\\Gamma$', -u'\u0394': '$\\Delta$', -u'\u0395': '$\\Epsilon$', -u'\u0396': '$\\Zeta$', -u'\u0397': '$\\Eta$', -u'\u0398': '$\\Theta$', -u'\u0399': '$\\Iota$', -u'\u039a': '$\\Kappa$', -u'\u039b': '$\\Lambda$', -u'\u039c': '$M$', -u'\u039d': '$N$', -u'\u039e': '$\\Xi$', -u'\u039f': '$O$', -u'\u03a0': '$\\Pi$', -u'\u03a1': '$\\Rho$', -u'\u03a3': '$\\Sigma$', -u'\u03a4': '$\\Tau$', -u'\u03a5': '$\\Upsilon$', -u'\u03a6': '$\\Phi$', -u'\u03a7': '$\\Chi$', -u'\u03a8': '$\\Psi$', -u'\u03a9': '$\\Omega$', -u'\u03aa': '$\\mathrm{\\ddot{I}}$', -u'\u03ab': '$\\mathrm{\\ddot{Y}}$', -u'\u03ac': "{\\'{$\\alpha$}}", -u'\u03ad': '$\\acute{\\epsilon}$', -u'\u03ae': '$\\acute{\\eta}$', -u'\u03af': '$\\acute{\\iota}$', -u'\u03b0': '$\\acute{\\ddot{\\upsilon}}$', -u'\u03b1': 
'$\\alpha$', -u'\u03b2': '$\\beta$', -u'\u03b3': '$\\gamma$', -u'\u03b4': '$\\delta$', -u'\u03b5': '$\\epsilon$', -u'\u03b6': '$\\zeta$', -u'\u03b7': '$\\eta$', -u'\u03b8': '{\\texttheta}', -u'\u03b9': '$\\iota$', -u'\u03ba': '$\\kappa$', -u'\u03bb': '$\\lambda$', -u'\u03bc': '$\\mu$', -u'\u03bd': '$\\nu$', -u'\u03be': '$\\xi$', -u'\u03bf': '$o$', -u'\u03c0': '$\\pi$', -u'\u03c1': '$\\rho$', -u'\u03c2': '$\\varsigma$', -u'\u03c3': '$\\sigma$', -u'\u03c4': '$\\tau$', -u'\u03c5': '$\\upsilon$', -u'\u03c6': '$\\varphi$', -u'\u03c7': '$\\chi$', -u'\u03c8': '$\\psi$', -u'\u03c9': '$\\omega$', -u'\u03ca': '$\\ddot{\\iota}$', -u'\u03cb': '$\\ddot{\\upsilon}$', -u'\u03cc': "{\\'{o}}", -u'\u03cd': '$\\acute{\\upsilon}$', -u'\u03ce': '$\\acute{\\omega}$', -u'\u03d0': '{\\Pisymbol{ppi022}{87}}', -u'\u03d1': '{\\textvartheta}', -u'\u03d2': '$\\Upsilon$', -u'\u03d5': '$\\phi$', -u'\u03d6': '$\\varpi$', -u'\u03da': '$\\Stigma$', -u'\u03dc': '$\\Digamma$', -u'\u03dd': '$\\digamma$', -u'\u03de': '$\\Koppa$', -u'\u03e0': '$\\Sampi$', -u'\u03f0': '$\\varkappa$', -u'\u03f1': '$\\varrho$', -u'\u03f4': '{\\textTheta}', -u'\u03f6': '$\\backepsilon$', -u'\u0401': '{\\cyrchar\\CYRYO}', -u'\u0402': '{\\cyrchar\\CYRDJE}', -u'\u0403': "{\\cyrchar{\\'\\CYRG}}", -u'\u0404': '{\\cyrchar\\CYRIE}', -u'\u0405': '{\\cyrchar\\CYRDZE}', -u'\u0406': '{\\cyrchar\\CYRII}', -u'\u0407': '{\\cyrchar\\CYRYI}', -u'\u0408': '{\\cyrchar\\CYRJE}', -u'\u0409': '{\\cyrchar\\CYRLJE}', -u'\u040a': '{\\cyrchar\\CYRNJE}', -u'\u040b': '{\\cyrchar\\CYRTSHE}', -u'\u040c': "{\\cyrchar{\\'\\CYRK}}", -u'\u040e': '{\\cyrchar\\CYRUSHRT}', -u'\u040f': '{\\cyrchar\\CYRDZHE}', -u'\u0410': '{\\cyrchar\\CYRA}', -u'\u0411': '{\\cyrchar\\CYRB}', -u'\u0412': '{\\cyrchar\\CYRV}', -u'\u0413': '{\\cyrchar\\CYRG}', -u'\u0414': '{\\cyrchar\\CYRD}', -u'\u0415': '{\\cyrchar\\CYRE}', -u'\u0416': '{\\cyrchar\\CYRZH}', -u'\u0417': '{\\cyrchar\\CYRZ}', -u'\u0418': '{\\cyrchar\\CYRI}', -u'\u0419': '{\\cyrchar\\CYRISHRT}', -u'\u041a': '{\\cyrchar\\CYRK}', -u'\u041b': '{\\cyrchar\\CYRL}', -u'\u041c': '{\\cyrchar\\CYRM}', -u'\u041d': '{\\cyrchar\\CYRN}', -u'\u041e': '{\\cyrchar\\CYRO}', -u'\u041f': '{\\cyrchar\\CYRP}', -u'\u0420': '{\\cyrchar\\CYRR}', -u'\u0421': '{\\cyrchar\\CYRS}', -u'\u0422': '{\\cyrchar\\CYRT}', -u'\u0423': '{\\cyrchar\\CYRU}', -u'\u0424': '{\\cyrchar\\CYRF}', -u'\u0425': '{\\cyrchar\\CYRH}', -u'\u0426': '{\\cyrchar\\CYRC}', -u'\u0427': '{\\cyrchar\\CYRCH}', -u'\u0428': '{\\cyrchar\\CYRSH}', -u'\u0429': '{\\cyrchar\\CYRSHCH}', -u'\u042a': '{\\cyrchar\\CYRHRDSN}', -u'\u042b': '{\\cyrchar\\CYRERY}', -u'\u042c': '{\\cyrchar\\CYRSFTSN}', -u'\u042d': '{\\cyrchar\\CYREREV}', -u'\u042e': '{\\cyrchar\\CYRYU}', -u'\u042f': '{\\cyrchar\\CYRYA}', -u'\u0430': '{\\cyrchar\\cyra}', -u'\u0431': '{\\cyrchar\\cyrb}', -u'\u0432': '{\\cyrchar\\cyrv}', -u'\u0433': '{\\cyrchar\\cyrg}', -u'\u0434': '{\\cyrchar\\cyrd}', -u'\u0435': '{\\cyrchar\\cyre}', -u'\u0436': '{\\cyrchar\\cyrzh}', -u'\u0437': '{\\cyrchar\\cyrz}', -u'\u0438': '{\\cyrchar\\cyri}', -u'\u0439': '{\\cyrchar\\cyrishrt}', -u'\u043a': '{\\cyrchar\\cyrk}', -u'\u043b': '{\\cyrchar\\cyrl}', -u'\u043c': '{\\cyrchar\\cyrm}', -u'\u043d': '{\\cyrchar\\cyrn}', -u'\u043e': '{\\cyrchar\\cyro}', -u'\u043f': '{\\cyrchar\\cyrp}', -u'\u0440': '{\\cyrchar\\cyrr}', -u'\u0441': '{\\cyrchar\\cyrs}', -u'\u0442': '{\\cyrchar\\cyrt}', -u'\u0443': '{\\cyrchar\\cyru}', -u'\u0444': '{\\cyrchar\\cyrf}', -u'\u0445': '{\\cyrchar\\cyrh}', -u'\u0446': '{\\cyrchar\\cyrc}', -u'\u0447': '{\\cyrchar\\cyrch}', -u'\u0448': '{\\cyrchar\\cyrsh}', 
-u'\u0449': '{\\cyrchar\\cyrshch}', -u'\u044a': '{\\cyrchar\\cyrhrdsn}', -u'\u044b': '{\\cyrchar\\cyrery}', -u'\u044c': '{\\cyrchar\\cyrsftsn}', -u'\u044d': '{\\cyrchar\\cyrerev}', -u'\u044e': '{\\cyrchar\\cyryu}', -u'\u044f': '{\\cyrchar\\cyrya}', -u'\u0451': '{\\cyrchar\\cyryo}', -u'\u0452': '{\\cyrchar\\cyrdje}', -u'\u0453': "{\\cyrchar{\\'\\cyrg}}", -u'\u0454': '{\\cyrchar\\cyrie}', -u'\u0455': '{\\cyrchar\\cyrdze}', -u'\u0456': '{\\cyrchar\\cyrii}', -u'\u0457': '{\\cyrchar\\cyryi}', -u'\u0458': '{\\cyrchar\\cyrje}', -u'\u0459': '{\\cyrchar\\cyrlje}', -u'\u045a': '{\\cyrchar\\cyrnje}', -u'\u045b': '{\\cyrchar\\cyrtshe}', -u'\u045c': "{\\cyrchar{\\'\\cyrk}}", -u'\u045e': '{\\cyrchar\\cyrushrt}', -u'\u045f': '{\\cyrchar\\cyrdzhe}', -u'\u0460': '{\\cyrchar\\CYROMEGA}', -u'\u0461': '{\\cyrchar\\cyromega}', -u'\u0462': '{\\cyrchar\\CYRYAT}', -u'\u0464': '{\\cyrchar\\CYRIOTE}', -u'\u0465': '{\\cyrchar\\cyriote}', -u'\u0466': '{\\cyrchar\\CYRLYUS}', -u'\u0467': '{\\cyrchar\\cyrlyus}', -u'\u0468': '{\\cyrchar\\CYRIOTLYUS}', -u'\u0469': '{\\cyrchar\\cyriotlyus}', -u'\u046a': '{\\cyrchar\\CYRBYUS}', -u'\u046c': '{\\cyrchar\\CYRIOTBYUS}', -u'\u046d': '{\\cyrchar\\cyriotbyus}', -u'\u046e': '{\\cyrchar\\CYRKSI}', -u'\u046f': '{\\cyrchar\\cyrksi}', -u'\u0470': '{\\cyrchar\\CYRPSI}', -u'\u0471': '{\\cyrchar\\cyrpsi}', -u'\u0472': '{\\cyrchar\\CYRFITA}', -u'\u0474': '{\\cyrchar\\CYRIZH}', -u'\u0478': '{\\cyrchar\\CYRUK}', -u'\u0479': '{\\cyrchar\\cyruk}', -u'\u047a': '{\\cyrchar\\CYROMEGARND}', -u'\u047b': '{\\cyrchar\\cyromegarnd}', -u'\u047c': '{\\cyrchar\\CYROMEGATITLO}', -u'\u047d': '{\\cyrchar\\cyromegatitlo}', -u'\u047e': '{\\cyrchar\\CYROT}', -u'\u047f': '{\\cyrchar\\cyrot}', -u'\u0480': '{\\cyrchar\\CYRKOPPA}', -u'\u0481': '{\\cyrchar\\cyrkoppa}', -u'\u0482': '{\\cyrchar\\cyrthousands}', -u'\u0488': '{\\cyrchar\\cyrhundredthousands}', -u'\u0489': '{\\cyrchar\\cyrmillions}', -u'\u048c': '{\\cyrchar\\CYRSEMISFTSN}', -u'\u048d': '{\\cyrchar\\cyrsemisftsn}', -u'\u048e': '{\\cyrchar\\CYRRTICK}', -u'\u048f': '{\\cyrchar\\cyrrtick}', -u'\u0490': '{\\cyrchar\\CYRGUP}', -u'\u0491': '{\\cyrchar\\cyrgup}', -u'\u0492': '{\\cyrchar\\CYRGHCRS}', -u'\u0493': '{\\cyrchar\\cyrghcrs}', -u'\u0494': '{\\cyrchar\\CYRGHK}', -u'\u0495': '{\\cyrchar\\cyrghk}', -u'\u0496': '{\\cyrchar\\CYRZHDSC}', -u'\u0497': '{\\cyrchar\\cyrzhdsc}', -u'\u0498': '{\\cyrchar\\CYRZDSC}', -u'\u0499': '{\\cyrchar\\cyrzdsc}', -u'\u049a': '{\\cyrchar\\CYRKDSC}', -u'\u049b': '{\\cyrchar\\cyrkdsc}', -u'\u049c': '{\\cyrchar\\CYRKVCRS}', -u'\u049d': '{\\cyrchar\\cyrkvcrs}', -u'\u049e': '{\\cyrchar\\CYRKHCRS}', -u'\u049f': '{\\cyrchar\\cyrkhcrs}', -u'\u04a0': '{\\cyrchar\\CYRKBEAK}', -u'\u04a1': '{\\cyrchar\\cyrkbeak}', -u'\u04a2': '{\\cyrchar\\CYRNDSC}', -u'\u04a3': '{\\cyrchar\\cyrndsc}', -u'\u04a4': '{\\cyrchar\\CYRNG}', -u'\u04a5': '{\\cyrchar\\cyrng}', -u'\u04a6': '{\\cyrchar\\CYRPHK}', -u'\u04a7': '{\\cyrchar\\cyrphk}', -u'\u04a8': '{\\cyrchar\\CYRABHHA}', -u'\u04a9': '{\\cyrchar\\cyrabhha}', -u'\u04aa': '{\\cyrchar\\CYRSDSC}', -u'\u04ab': '{\\cyrchar\\cyrsdsc}', -u'\u04ac': '{\\cyrchar\\CYRTDSC}', -u'\u04ad': '{\\cyrchar\\cyrtdsc}', -u'\u04ae': '{\\cyrchar\\CYRY}', -u'\u04af': '{\\cyrchar\\cyry}', -u'\u04b0': '{\\cyrchar\\CYRYHCRS}', -u'\u04b1': '{\\cyrchar\\cyryhcrs}', -u'\u04b2': '{\\cyrchar\\CYRHDSC}', -u'\u04b3': '{\\cyrchar\\cyrhdsc}', -u'\u04b4': '{\\cyrchar\\CYRTETSE}', -u'\u04b5': '{\\cyrchar\\cyrtetse}', -u'\u04b6': '{\\cyrchar\\CYRCHRDSC}', -u'\u04b7': '{\\cyrchar\\cyrchrdsc}', -u'\u04b8': '{\\cyrchar\\CYRCHVCRS}', -u'\u04b9': 
'{\\cyrchar\\cyrchvcrs}', -u'\u04ba': '{\\cyrchar\\CYRSHHA}', -u'\u04bb': '{\\cyrchar\\cyrshha}', -u'\u04bc': '{\\cyrchar\\CYRABHCH}', -u'\u04bd': '{\\cyrchar\\cyrabhch}', -u'\u04be': '{\\cyrchar\\CYRABHCHDSC}', -u'\u04bf': '{\\cyrchar\\cyrabhchdsc}', -u'\u04c0': '{\\cyrchar\\CYRpalochka}', -u'\u04c3': '{\\cyrchar\\CYRKHK}', -u'\u04c4': '{\\cyrchar\\cyrkhk}', -u'\u04c7': '{\\cyrchar\\CYRNHK}', -u'\u04c8': '{\\cyrchar\\cyrnhk}', -u'\u04cb': '{\\cyrchar\\CYRCHLDSC}', -u'\u04cc': '{\\cyrchar\\cyrchldsc}', -u'\u04d4': '{\\cyrchar\\CYRAE}', -u'\u04d5': '{\\cyrchar\\cyrae}', -u'\u04d8': '{\\cyrchar\\CYRSCHWA}', -u'\u04d9': '{\\cyrchar\\cyrschwa}', -u'\u04e0': '{\\cyrchar\\CYRABHDZE}', -u'\u04e1': '{\\cyrchar\\cyrabhdze}', -u'\u04e8': '{\\cyrchar\\CYROTLD}', -u'\u04e9': '{\\cyrchar\\cyrotld}', -u'\u2002': '{\\hspace{0.6em}}', -u'\u2003': '{\\hspace{1em}}', -u'\u2004': '{\\hspace{0.33em}}', -u'\u2005': '{\\hspace{0.25em}}', -u'\u2006': '{\\hspace{0.166em}}', -u'\u2007': '{\\hphantom{0}}', -u'\u2008': '{\\hphantom{,}}', -u'\u2009': '{\\hspace{0.167em}}', -u'\u200a': '$\\mkern1mu$', -u'\u2010': '{-}', -u'\u2013': '{\\textendash}', -u'\u2014': '{\\textemdash}', -u'\u2015': '{\\rule{1em}{1pt}}', -u'\u2016': '$\\Vert$', -u'\u2018': '{`}', -u'\u2019': "{'}", -u'\u201a': '{,}', -u'\u201b': '$\\Elzreapos$', -u'\u201c': '{\\textquotedblleft}', -u'\u201d': '{\\textquotedblright}', -u'\u201e': '{,,}', -u'\u2020': '{\\textdagger}', -u'\u2021': '{\\textdaggerdbl}', -u'\u2022': '{\\textbullet}', -u'\u2024': '{.}', -u'\u2025': '{..}', -u'\u2026': '{\\ldots}', -u'\u2030': '{\\textperthousand}', -u'\u2031': '{\\textpertenthousand}', -u'\u2032': "${'}$", -u'\u2033': "${''}$", -u'\u2034': "${'''}$", -u'\u2035': '$\\backprime$', -u'\u2039': '{\\guilsinglleft}', -u'\u203a': '{\\guilsinglright}', -u'\u2057': "$''''$", -u'\u205f': '{\\mkern4mu}', -u'\u2060': '{\\nolinebreak}', -u'\u20a7': '{\\ensuremath{\\Elzpes}}', -u'\u20ac': '{\\mbox{\\texteuro}}', -u'\u20db': '$\\dddot$', -u'\u20dc': '$\\ddddot$', -u'\u2102': '$\\mathbb{C}$', -u'\u210a': '{\\mathscr{g}}', -u'\u210b': '$\\mathscr{H}$', -u'\u210c': '$\\mathfrak{H}$', -u'\u210d': '$\\mathbb{H}$', -u'\u210f': '$\\hslash$', -u'\u2110': '$\\mathscr{I}$', -u'\u2111': '$\\mathfrak{I}$', -u'\u2112': '$\\mathscr{L}$', -u'\u2113': '$\\mathscr{l}$', -u'\u2115': '$\\mathbb{N}$', -u'\u2116': '{\\cyrchar\\textnumero}', -u'\u2118': '$\\wp$', -u'\u2119': '$\\mathbb{P}$', -u'\u211a': '$\\mathbb{Q}$', -u'\u211b': '$\\mathscr{R}$', -u'\u211c': '$\\mathfrak{R}$', -u'\u211d': '$\\mathbb{R}$', -u'\u211e': '$\\Elzxrat$', -u'\u2122': '{\\texttrademark}', -u'\u2124': '$\\mathbb{Z}$', -u'\u2126': '$\\Omega$', -u'\u2127': '$\\mho$', -u'\u2128': '$\\mathfrak{Z}$', -u'\u2129': '$\\ElsevierGlyph{2129}$', -u'\u212b': '{\\AA}', -u'\u212c': '$\\mathscr{B}$', -u'\u212d': '$\\mathfrak{C}$', -u'\u212f': '$\\mathscr{e}$', -u'\u2130': '$\\mathscr{E}$', -u'\u2131': '$\\mathscr{F}$', -u'\u2133': '$\\mathscr{M}$', -u'\u2134': '$\\mathscr{o}$', -u'\u2135': '$\\aleph$', -u'\u2136': '$\\beth$', -u'\u2137': '$\\gimel$', -u'\u2138': '$\\daleth$', -u'\u2153': '$\\textfrac{1}{3}$', -u'\u2154': '$\\textfrac{2}{3}$', -u'\u2155': '$\\textfrac{1}{5}$', -u'\u2156': '$\\textfrac{2}{5}$', -u'\u2157': '$\\textfrac{3}{5}$', -u'\u2158': '$\\textfrac{4}{5}$', -u'\u2159': '$\\textfrac{1}{6}$', -u'\u215a': '$\\textfrac{5}{6}$', -u'\u215b': '$\\textfrac{1}{8}$', -u'\u215c': '$\\textfrac{3}{8}$', -u'\u215d': '$\\textfrac{5}{8}$', -u'\u215e': '$\\textfrac{7}{8}$', -u'\u2190': '$\\leftarrow$', -u'\u2191': '$\\uparrow$', 
-u'\u2192': '$\\rightarrow$', -u'\u2193': '$\\downarrow$', -u'\u2194': '$\\leftrightarrow$', -u'\u2195': '$\\updownarrow$', -u'\u2196': '$\\nwarrow$', -u'\u2197': '$\\nearrow$', -u'\u2198': '$\\searrow$', -u'\u2199': '$\\swarrow$', -u'\u219a': '$\\nleftarrow$', -u'\u219b': '$\\nrightarrow$', -u'\u219c': '$\\arrowwaveright$', -u'\u219d': '$\\arrowwaveright$', -u'\u219e': '$\\twoheadleftarrow$', -u'\u21a0': '$\\twoheadrightarrow$', -u'\u21a2': '$\\leftarrowtail$', -u'\u21a3': '$\\rightarrowtail$', -u'\u21a6': '$\\mapsto$', -u'\u21a9': '$\\hookleftarrow$', -u'\u21aa': '$\\hookrightarrow$', -u'\u21ab': '$\\looparrowleft$', -u'\u21ac': '$\\looparrowright$', -u'\u21ad': '$\\leftrightsquigarrow$', -u'\u21ae': '$\\nleftrightarrow$', -u'\u21b0': '$\\Lsh$', -u'\u21b1': '$\\Rsh$', -u'\u21b3': '$\\ElsevierGlyph{21B3}$', -u'\u21b6': '$\\curvearrowleft$', -u'\u21b7': '$\\curvearrowright$', -u'\u21ba': '$\\circlearrowleft$', -u'\u21bb': '$\\circlearrowright$', -u'\u21bc': '$\\leftharpoonup$', -u'\u21bd': '$\\leftharpoondown$', -u'\u21be': '$\\upharpoonright$', -u'\u21bf': '$\\upharpoonleft$', -u'\u21c0': '$\\rightharpoonup$', -u'\u21c1': '$\\rightharpoondown$', -u'\u21c2': '$\\downharpoonright$', -u'\u21c3': '$\\downharpoonleft$', -u'\u21c4': '$\\rightleftarrows$', -u'\u21c5': '$\\dblarrowupdown$', -u'\u21c6': '$\\leftrightarrows$', -u'\u21c7': '$\\leftleftarrows$', -u'\u21c8': '$\\upuparrows$', -u'\u21c9': '$\\rightrightarrows$', -u'\u21ca': '$\\downdownarrows$', -u'\u21cb': '$\\leftrightharpoons$', -u'\u21cc': '$\\rightleftharpoons$', -u'\u21cd': '$\\nLeftarrow$', -u'\u21ce': '$\\nLeftrightarrow$', -u'\u21cf': '$\\nRightarrow$', -u'\u21d0': '$\\Leftarrow$', -u'\u21d1': '$\\Uparrow$', -u'\u21d2': '$\\Rightarrow$', -u'\u21d3': '$\\Downarrow$', -u'\u21d4': '$\\Leftrightarrow$', -u'\u21d5': '$\\Updownarrow$', -u'\u21da': '$\\Lleftarrow$', -u'\u21db': '$\\Rrightarrow$', -u'\u21dd': '$\\rightsquigarrow$', -u'\u21f5': '$\\DownArrowUpArrow$', -u'\u2200': '$\\forall$', -u'\u2201': '$\\complement$', -u'\u2202': '$\\partial$', -u'\u2203': '$\\exists$', -u'\u2204': '$\\nexists$', -u'\u2205': '$\\varnothing$', -u'\u2207': '$\\nabla$', -u'\u2208': '$\\in$', -u'\u2209': '$\\not\\in$', -u'\u220b': '$\\ni$', -u'\u220c': '$\\not\\ni$', -u'\u220f': '$\\prod$', -u'\u2210': '$\\coprod$', -u'\u2211': '$\\sum$', -u'\u2212': '{-}', -u'\u2213': '$\\mp$', -u'\u2214': '$\\dotplus$', -u'\u2216': '$\\setminus$', -u'\u2217': '${_\\ast}$', -u'\u2218': '$\\circ$', -u'\u2219': '$\\bullet$', -u'\u221a': '$\\surd$', -u'\u221d': '$\\propto$', -u'\u221e': '$\\infty$', -u'\u221f': '$\\rightangle$', -u'\u2220': '$\\angle$', -u'\u2221': '$\\measuredangle$', -u'\u2222': '$\\sphericalangle$', -u'\u2223': '$\\mid$', -u'\u2224': '$\\nmid$', -u'\u2225': '$\\parallel$', -u'\u2226': '$\\nparallel$', -u'\u2227': '$\\wedge$', -u'\u2228': '$\\vee$', -u'\u2229': '$\\cap$', -u'\u222a': '$\\cup$', -u'\u222b': '$\\int$', -u'\u222c': '$\\int\\!\\int$', -u'\u222d': '$\\int\\!\\int\\!\\int$', -u'\u222e': '$\\oint$', -u'\u222f': '$\\surfintegral$', -u'\u2230': '$\\volintegral$', -u'\u2231': '$\\clwintegral$', -u'\u2232': '$\\ElsevierGlyph{2232}$', -u'\u2233': '$\\ElsevierGlyph{2233}$', -u'\u2234': '$\\therefore$', -u'\u2235': '$\\because$', -u'\u2237': '$\\Colon$', -u'\u2238': '$\\ElsevierGlyph{2238}$', -u'\u223a': '$\\mathbin{{:}\\!\\!{-}\\!\\!{:}}$', -u'\u223b': '$\\homothetic$', -u'\u223c': '$\\sim$', -u'\u223d': '$\\backsim$', -u'\u223e': '$\\lazysinv$', -u'\u2240': '$\\wr$', -u'\u2241': '$\\not\\sim$', -u'\u2242': '$\\ElsevierGlyph{2242}$', -u'\u2243': 
'$\\simeq$', -u'\u2244': '$\\not\\simeq$', -u'\u2245': '$\\cong$', -u'\u2246': '$\\approxnotequal$', -u'\u2247': '$\\not\\cong$', -u'\u2248': '$\\approx$', -u'\u2249': '$\\not\\approx$', -u'\u224a': '$\\approxeq$', -u'\u224b': '$\\tildetrpl$', -u'\u224c': '$\\allequal$', -u'\u224d': '$\\asymp$', -u'\u224e': '$\\Bumpeq$', -u'\u224f': '$\\bumpeq$', -u'\u2250': '$\\doteq$', -u'\u2251': '$\\doteqdot$', -u'\u2252': '$\\fallingdotseq$', -u'\u2253': '$\\risingdotseq$', -u'\u2254': '{:=}', -u'\u2255': '$=:$', -u'\u2256': '$\\eqcirc$', -u'\u2257': '$\\circeq$', -u'\u2259': '$\\estimates$', -u'\u225a': '$\\ElsevierGlyph{225A}$', -u'\u225b': '$\\starequal$', -u'\u225c': '$\\triangleq$', -u'\u225f': '$\\ElsevierGlyph{225F}$', -u'\u2260': '$\\not =$', -u'\u2261': '$\\equiv$', -u'\u2262': '$\\not\\equiv$', -u'\u2264': '$\\leq$', -u'\u2265': '$\\geq$', -u'\u2266': '$\\leqq$', -u'\u2267': '$\\geqq$', -u'\u2268': '$\\lneqq$', -u'\u2269': '$\\gneqq$', -u'\u226a': '$\\ll$', -u'\u226b': '$\\gg$', -u'\u226c': '$\\between$', -u'\u226d': '$\\not\\kern-0.3em\\times$', -u'\u226e': '$\\not<$', -u'\u226f': '$\\not>$', -u'\u2270': '$\\not\\leq$', -u'\u2271': '$\\not\\geq$', -u'\u2272': '$\\lessequivlnt$', -u'\u2273': '$\\greaterequivlnt$', -u'\u2274': '$\\ElsevierGlyph{2274}$', -u'\u2275': '$\\ElsevierGlyph{2275}$', -u'\u2276': '$\\lessgtr$', -u'\u2277': '$\\gtrless$', -u'\u2278': '$\\notlessgreater$', -u'\u2279': '$\\notgreaterless$', -u'\u227a': '$\\prec$', -u'\u227b': '$\\succ$', -u'\u227c': '$\\preccurlyeq$', -u'\u227d': '$\\succcurlyeq$', -u'\u227e': '$\\precapprox$', -u'\u227f': '$\\succapprox$', -u'\u2280': '$\\not\\prec$', -u'\u2281': '$\\not\\succ$', -u'\u2282': '$\\subset$', -u'\u2283': '$\\supset$', -u'\u2284': '$\\not\\subset$', -u'\u2285': '$\\not\\supset$', -u'\u2286': '$\\subseteq$', -u'\u2287': '$\\supseteq$', -u'\u2288': '$\\not\\subseteq$', -u'\u2289': '$\\not\\supseteq$', -u'\u228a': '$\\subsetneq$', -u'\u228b': '$\\supsetneq$', -u'\u228e': '$\\uplus$', -u'\u228f': '$\\sqsubset$', -u'\u2290': '$\\sqsupset$', -u'\u2291': '$\\sqsubseteq$', -u'\u2292': '$\\sqsupseteq$', -u'\u2293': '$\\sqcap$', -u'\u2294': '$\\sqcup$', -u'\u2295': '$\\oplus$', -u'\u2296': '$\\ominus$', -u'\u2297': '$\\otimes$', -u'\u2298': '$\\oslash$', -u'\u2299': '$\\odot$', -u'\u229a': '$\\circledcirc$', -u'\u229b': '$\\circledast$', -u'\u229d': '$\\circleddash$', -u'\u229e': '$\\boxplus$', -u'\u229f': '$\\boxminus$', -u'\u22a0': '$\\boxtimes$', -u'\u22a1': '$\\boxdot$', -u'\u22a2': '$\\vdash$', -u'\u22a3': '$\\dashv$', -u'\u22a4': '$\\top$', -u'\u22a5': '$\\perp$', -u'\u22a7': '$\\truestate$', -u'\u22a8': '$\\forcesextra$', -u'\u22a9': '$\\Vdash$', -u'\u22aa': '$\\Vvdash$', -u'\u22ab': '$\\VDash$', -u'\u22ac': '$\\nvdash$', -u'\u22ad': '$\\nvDash$', -u'\u22ae': '$\\nVdash$', -u'\u22af': '$\\nVDash$', -u'\u22b2': '$\\vartriangleleft$', -u'\u22b3': '$\\vartriangleright$', -u'\u22b4': '$\\trianglelefteq$', -u'\u22b5': '$\\trianglerighteq$', -u'\u22b6': '$\\original$', -u'\u22b7': '$\\image$', -u'\u22b8': '$\\multimap$', -u'\u22b9': '$\\hermitconjmatrix$', -u'\u22ba': '$\\intercal$', -u'\u22bb': '$\\veebar$', -u'\u22be': '$\\rightanglearc$', -u'\u22c0': '$\\ElsevierGlyph{22C0}$', -u'\u22c1': '$\\ElsevierGlyph{22C1}$', -u'\u22c2': '$\\bigcap$', -u'\u22c3': '$\\bigcup$', -u'\u22c4': '$\\diamond$', -u'\u22c5': '$\\cdot$', -u'\u22c6': '$\\star$', -u'\u22c7': '$\\divideontimes$', -u'\u22c8': '$\\bowtie$', -u'\u22c9': '$\\ltimes$', -u'\u22ca': '$\\rtimes$', -u'\u22cb': '$\\leftthreetimes$', -u'\u22cc': '$\\rightthreetimes$', -u'\u22cd': 
'$\\backsimeq$', -u'\u22ce': '$\\curlyvee$', -u'\u22cf': '$\\curlywedge$', -u'\u22d0': '$\\Subset$', -u'\u22d1': '$\\Supset$', -u'\u22d2': '$\\Cap$', -u'\u22d3': '$\\Cup$', -u'\u22d4': '$\\pitchfork$', -u'\u22d6': '$\\lessdot$', -u'\u22d7': '$\\gtrdot$', -u'\u22d8': '$\\verymuchless$', -u'\u22d9': '$\\verymuchgreater$', -u'\u22da': '$\\lesseqgtr$', -u'\u22db': '$\\gtreqless$', -u'\u22de': '$\\curlyeqprec$', -u'\u22df': '$\\curlyeqsucc$', -u'\u22e2': '$\\not\\sqsubseteq$', -u'\u22e3': '$\\not\\sqsupseteq$', -u'\u22e5': '$\\Elzsqspne$', -u'\u22e6': '$\\lnsim$', -u'\u22e7': '$\\gnsim$', -u'\u22e8': '$\\precedesnotsimilar$', -u'\u22e9': '$\\succnsim$', -u'\u22ea': '$\\ntriangleleft$', -u'\u22eb': '$\\ntriangleright$', -u'\u22ec': '$\\ntrianglelefteq$', -u'\u22ed': '$\\ntrianglerighteq$', -u'\u22ee': '$\\vdots$', -u'\u22ef': '$\\cdots$', -u'\u22f0': '$\\upslopeellipsis$', -u'\u22f1': '$\\downslopeellipsis$', -u'\u2305': '{\\barwedge}', -u'\u2306': '$\\perspcorrespond$', -u'\u2308': '$\\lceil$', -u'\u2309': '$\\rceil$', -u'\u230a': '$\\lfloor$', -u'\u230b': '$\\rfloor$', -u'\u2315': '$\\recorder$', -u'\u2316': '$\\mathchar"2208$', -u'\u231c': '$\\ulcorner$', -u'\u231d': '$\\urcorner$', -u'\u231e': '$\\llcorner$', -u'\u231f': '$\\lrcorner$', -u'\u2322': '$\\frown$', -u'\u2323': '$\\smile$', -u'\u2329': '$\\langle$', -u'\u232a': '$\\rangle$', -u'\u233d': '$\\ElsevierGlyph{E838}$', -u'\u23a3': '$\\Elzdlcorn$', -u'\u23b0': '$\\lmoustache$', -u'\u23b1': '$\\rmoustache$', -u'\u2423': '{\\textvisiblespace}', -u'\u2460': '{\\ding{172}}', -u'\u2461': '{\\ding{173}}', -u'\u2462': '{\\ding{174}}', -u'\u2463': '{\\ding{175}}', -u'\u2464': '{\\ding{176}}', -u'\u2465': '{\\ding{177}}', -u'\u2466': '{\\ding{178}}', -u'\u2467': '{\\ding{179}}', -u'\u2468': '{\\ding{180}}', -u'\u2469': '{\\ding{181}}', -u'\u24c8': '$\\circledS$', -u'\u2506': '$\\Elzdshfnc$', -u'\u2519': '$\\Elzsqfnw$', -u'\u2571': '$\\diagup$', -u'\u25a0': '{\\ding{110}}', -u'\u25a1': '$\\square$', -u'\u25aa': '$\\blacksquare$', -u'\u25ad': '$\\fbox{~~}$', -u'\u25af': '$\\Elzvrecto$', -u'\u25b1': '$\\ElsevierGlyph{E381}$', -u'\u25b2': '{\\ding{115}}', -u'\u25b3': '$\\bigtriangleup$', -u'\u25b4': '$\\blacktriangle$', -u'\u25b5': '$\\vartriangle$', -u'\u25b8': '$\\blacktriangleright$', -u'\u25b9': '$\\triangleright$', -u'\u25bc': '{\\ding{116}}', -u'\u25bd': '$\\bigtriangledown$', -u'\u25be': '$\\blacktriangledown$', -u'\u25bf': '$\\triangledown$', -u'\u25c2': '$\\blacktriangleleft$', -u'\u25c3': '$\\triangleleft$', -u'\u25c6': '{\\ding{117}}', -u'\u25ca': '$\\lozenge$', -u'\u25cb': '$\\bigcirc$', -u'\u25cf': '{\\ding{108}}', -u'\u25d0': '$\\Elzcirfl$', -u'\u25d1': '$\\Elzcirfr$', -u'\u25d2': '$\\Elzcirfb$', -u'\u25d7': '{\\ding{119}}', -u'\u25d8': '$\\Elzrvbull$', -u'\u25e7': '$\\Elzsqfl$', -u'\u25e8': '$\\Elzsqfr$', -u'\u25ea': '$\\Elzsqfse$', -u'\u25ef': '$\\bigcirc$', -u'\u2605': '{\\ding{72}}', -u'\u2606': '{\\ding{73}}', -u'\u260e': '{\\ding{37}}', -u'\u261b': '{\\ding{42}}', -u'\u261e': '{\\ding{43}}', -u'\u263e': '{\\rightmoon}', -u'\u263f': '{\\mercury}', -u'\u2640': '{\\venus}', -u'\u2642': '{\\male}', -u'\u2643': '{\\jupiter}', -u'\u2644': '{\\saturn}', -u'\u2645': '{\\uranus}', -u'\u2646': '{\\neptune}', -u'\u2647': '{\\pluto}', -u'\u2648': '{\\aries}', -u'\u2649': '{\\taurus}', -u'\u264a': '{\\gemini}', -u'\u264b': '{\\cancer}', -u'\u264c': '{\\leo}', -u'\u264d': '{\\virgo}', -u'\u264e': '{\\libra}', -u'\u264f': '{\\scorpio}', -u'\u2650': '{\\sagittarius}', -u'\u2651': '{\\capricornus}', -u'\u2652': '{\\aquarius}', -u'\u2653': 
'{\\pisces}', -u'\u2660': '{\\ding{171}}', -u'\u2662': '$\\diamond$', -u'\u2663': '{\\ding{168}}', -u'\u2665': '{\\ding{170}}', -u'\u2666': '{\\ding{169}}', -u'\u2669': '{\\quarternote}', -u'\u266a': '{\\eighthnote}', -u'\u266d': '$\\flat$', -u'\u266e': '$\\natural$', -u'\u266f': '$\\sharp$', -u'\u2701': '{\\ding{33}}', -u'\u2702': '{\\ding{34}}', -u'\u2703': '{\\ding{35}}', -u'\u2704': '{\\ding{36}}', -u'\u2706': '{\\ding{38}}', -u'\u2707': '{\\ding{39}}', -u'\u2708': '{\\ding{40}}', -u'\u2709': '{\\ding{41}}', -u'\u270c': '{\\ding{44}}', -u'\u270d': '{\\ding{45}}', -u'\u270e': '{\\ding{46}}', -u'\u270f': '{\\ding{47}}', -u'\u2710': '{\\ding{48}}', -u'\u2711': '{\\ding{49}}', -u'\u2712': '{\\ding{50}}', -u'\u2713': '{\\ding{51}}', -u'\u2714': '{\\ding{52}}', -u'\u2715': '{\\ding{53}}', -u'\u2716': '{\\ding{54}}', -u'\u2717': '{\\ding{55}}', -u'\u2718': '{\\ding{56}}', -u'\u2719': '{\\ding{57}}', -u'\u271a': '{\\ding{58}}', -u'\u271b': '{\\ding{59}}', -u'\u271c': '{\\ding{60}}', -u'\u271d': '{\\ding{61}}', -u'\u271e': '{\\ding{62}}', -u'\u271f': '{\\ding{63}}', -u'\u2720': '{\\ding{64}}', -u'\u2721': '{\\ding{65}}', -u'\u2722': '{\\ding{66}}', -u'\u2723': '{\\ding{67}}', -u'\u2724': '{\\ding{68}}', -u'\u2725': '{\\ding{69}}', -u'\u2726': '{\\ding{70}}', -u'\u2727': '{\\ding{71}}', -u'\u2729': '{\\ding{73}}', -u'\u272a': '{\\ding{74}}', -u'\u272b': '{\\ding{75}}', -u'\u272c': '{\\ding{76}}', -u'\u272d': '{\\ding{77}}', -u'\u272e': '{\\ding{78}}', -u'\u272f': '{\\ding{79}}', -u'\u2730': '{\\ding{80}}', -u'\u2731': '{\\ding{81}}', -u'\u2732': '{\\ding{82}}', -u'\u2733': '{\\ding{83}}', -u'\u2734': '{\\ding{84}}', -u'\u2735': '{\\ding{85}}', -u'\u2736': '{\\ding{86}}', -u'\u2737': '{\\ding{87}}', -u'\u2738': '{\\ding{88}}', -u'\u2739': '{\\ding{89}}', -u'\u273a': '{\\ding{90}}', -u'\u273b': '{\\ding{91}}', -u'\u273c': '{\\ding{92}}', -u'\u273d': '{\\ding{93}}', -u'\u273e': '{\\ding{94}}', -u'\u273f': '{\\ding{95}}', -u'\u2740': '{\\ding{96}}', -u'\u2741': '{\\ding{97}}', -u'\u2742': '{\\ding{98}}', -u'\u2743': '{\\ding{99}}', -u'\u2744': '{\\ding{100}}', -u'\u2745': '{\\ding{101}}', -u'\u2746': '{\\ding{102}}', -u'\u2747': '{\\ding{103}}', -u'\u2748': '{\\ding{104}}', -u'\u2749': '{\\ding{105}}', -u'\u274a': '{\\ding{106}}', -u'\u274b': '{\\ding{107}}', -u'\u274d': '{\\ding{109}}', -u'\u274f': '{\\ding{111}}', -u'\u2750': '{\\ding{112}}', -u'\u2751': '{\\ding{113}}', -u'\u2752': '{\\ding{114}}', -u'\u2756': '{\\ding{118}}', -u'\u2758': '{\\ding{120}}', -u'\u2759': '{\\ding{121}}', -u'\u275a': '{\\ding{122}}', -u'\u275b': '{\\ding{123}}', -u'\u275c': '{\\ding{124}}', -u'\u275d': '{\\ding{125}}', -u'\u275e': '{\\ding{126}}', -u'\u2761': '{\\ding{161}}', -u'\u2762': '{\\ding{162}}', -u'\u2763': '{\\ding{163}}', -u'\u2764': '{\\ding{164}}', -u'\u2765': '{\\ding{165}}', -u'\u2766': '{\\ding{166}}', -u'\u2767': '{\\ding{167}}', -u'\u2776': '{\\ding{182}}', -u'\u2777': '{\\ding{183}}', -u'\u2778': '{\\ding{184}}', -u'\u2779': '{\\ding{185}}', -u'\u277a': '{\\ding{186}}', -u'\u277b': '{\\ding{187}}', -u'\u277c': '{\\ding{188}}', -u'\u277d': '{\\ding{189}}', -u'\u277e': '{\\ding{190}}', -u'\u277f': '{\\ding{191}}', -u'\u2780': '{\\ding{192}}', -u'\u2781': '{\\ding{193}}', -u'\u2782': '{\\ding{194}}', -u'\u2783': '{\\ding{195}}', -u'\u2784': '{\\ding{196}}', -u'\u2785': '{\\ding{197}}', -u'\u2786': '{\\ding{198}}', -u'\u2787': '{\\ding{199}}', -u'\u2788': '{\\ding{200}}', -u'\u2789': '{\\ding{201}}', -u'\u278a': '{\\ding{202}}', -u'\u278b': '{\\ding{203}}', -u'\u278c': '{\\ding{204}}', -u'\u278d': 
'{\\ding{205}}', -u'\u278e': '{\\ding{206}}', -u'\u278f': '{\\ding{207}}', -u'\u2790': '{\\ding{208}}', -u'\u2791': '{\\ding{209}}', -u'\u2792': '{\\ding{210}}', -u'\u2793': '{\\ding{211}}', -u'\u2794': '{\\ding{212}}', -u'\u2798': '{\\ding{216}}', -u'\u2799': '{\\ding{217}}', -u'\u279a': '{\\ding{218}}', -u'\u279b': '{\\ding{219}}', -u'\u279c': '{\\ding{220}}', -u'\u279d': '{\\ding{221}}', -u'\u279e': '{\\ding{222}}', -u'\u279f': '{\\ding{223}}', -u'\u27a0': '{\\ding{224}}', -u'\u27a1': '{\\ding{225}}', -u'\u27a2': '{\\ding{226}}', -u'\u27a3': '{\\ding{227}}', -u'\u27a4': '{\\ding{228}}', -u'\u27a5': '{\\ding{229}}', -u'\u27a6': '{\\ding{230}}', -u'\u27a7': '{\\ding{231}}', -u'\u27a8': '{\\ding{232}}', -u'\u27a9': '{\\ding{233}}', -u'\u27aa': '{\\ding{234}}', -u'\u27ab': '{\\ding{235}}', -u'\u27ac': '{\\ding{236}}', -u'\u27ad': '{\\ding{237}}', -u'\u27ae': '{\\ding{238}}', -u'\u27af': '{\\ding{239}}', -u'\u27b1': '{\\ding{241}}', -u'\u27b2': '{\\ding{242}}', -u'\u27b3': '{\\ding{243}}', -u'\u27b4': '{\\ding{244}}', -u'\u27b5': '{\\ding{245}}', -u'\u27b6': '{\\ding{246}}', -u'\u27b7': '{\\ding{247}}', -u'\u27b8': '{\\ding{248}}', -u'\u27b9': '{\\ding{249}}', -u'\u27ba': '{\\ding{250}}', -u'\u27bb': '{\\ding{251}}', -u'\u27bc': '{\\ding{252}}', -u'\u27bd': '{\\ding{253}}', -u'\u27be': '{\\ding{254}}', -u'\u27f5': '$\\longleftarrow$', -u'\u27f6': '$\\longrightarrow$', -u'\u27f7': '$\\longleftrightarrow$', -u'\u27f8': '$\\Longleftarrow$', -u'\u27f9': '$\\Longrightarrow$', -u'\u27fa': '$\\Longleftrightarrow$', -u'\u27fc': '$\\longmapsto$', -u'\u27ff': '$\\sim\\joinrel\\leadsto$', -u'\u2905': '$\\ElsevierGlyph{E212}$', -u'\u2912': '$\\UpArrowBar$', -u'\u2913': '$\\DownArrowBar$', -u'\u2923': '$\\ElsevierGlyph{E20C}$', -u'\u2924': '$\\ElsevierGlyph{E20D}$', -u'\u2925': '$\\ElsevierGlyph{E20B}$', -u'\u2926': '$\\ElsevierGlyph{E20A}$', -u'\u2927': '$\\ElsevierGlyph{E211}$', -u'\u2928': '$\\ElsevierGlyph{E20E}$', -u'\u2929': '$\\ElsevierGlyph{E20F}$', -u'\u292a': '$\\ElsevierGlyph{E210}$', -u'\u2933': '$\\ElsevierGlyph{E21C}$', -u'\u2936': '$\\ElsevierGlyph{E21A}$', -u'\u2937': '$\\ElsevierGlyph{E219}$', -u'\u2940': '$\\Elolarr$', -u'\u2941': '$\\Elorarr$', -u'\u2942': '$\\ElzRlarr$', -u'\u2944': '$\\ElzrLarr$', -u'\u2947': '$\\Elzrarrx$', -u'\u294e': '$\\LeftRightVector$', -u'\u294f': '$\\RightUpDownVector$', -u'\u2950': '$\\DownLeftRightVector$', -u'\u2951': '$\\LeftUpDownVector$', -u'\u2952': '$\\LeftVectorBar$', -u'\u2953': '$\\RightVectorBar$', -u'\u2954': '$\\RightUpVectorBar$', -u'\u2955': '$\\RightDownVectorBar$', -u'\u2956': '$\\DownLeftVectorBar$', -u'\u2957': '$\\DownRightVectorBar$', -u'\u2958': '$\\LeftUpVectorBar$', -u'\u2959': '$\\LeftDownVectorBar$', -u'\u295a': '$\\LeftTeeVector$', -u'\u295b': '$\\RightTeeVector$', -u'\u295c': '$\\RightUpTeeVector$', -u'\u295d': '$\\RightDownTeeVector$', -u'\u295e': '$\\DownLeftTeeVector$', -u'\u295f': '$\\DownRightTeeVector$', -u'\u2960': '$\\LeftUpTeeVector$', -u'\u2961': '$\\LeftDownTeeVector$', -u'\u296e': '$\\UpEquilibrium$', -u'\u296f': '$\\ReverseUpEquilibrium$', -u'\u2970': '$\\RoundImplies$', -u'\u297c': '$\\ElsevierGlyph{E214}$', -u'\u297d': '$\\ElsevierGlyph{E215}$', -u'\u2980': '$\\Elztfnc$', -u'\u2985': '$\\ElsevierGlyph{3018}$', -u'\u2986': '$\\Elroang$', -u'\u2993': '$<\\kern-0.58em($', -u'\u2994': '$\\ElsevierGlyph{E291}$', -u'\u2999': '$\\Elzddfnc$', -u'\u299c': '$\\Angle$', -u'\u29a0': '$\\Elzlpargt$', -u'\u29b5': '$\\ElsevierGlyph{E260}$', -u'\u29b6': '$\\ElsevierGlyph{E61B}$', -u'\u29ca': '$\\ElzLap$', -u'\u29cb': 
'$\\Elzdefas$', -u'\u29cf': '$\\LeftTriangleBar$', -u'\u29d0': '$\\RightTriangleBar$', -u'\u29dc': '$\\ElsevierGlyph{E372}$', -u'\u29eb': '$\\blacklozenge$', -u'\u29f4': '$\\RuleDelayed$', -u'\u2a04': '$\\Elxuplus$', -u'\u2a05': '$\\ElzThr$', -u'\u2a06': '$\\Elxsqcup$', -u'\u2a07': '$\\ElzInf$', -u'\u2a08': '$\\ElzSup$', -u'\u2a0d': '$\\ElzCint$', -u'\u2a0f': '$\\clockoint$', -u'\u2a10': '$\\ElsevierGlyph{E395}$', -u'\u2a16': '$\\sqrint$', -u'\u2a25': '$\\ElsevierGlyph{E25A}$', -u'\u2a2a': '$\\ElsevierGlyph{E25B}$', -u'\u2a2d': '$\\ElsevierGlyph{E25C}$', -u'\u2a2e': '$\\ElsevierGlyph{E25D}$', -u'\u2a2f': '$\\ElzTimes$', -u'\u2a34': '$\\ElsevierGlyph{E25E}$', -u'\u2a35': '$\\ElsevierGlyph{E25E}$', -u'\u2a3c': '$\\ElsevierGlyph{E259}$', -u'\u2a3f': '$\\amalg$', -u'\u2a53': '$\\ElzAnd$', -u'\u2a54': '$\\ElzOr$', -u'\u2a55': '$\\ElsevierGlyph{E36E}$', -u'\u2a56': '$\\ElOr$', -u'\u2a5e': '$\\perspcorrespond$', -u'\u2a5f': '$\\Elzminhat$', -u'\u2a63': '$\\ElsevierGlyph{225A}$', -u'\u2a6e': '$\\stackrel{*}{=}$', -u'\u2a75': '$\\Equal$', -u'\u2a7d': '$\\leqslant$', -u'\u2a7e': '$\\geqslant$', -u'\u2a85': '$\\lessapprox$', -u'\u2a86': '$\\gtrapprox$', -u'\u2a87': '$\\lneq$', -u'\u2a88': '$\\gneq$', -u'\u2a89': '$\\lnapprox$', -u'\u2a8a': '$\\gnapprox$', -u'\u2a8b': '$\\lesseqqgtr$', -u'\u2a8c': '$\\gtreqqless$', -u'\u2a95': '$\\eqslantless$', -u'\u2a96': '$\\eqslantgtr$', -u'\u2a9d': '$\\Pisymbol{ppi020}{117}$', -u'\u2a9e': '$\\Pisymbol{ppi020}{105}$', -u'\u2aa1': '$\\NestedLessLess$', -u'\u2aa2': '$\\NestedGreaterGreater$', -u'\u2aaf': '$\\preceq$', -u'\u2ab0': '$\\succeq$', -u'\u2ab5': '$\\precneqq$', -u'\u2ab6': '$\\succneqq$', -u'\u2ab7': '$\\precapprox$', -u'\u2ab8': '$\\succapprox$', -u'\u2ab9': '$\\precnapprox$', -u'\u2aba': '$\\succnapprox$', -u'\u2ac5': '$\\subseteqq$', -u'\u2ac6': '$\\supseteqq$', -u'\u2acb': '$\\subsetneqq$', -u'\u2acc': '$\\supsetneqq$', -u'\u2aeb': '$\\ElsevierGlyph{E30D}$', -u'\u2af6': '$\\Elztdcol$', -u'\u2afd': '${{/}\\!\\!{/}}$', -u'\u300a': '$\\ElsevierGlyph{300A}$', -u'\u300b': '$\\ElsevierGlyph{300B}$', -u'\u3018': '$\\ElsevierGlyph{3018}$', -u'\u3019': '$\\ElsevierGlyph{3019}$', -u'\u301a': '$\\openbracketleft$', -u'\u301b': '$\\openbracketright$', -u'\ufb00': '{ff}', -u'\ufb01': '{fi}', -u'\ufb02': '{fl}', -u'\ufb03': '{ffi}', -u'\ufb04': '{ffl}', -u'\U0001d400': '$\\mathbf{A}$', -u'\U0001d401': '$\\mathbf{B}$', -u'\U0001d402': '$\\mathbf{C}$', -u'\U0001d403': '$\\mathbf{D}$', -u'\U0001d404': '$\\mathbf{E}$', -u'\U0001d405': '$\\mathbf{F}$', -u'\U0001d406': '$\\mathbf{G}$', -u'\U0001d407': '$\\mathbf{H}$', -u'\U0001d408': '$\\mathbf{I}$', -u'\U0001d409': '$\\mathbf{J}$', -u'\U0001d40a': '$\\mathbf{K}$', -u'\U0001d40b': '$\\mathbf{L}$', -u'\U0001d40c': '$\\mathbf{M}$', -u'\U0001d40d': '$\\mathbf{N}$', -u'\U0001d40e': '$\\mathbf{O}$', -u'\U0001d40f': '$\\mathbf{P}$', -u'\U0001d410': '$\\mathbf{Q}$', -u'\U0001d411': '$\\mathbf{R}$', -u'\U0001d412': '$\\mathbf{S}$', -u'\U0001d413': '$\\mathbf{T}$', -u'\U0001d414': '$\\mathbf{U}$', -u'\U0001d415': '$\\mathbf{V}$', -u'\U0001d416': '$\\mathbf{W}$', -u'\U0001d417': '$\\mathbf{X}$', -u'\U0001d418': '$\\mathbf{Y}$', -u'\U0001d419': '$\\mathbf{Z}$', -u'\U0001d41a': '$\\mathbf{a}$', -u'\U0001d41b': '$\\mathbf{b}$', -u'\U0001d41c': '$\\mathbf{c}$', -u'\U0001d41d': '$\\mathbf{d}$', -u'\U0001d41e': '$\\mathbf{e}$', -u'\U0001d41f': '$\\mathbf{f}$', -u'\U0001d420': '$\\mathbf{g}$', -u'\U0001d421': '$\\mathbf{h}$', -u'\U0001d422': '$\\mathbf{i}$', -u'\U0001d423': '$\\mathbf{j}$', -u'\U0001d424': '$\\mathbf{k}$', -u'\U0001d425': 
'$\\mathbf{l}$', -u'\U0001d426': '$\\mathbf{m}$', -u'\U0001d427': '$\\mathbf{n}$', -u'\U0001d428': '$\\mathbf{o}$', -u'\U0001d429': '$\\mathbf{p}$', -u'\U0001d42a': '$\\mathbf{q}$', -u'\U0001d42b': '$\\mathbf{r}$', -u'\U0001d42c': '$\\mathbf{s}$', -u'\U0001d42d': '$\\mathbf{t}$', -u'\U0001d42e': '$\\mathbf{u}$', -u'\U0001d42f': '$\\mathbf{v}$', -u'\U0001d430': '$\\mathbf{w}$', -u'\U0001d431': '$\\mathbf{x}$', -u'\U0001d432': '$\\mathbf{y}$', -u'\U0001d433': '$\\mathbf{z}$', -u'\U0001d434': '$\\mathsl{A}$', -u'\U0001d435': '$\\mathsl{B}$', -u'\U0001d436': '$\\mathsl{C}$', -u'\U0001d437': '$\\mathsl{D}$', -u'\U0001d438': '$\\mathsl{E}$', -u'\U0001d439': '$\\mathsl{F}$', -u'\U0001d43a': '$\\mathsl{G}$', -u'\U0001d43b': '$\\mathsl{H}$', -u'\U0001d43c': '$\\mathsl{I}$', -u'\U0001d43d': '$\\mathsl{J}$', -u'\U0001d43e': '$\\mathsl{K}$', -u'\U0001d43f': '$\\mathsl{L}$', -u'\U0001d440': '$\\mathsl{M}$', -u'\U0001d441': '$\\mathsl{N}$', -u'\U0001d442': '$\\mathsl{O}$', -u'\U0001d443': '$\\mathsl{P}$', -u'\U0001d444': '$\\mathsl{Q}$', -u'\U0001d445': '$\\mathsl{R}$', -u'\U0001d446': '$\\mathsl{S}$', -u'\U0001d447': '$\\mathsl{T}$', -u'\U0001d448': '$\\mathsl{U}$', -u'\U0001d449': '$\\mathsl{V}$', -u'\U0001d44a': '$\\mathsl{W}$', -u'\U0001d44b': '$\\mathsl{X}$', -u'\U0001d44c': '$\\mathsl{Y}$', -u'\U0001d44d': '$\\mathsl{Z}$', -u'\U0001d44e': '$\\mathsl{a}$', -u'\U0001d44f': '$\\mathsl{b}$', -u'\U0001d450': '$\\mathsl{c}$', -u'\U0001d451': '$\\mathsl{d}$', -u'\U0001d452': '$\\mathsl{e}$', -u'\U0001d453': '$\\mathsl{f}$', -u'\U0001d454': '$\\mathsl{g}$', -u'\U0001d456': '$\\mathsl{i}$', -u'\U0001d457': '$\\mathsl{j}$', -u'\U0001d458': '$\\mathsl{k}$', -u'\U0001d459': '$\\mathsl{l}$', -u'\U0001d45a': '$\\mathsl{m}$', -u'\U0001d45b': '$\\mathsl{n}$', -u'\U0001d45c': '$\\mathsl{o}$', -u'\U0001d45d': '$\\mathsl{p}$', -u'\U0001d45e': '$\\mathsl{q}$', -u'\U0001d45f': '$\\mathsl{r}$', -u'\U0001d460': '$\\mathsl{s}$', -u'\U0001d461': '$\\mathsl{t}$', -u'\U0001d462': '$\\mathsl{u}$', -u'\U0001d463': '$\\mathsl{v}$', -u'\U0001d464': '$\\mathsl{w}$', -u'\U0001d465': '$\\mathsl{x}$', -u'\U0001d466': '$\\mathsl{y}$', -u'\U0001d467': '$\\mathsl{z}$', -u'\U0001d468': '$\\mathbit{A}$', -u'\U0001d469': '$\\mathbit{B}$', -u'\U0001d46a': '$\\mathbit{C}$', -u'\U0001d46b': '$\\mathbit{D}$', -u'\U0001d46c': '$\\mathbit{E}$', -u'\U0001d46d': '$\\mathbit{F}$', -u'\U0001d46e': '$\\mathbit{G}$', -u'\U0001d46f': '$\\mathbit{H}$', -u'\U0001d470': '$\\mathbit{I}$', -u'\U0001d471': '$\\mathbit{J}$', -u'\U0001d472': '$\\mathbit{K}$', -u'\U0001d473': '$\\mathbit{L}$', -u'\U0001d474': '$\\mathbit{M}$', -u'\U0001d475': '$\\mathbit{N}$', -u'\U0001d476': '$\\mathbit{O}$', -u'\U0001d477': '$\\mathbit{P}$', -u'\U0001d478': '$\\mathbit{Q}$', -u'\U0001d479': '$\\mathbit{R}$', -u'\U0001d47a': '$\\mathbit{S}$', -u'\U0001d47b': '$\\mathbit{T}$', -u'\U0001d47c': '$\\mathbit{U}$', -u'\U0001d47d': '$\\mathbit{V}$', -u'\U0001d47e': '$\\mathbit{W}$', -u'\U0001d47f': '$\\mathbit{X}$', -u'\U0001d480': '$\\mathbit{Y}$', -u'\U0001d481': '$\\mathbit{Z}$', -u'\U0001d482': '$\\mathbit{a}$', -u'\U0001d483': '$\\mathbit{b}$', -u'\U0001d484': '$\\mathbit{c}$', -u'\U0001d485': '$\\mathbit{d}$', -u'\U0001d486': '$\\mathbit{e}$', -u'\U0001d487': '$\\mathbit{f}$', -u'\U0001d488': '$\\mathbit{g}$', -u'\U0001d489': '$\\mathbit{h}$', -u'\U0001d48a': '$\\mathbit{i}$', -u'\U0001d48b': '$\\mathbit{j}$', -u'\U0001d48c': '$\\mathbit{k}$', -u'\U0001d48d': '$\\mathbit{l}$', -u'\U0001d48e': '$\\mathbit{m}$', -u'\U0001d48f': '$\\mathbit{n}$', -u'\U0001d490': 
'$\\mathbit{o}$', -u'\U0001d491': '$\\mathbit{p}$', -u'\U0001d492': '$\\mathbit{q}$', -u'\U0001d493': '$\\mathbit{r}$', -u'\U0001d494': '$\\mathbit{s}$', -u'\U0001d495': '$\\mathbit{t}$', -u'\U0001d496': '$\\mathbit{u}$', -u'\U0001d497': '$\\mathbit{v}$', -u'\U0001d498': '$\\mathbit{w}$', -u'\U0001d499': '$\\mathbit{x}$', -u'\U0001d49a': '$\\mathbit{y}$', -u'\U0001d49b': '$\\mathbit{z}$', -u'\U0001d49c': '$\\mathscr{A}$', -u'\U0001d49e': '$\\mathscr{C}$', -u'\U0001d49f': '$\\mathscr{D}$', -u'\U0001d4a2': '$\\mathscr{G}$', -u'\U0001d4a5': '$\\mathscr{J}$', -u'\U0001d4a6': '$\\mathscr{K}$', -u'\U0001d4a9': '$\\mathscr{N}$', -u'\U0001d4aa': '$\\mathscr{O}$', -u'\U0001d4ab': '$\\mathscr{P}$', -u'\U0001d4ac': '$\\mathscr{Q}$', -u'\U0001d4ae': '$\\mathscr{S}$', -u'\U0001d4af': '$\\mathscr{T}$', -u'\U0001d4b0': '$\\mathscr{U}$', -u'\U0001d4b1': '$\\mathscr{V}$', -u'\U0001d4b2': '$\\mathscr{W}$', -u'\U0001d4b3': '$\\mathscr{X}$', -u'\U0001d4b4': '$\\mathscr{Y}$', -u'\U0001d4b5': '$\\mathscr{Z}$', -u'\U0001d4b6': '$\\mathscr{a}$', -u'\U0001d4b7': '$\\mathscr{b}$', -u'\U0001d4b8': '$\\mathscr{c}$', -u'\U0001d4b9': '$\\mathscr{d}$', -u'\U0001d4bb': '$\\mathscr{f}$', -u'\U0001d4bd': '$\\mathscr{h}$', -u'\U0001d4be': '$\\mathscr{i}$', -u'\U0001d4bf': '$\\mathscr{j}$', -u'\U0001d4c0': '$\\mathscr{k}$', -u'\U0001d4c1': '$\\mathscr{l}$', -u'\U0001d4c2': '$\\mathscr{m}$', -u'\U0001d4c3': '$\\mathscr{n}$', -u'\U0001d4c5': '$\\mathscr{p}$', -u'\U0001d4c6': '$\\mathscr{q}$', -u'\U0001d4c7': '$\\mathscr{r}$', -u'\U0001d4c8': '$\\mathscr{s}$', -u'\U0001d4c9': '$\\mathscr{t}$', -u'\U0001d4ca': '$\\mathscr{u}$', -u'\U0001d4cb': '$\\mathscr{v}$', -u'\U0001d4cc': '$\\mathscr{w}$', -u'\U0001d4cd': '$\\mathscr{x}$', -u'\U0001d4ce': '$\\mathscr{y}$', -u'\U0001d4cf': '$\\mathscr{z}$', -u'\U0001d4d0': '$\\mathmit{A}$', -u'\U0001d4d1': '$\\mathmit{B}$', -u'\U0001d4d2': '$\\mathmit{C}$', -u'\U0001d4d3': '$\\mathmit{D}$', -u'\U0001d4d4': '$\\mathmit{E}$', -u'\U0001d4d5': '$\\mathmit{F}$', -u'\U0001d4d6': '$\\mathmit{G}$', -u'\U0001d4d7': '$\\mathmit{H}$', -u'\U0001d4d8': '$\\mathmit{I}$', -u'\U0001d4d9': '$\\mathmit{J}$', -u'\U0001d4da': '$\\mathmit{K}$', -u'\U0001d4db': '$\\mathmit{L}$', -u'\U0001d4dc': '$\\mathmit{M}$', -u'\U0001d4dd': '$\\mathmit{N}$', -u'\U0001d4de': '$\\mathmit{O}$', -u'\U0001d4df': '$\\mathmit{P}$', -u'\U0001d4e0': '$\\mathmit{Q}$', -u'\U0001d4e1': '$\\mathmit{R}$', -u'\U0001d4e2': '$\\mathmit{S}$', -u'\U0001d4e3': '$\\mathmit{T}$', -u'\U0001d4e4': '$\\mathmit{U}$', -u'\U0001d4e5': '$\\mathmit{V}$', -u'\U0001d4e6': '$\\mathmit{W}$', -u'\U0001d4e7': '$\\mathmit{X}$', -u'\U0001d4e8': '$\\mathmit{Y}$', -u'\U0001d4e9': '$\\mathmit{Z}$', -u'\U0001d4ea': '$\\mathmit{a}$', -u'\U0001d4eb': '$\\mathmit{b}$', -u'\U0001d4ec': '$\\mathmit{c}$', -u'\U0001d4ed': '$\\mathmit{d}$', -u'\U0001d4ee': '$\\mathmit{e}$', -u'\U0001d4ef': '$\\mathmit{f}$', -u'\U0001d4f0': '$\\mathmit{g}$', -u'\U0001d4f1': '$\\mathmit{h}$', -u'\U0001d4f2': '$\\mathmit{i}$', -u'\U0001d4f3': '$\\mathmit{j}$', -u'\U0001d4f4': '$\\mathmit{k}$', -u'\U0001d4f5': '$\\mathmit{l}$', -u'\U0001d4f6': '$\\mathmit{m}$', -u'\U0001d4f7': '$\\mathmit{n}$', -u'\U0001d4f8': '$\\mathmit{o}$', -u'\U0001d4f9': '$\\mathmit{p}$', -u'\U0001d4fa': '$\\mathmit{q}$', -u'\U0001d4fb': '$\\mathmit{r}$', -u'\U0001d4fc': '$\\mathmit{s}$', -u'\U0001d4fd': '$\\mathmit{t}$', -u'\U0001d4fe': '$\\mathmit{u}$', -u'\U0001d4ff': '$\\mathmit{v}$', -u'\U0001d500': '$\\mathmit{w}$', -u'\U0001d501': '$\\mathmit{x}$', -u'\U0001d502': '$\\mathmit{y}$', -u'\U0001d503': '$\\mathmit{z}$', 
-u'\U0001d504': '$\\mathfrak{A}$', -u'\U0001d505': '$\\mathfrak{B}$', -u'\U0001d507': '$\\mathfrak{D}$', -u'\U0001d508': '$\\mathfrak{E}$', -u'\U0001d509': '$\\mathfrak{F}$', -u'\U0001d50a': '$\\mathfrak{G}$', -u'\U0001d50d': '$\\mathfrak{J}$', -u'\U0001d50e': '$\\mathfrak{K}$', -u'\U0001d50f': '$\\mathfrak{L}$', -u'\U0001d510': '$\\mathfrak{M}$', -u'\U0001d511': '$\\mathfrak{N}$', -u'\U0001d512': '$\\mathfrak{O}$', -u'\U0001d513': '$\\mathfrak{P}$', -u'\U0001d514': '$\\mathfrak{Q}$', -u'\U0001d516': '$\\mathfrak{S}$', -u'\U0001d517': '$\\mathfrak{T}$', -u'\U0001d518': '$\\mathfrak{U}$', -u'\U0001d519': '$\\mathfrak{V}$', -u'\U0001d51a': '$\\mathfrak{W}$', -u'\U0001d51b': '$\\mathfrak{X}$', -u'\U0001d51c': '$\\mathfrak{Y}$', -u'\U0001d51e': '$\\mathfrak{a}$', -u'\U0001d51f': '$\\mathfrak{b}$', -u'\U0001d520': '$\\mathfrak{c}$', -u'\U0001d521': '$\\mathfrak{d}$', -u'\U0001d522': '$\\mathfrak{e}$', -u'\U0001d523': '$\\mathfrak{f}$', -u'\U0001d524': '$\\mathfrak{g}$', -u'\U0001d525': '$\\mathfrak{h}$', -u'\U0001d526': '$\\mathfrak{i}$', -u'\U0001d527': '$\\mathfrak{j}$', -u'\U0001d528': '$\\mathfrak{k}$', -u'\U0001d529': '$\\mathfrak{l}$', -u'\U0001d52a': '$\\mathfrak{m}$', -u'\U0001d52b': '$\\mathfrak{n}$', -u'\U0001d52c': '$\\mathfrak{o}$', -u'\U0001d52d': '$\\mathfrak{p}$', -u'\U0001d52e': '$\\mathfrak{q}$', -u'\U0001d52f': '$\\mathfrak{r}$', -u'\U0001d530': '$\\mathfrak{s}$', -u'\U0001d531': '$\\mathfrak{t}$', -u'\U0001d532': '$\\mathfrak{u}$', -u'\U0001d533': '$\\mathfrak{v}$', -u'\U0001d534': '$\\mathfrak{w}$', -u'\U0001d535': '$\\mathfrak{x}$', -u'\U0001d536': '$\\mathfrak{y}$', -u'\U0001d537': '$\\mathfrak{z}$', -u'\U0001d538': '$\\mathbb{A}$', -u'\U0001d539': '$\\mathbb{B}$', -u'\U0001d53b': '$\\mathbb{D}$', -u'\U0001d53c': '$\\mathbb{E}$', -u'\U0001d53d': '$\\mathbb{F}$', -u'\U0001d53e': '$\\mathbb{G}$', -u'\U0001d540': '$\\mathbb{I}$', -u'\U0001d541': '$\\mathbb{J}$', -u'\U0001d542': '$\\mathbb{K}$', -u'\U0001d543': '$\\mathbb{L}$', -u'\U0001d544': '$\\mathbb{M}$', -u'\U0001d546': '$\\mathbb{O}$', -u'\U0001d54a': '$\\mathbb{S}$', -u'\U0001d54b': '$\\mathbb{T}$', -u'\U0001d54c': '$\\mathbb{U}$', -u'\U0001d54d': '$\\mathbb{V}$', -u'\U0001d54e': '$\\mathbb{W}$', -u'\U0001d54f': '$\\mathbb{X}$', -u'\U0001d550': '$\\mathbb{Y}$', -u'\U0001d552': '$\\mathbb{a}$', -u'\U0001d553': '$\\mathbb{b}$', -u'\U0001d554': '$\\mathbb{c}$', -u'\U0001d555': '$\\mathbb{d}$', -u'\U0001d556': '$\\mathbb{e}$', -u'\U0001d557': '$\\mathbb{f}$', -u'\U0001d558': '$\\mathbb{g}$', -u'\U0001d559': '$\\mathbb{h}$', -u'\U0001d55a': '$\\mathbb{i}$', -u'\U0001d55b': '$\\mathbb{j}$', -u'\U0001d55c': '$\\mathbb{k}$', -u'\U0001d55d': '$\\mathbb{l}$', -u'\U0001d55e': '$\\mathbb{m}$', -u'\U0001d55f': '$\\mathbb{n}$', -u'\U0001d560': '$\\mathbb{o}$', -u'\U0001d561': '$\\mathbb{p}$', -u'\U0001d562': '$\\mathbb{q}$', -u'\U0001d563': '$\\mathbb{r}$', -u'\U0001d564': '$\\mathbb{s}$', -u'\U0001d565': '$\\mathbb{t}$', -u'\U0001d566': '$\\mathbb{u}$', -u'\U0001d567': '$\\mathbb{v}$', -u'\U0001d568': '$\\mathbb{w}$', -u'\U0001d569': '$\\mathbb{x}$', -u'\U0001d56a': '$\\mathbb{y}$', -u'\U0001d56b': '$\\mathbb{z}$', -u'\U0001d56c': '$\\mathslbb{A}$', -u'\U0001d56d': '$\\mathslbb{B}$', -u'\U0001d56e': '$\\mathslbb{C}$', -u'\U0001d56f': '$\\mathslbb{D}$', -u'\U0001d570': '$\\mathslbb{E}$', -u'\U0001d571': '$\\mathslbb{F}$', -u'\U0001d572': '$\\mathslbb{G}$', -u'\U0001d573': '$\\mathslbb{H}$', -u'\U0001d574': '$\\mathslbb{I}$', -u'\U0001d575': '$\\mathslbb{J}$', -u'\U0001d576': '$\\mathslbb{K}$', -u'\U0001d577': '$\\mathslbb{L}$', 
-u'\U0001d578': '$\\mathslbb{M}$', -u'\U0001d579': '$\\mathslbb{N}$', -u'\U0001d57a': '$\\mathslbb{O}$', -u'\U0001d57b': '$\\mathslbb{P}$', -u'\U0001d57c': '$\\mathslbb{Q}$', -u'\U0001d57d': '$\\mathslbb{R}$', -u'\U0001d57e': '$\\mathslbb{S}$', -u'\U0001d57f': '$\\mathslbb{T}$', -u'\U0001d580': '$\\mathslbb{U}$', -u'\U0001d581': '$\\mathslbb{V}$', -u'\U0001d582': '$\\mathslbb{W}$', -u'\U0001d583': '$\\mathslbb{X}$', -u'\U0001d584': '$\\mathslbb{Y}$', -u'\U0001d585': '$\\mathslbb{Z}$', -u'\U0001d586': '$\\mathslbb{a}$', -u'\U0001d587': '$\\mathslbb{b}$', -u'\U0001d588': '$\\mathslbb{c}$', -u'\U0001d589': '$\\mathslbb{d}$', -u'\U0001d58a': '$\\mathslbb{e}$', -u'\U0001d58b': '$\\mathslbb{f}$', -u'\U0001d58c': '$\\mathslbb{g}$', -u'\U0001d58d': '$\\mathslbb{h}$', -u'\U0001d58e': '$\\mathslbb{i}$', -u'\U0001d58f': '$\\mathslbb{j}$', -u'\U0001d590': '$\\mathslbb{k}$', -u'\U0001d591': '$\\mathslbb{l}$', -u'\U0001d592': '$\\mathslbb{m}$', -u'\U0001d593': '$\\mathslbb{n}$', -u'\U0001d594': '$\\mathslbb{o}$', -u'\U0001d595': '$\\mathslbb{p}$', -u'\U0001d596': '$\\mathslbb{q}$', -u'\U0001d597': '$\\mathslbb{r}$', -u'\U0001d598': '$\\mathslbb{s}$', -u'\U0001d599': '$\\mathslbb{t}$', -u'\U0001d59a': '$\\mathslbb{u}$', -u'\U0001d59b': '$\\mathslbb{v}$', -u'\U0001d59c': '$\\mathslbb{w}$', -u'\U0001d59d': '$\\mathslbb{x}$', -u'\U0001d59e': '$\\mathslbb{y}$', -u'\U0001d59f': '$\\mathslbb{z}$', -u'\U0001d5a0': '$\\mathsf{A}$', -u'\U0001d5a1': '$\\mathsf{B}$', -u'\U0001d5a2': '$\\mathsf{C}$', -u'\U0001d5a3': '$\\mathsf{D}$', -u'\U0001d5a4': '$\\mathsf{E}$', -u'\U0001d5a5': '$\\mathsf{F}$', -u'\U0001d5a6': '$\\mathsf{G}$', -u'\U0001d5a7': '$\\mathsf{H}$', -u'\U0001d5a8': '$\\mathsf{I}$', -u'\U0001d5a9': '$\\mathsf{J}$', -u'\U0001d5aa': '$\\mathsf{K}$', -u'\U0001d5ab': '$\\mathsf{L}$', -u'\U0001d5ac': '$\\mathsf{M}$', -u'\U0001d5ad': '$\\mathsf{N}$', -u'\U0001d5ae': '$\\mathsf{O}$', -u'\U0001d5af': '$\\mathsf{P}$', -u'\U0001d5b0': '$\\mathsf{Q}$', -u'\U0001d5b1': '$\\mathsf{R}$', -u'\U0001d5b2': '$\\mathsf{S}$', -u'\U0001d5b3': '$\\mathsf{T}$', -u'\U0001d5b4': '$\\mathsf{U}$', -u'\U0001d5b5': '$\\mathsf{V}$', -u'\U0001d5b6': '$\\mathsf{W}$', -u'\U0001d5b7': '$\\mathsf{X}$', -u'\U0001d5b8': '$\\mathsf{Y}$', -u'\U0001d5b9': '$\\mathsf{Z}$', -u'\U0001d5ba': '$\\mathsf{a}$', -u'\U0001d5bb': '$\\mathsf{b}$', -u'\U0001d5bc': '$\\mathsf{c}$', -u'\U0001d5bd': '$\\mathsf{d}$', -u'\U0001d5be': '$\\mathsf{e}$', -u'\U0001d5bf': '$\\mathsf{f}$', -u'\U0001d5c0': '$\\mathsf{g}$', -u'\U0001d5c1': '$\\mathsf{h}$', -u'\U0001d5c2': '$\\mathsf{i}$', -u'\U0001d5c3': '$\\mathsf{j}$', -u'\U0001d5c4': '$\\mathsf{k}$', -u'\U0001d5c5': '$\\mathsf{l}$', -u'\U0001d5c6': '$\\mathsf{m}$', -u'\U0001d5c7': '$\\mathsf{n}$', -u'\U0001d5c8': '$\\mathsf{o}$', -u'\U0001d5c9': '$\\mathsf{p}$', -u'\U0001d5ca': '$\\mathsf{q}$', -u'\U0001d5cb': '$\\mathsf{r}$', -u'\U0001d5cc': '$\\mathsf{s}$', -u'\U0001d5cd': '$\\mathsf{t}$', -u'\U0001d5ce': '$\\mathsf{u}$', -u'\U0001d5cf': '$\\mathsf{v}$', -u'\U0001d5d0': '$\\mathsf{w}$', -u'\U0001d5d1': '$\\mathsf{x}$', -u'\U0001d5d2': '$\\mathsf{y}$', -u'\U0001d5d3': '$\\mathsf{z}$', -u'\U0001d5d4': '$\\mathsfbf{A}$', -u'\U0001d5d5': '$\\mathsfbf{B}$', -u'\U0001d5d6': '$\\mathsfbf{C}$', -u'\U0001d5d7': '$\\mathsfbf{D}$', -u'\U0001d5d8': '$\\mathsfbf{E}$', -u'\U0001d5d9': '$\\mathsfbf{F}$', -u'\U0001d5da': '$\\mathsfbf{G}$', -u'\U0001d5db': '$\\mathsfbf{H}$', -u'\U0001d5dc': '$\\mathsfbf{I}$', -u'\U0001d5dd': '$\\mathsfbf{J}$', -u'\U0001d5de': '$\\mathsfbf{K}$', -u'\U0001d5df': '$\\mathsfbf{L}$', -u'\U0001d5e0': 
'$\\mathsfbf{M}$', -u'\U0001d5e1': '$\\mathsfbf{N}$', -u'\U0001d5e2': '$\\mathsfbf{O}$', -u'\U0001d5e3': '$\\mathsfbf{P}$', -u'\U0001d5e4': '$\\mathsfbf{Q}$', -u'\U0001d5e5': '$\\mathsfbf{R}$', -u'\U0001d5e6': '$\\mathsfbf{S}$', -u'\U0001d5e7': '$\\mathsfbf{T}$', -u'\U0001d5e8': '$\\mathsfbf{U}$', -u'\U0001d5e9': '$\\mathsfbf{V}$', -u'\U0001d5ea': '$\\mathsfbf{W}$', -u'\U0001d5eb': '$\\mathsfbf{X}$', -u'\U0001d5ec': '$\\mathsfbf{Y}$', -u'\U0001d5ed': '$\\mathsfbf{Z}$', -u'\U0001d5ee': '$\\mathsfbf{a}$', -u'\U0001d5ef': '$\\mathsfbf{b}$', -u'\U0001d5f0': '$\\mathsfbf{c}$', -u'\U0001d5f1': '$\\mathsfbf{d}$', -u'\U0001d5f2': '$\\mathsfbf{e}$', -u'\U0001d5f3': '$\\mathsfbf{f}$', -u'\U0001d5f4': '$\\mathsfbf{g}$', -u'\U0001d5f5': '$\\mathsfbf{h}$', -u'\U0001d5f6': '$\\mathsfbf{i}$', -u'\U0001d5f7': '$\\mathsfbf{j}$', -u'\U0001d5f8': '$\\mathsfbf{k}$', -u'\U0001d5f9': '$\\mathsfbf{l}$', -u'\U0001d5fa': '$\\mathsfbf{m}$', -u'\U0001d5fb': '$\\mathsfbf{n}$', -u'\U0001d5fc': '$\\mathsfbf{o}$', -u'\U0001d5fd': '$\\mathsfbf{p}$', -u'\U0001d5fe': '$\\mathsfbf{q}$', -u'\U0001d5ff': '$\\mathsfbf{r}$', -u'\U0001d600': '$\\mathsfbf{s}$', -u'\U0001d601': '$\\mathsfbf{t}$', -u'\U0001d602': '$\\mathsfbf{u}$', -u'\U0001d603': '$\\mathsfbf{v}$', -u'\U0001d604': '$\\mathsfbf{w}$', -u'\U0001d605': '$\\mathsfbf{x}$', -u'\U0001d606': '$\\mathsfbf{y}$', -u'\U0001d607': '$\\mathsfbf{z}$', -u'\U0001d608': '$\\mathsfsl{A}$', -u'\U0001d609': '$\\mathsfsl{B}$', -u'\U0001d60a': '$\\mathsfsl{C}$', -u'\U0001d60b': '$\\mathsfsl{D}$', -u'\U0001d60c': '$\\mathsfsl{E}$', -u'\U0001d60d': '$\\mathsfsl{F}$', -u'\U0001d60e': '$\\mathsfsl{G}$', -u'\U0001d60f': '$\\mathsfsl{H}$', -u'\U0001d610': '$\\mathsfsl{I}$', -u'\U0001d611': '$\\mathsfsl{J}$', -u'\U0001d612': '$\\mathsfsl{K}$', -u'\U0001d613': '$\\mathsfsl{L}$', -u'\U0001d614': '$\\mathsfsl{M}$', -u'\U0001d615': '$\\mathsfsl{N}$', -u'\U0001d616': '$\\mathsfsl{O}$', -u'\U0001d617': '$\\mathsfsl{P}$', -u'\U0001d618': '$\\mathsfsl{Q}$', -u'\U0001d619': '$\\mathsfsl{R}$', -u'\U0001d61a': '$\\mathsfsl{S}$', -u'\U0001d61b': '$\\mathsfsl{T}$', -u'\U0001d61c': '$\\mathsfsl{U}$', -u'\U0001d61d': '$\\mathsfsl{V}$', -u'\U0001d61e': '$\\mathsfsl{W}$', -u'\U0001d61f': '$\\mathsfsl{X}$', -u'\U0001d620': '$\\mathsfsl{Y}$', -u'\U0001d621': '$\\mathsfsl{Z}$', -u'\U0001d622': '$\\mathsfsl{a}$', -u'\U0001d623': '$\\mathsfsl{b}$', -u'\U0001d624': '$\\mathsfsl{c}$', -u'\U0001d625': '$\\mathsfsl{d}$', -u'\U0001d626': '$\\mathsfsl{e}$', -u'\U0001d627': '$\\mathsfsl{f}$', -u'\U0001d628': '$\\mathsfsl{g}$', -u'\U0001d629': '$\\mathsfsl{h}$', -u'\U0001d62a': '$\\mathsfsl{i}$', -u'\U0001d62b': '$\\mathsfsl{j}$', -u'\U0001d62c': '$\\mathsfsl{k}$', -u'\U0001d62d': '$\\mathsfsl{l}$', -u'\U0001d62e': '$\\mathsfsl{m}$', -u'\U0001d62f': '$\\mathsfsl{n}$', -u'\U0001d630': '$\\mathsfsl{o}$', -u'\U0001d631': '$\\mathsfsl{p}$', -u'\U0001d632': '$\\mathsfsl{q}$', -u'\U0001d633': '$\\mathsfsl{r}$', -u'\U0001d634': '$\\mathsfsl{s}$', -u'\U0001d635': '$\\mathsfsl{t}$', -u'\U0001d636': '$\\mathsfsl{u}$', -u'\U0001d637': '$\\mathsfsl{v}$', -u'\U0001d638': '$\\mathsfsl{w}$', -u'\U0001d639': '$\\mathsfsl{x}$', -u'\U0001d63a': '$\\mathsfsl{y}$', -u'\U0001d63b': '$\\mathsfsl{z}$', -u'\U0001d63c': '$\\mathsfbfsl{A}$', -u'\U0001d63d': '$\\mathsfbfsl{B}$', -u'\U0001d63e': '$\\mathsfbfsl{C}$', -u'\U0001d63f': '$\\mathsfbfsl{D}$', -u'\U0001d640': '$\\mathsfbfsl{E}$', -u'\U0001d641': '$\\mathsfbfsl{F}$', -u'\U0001d642': '$\\mathsfbfsl{G}$', -u'\U0001d643': '$\\mathsfbfsl{H}$', -u'\U0001d644': '$\\mathsfbfsl{I}$', -u'\U0001d645': 
'$\\mathsfbfsl{J}$', -u'\U0001d646': '$\\mathsfbfsl{K}$', -u'\U0001d647': '$\\mathsfbfsl{L}$', -u'\U0001d648': '$\\mathsfbfsl{M}$', -u'\U0001d649': '$\\mathsfbfsl{N}$', -u'\U0001d64a': '$\\mathsfbfsl{O}$', -u'\U0001d64b': '$\\mathsfbfsl{P}$', -u'\U0001d64c': '$\\mathsfbfsl{Q}$', -u'\U0001d64d': '$\\mathsfbfsl{R}$', -u'\U0001d64e': '$\\mathsfbfsl{S}$', -u'\U0001d64f': '$\\mathsfbfsl{T}$', -u'\U0001d650': '$\\mathsfbfsl{U}$', -u'\U0001d651': '$\\mathsfbfsl{V}$', -u'\U0001d652': '$\\mathsfbfsl{W}$', -u'\U0001d653': '$\\mathsfbfsl{X}$', -u'\U0001d654': '$\\mathsfbfsl{Y}$', -u'\U0001d655': '$\\mathsfbfsl{Z}$', -u'\U0001d656': '$\\mathsfbfsl{a}$', -u'\U0001d657': '$\\mathsfbfsl{b}$', -u'\U0001d658': '$\\mathsfbfsl{c}$', -u'\U0001d659': '$\\mathsfbfsl{d}$', -u'\U0001d65a': '$\\mathsfbfsl{e}$', -u'\U0001d65b': '$\\mathsfbfsl{f}$', -u'\U0001d65c': '$\\mathsfbfsl{g}$', -u'\U0001d65d': '$\\mathsfbfsl{h}$', -u'\U0001d65e': '$\\mathsfbfsl{i}$', -u'\U0001d65f': '$\\mathsfbfsl{j}$', -u'\U0001d660': '$\\mathsfbfsl{k}$', -u'\U0001d661': '$\\mathsfbfsl{l}$', -u'\U0001d662': '$\\mathsfbfsl{m}$', -u'\U0001d663': '$\\mathsfbfsl{n}$', -u'\U0001d664': '$\\mathsfbfsl{o}$', -u'\U0001d665': '$\\mathsfbfsl{p}$', -u'\U0001d666': '$\\mathsfbfsl{q}$', -u'\U0001d667': '$\\mathsfbfsl{r}$', -u'\U0001d668': '$\\mathsfbfsl{s}$', -u'\U0001d669': '$\\mathsfbfsl{t}$', -u'\U0001d66a': '$\\mathsfbfsl{u}$', -u'\U0001d66b': '$\\mathsfbfsl{v}$', -u'\U0001d66c': '$\\mathsfbfsl{w}$', -u'\U0001d66d': '$\\mathsfbfsl{x}$', -u'\U0001d66e': '$\\mathsfbfsl{y}$', -u'\U0001d66f': '$\\mathsfbfsl{z}$', -u'\U0001d670': '$\\mathtt{A}$', -u'\U0001d671': '$\\mathtt{B}$', -u'\U0001d672': '$\\mathtt{C}$', -u'\U0001d673': '$\\mathtt{D}$', -u'\U0001d674': '$\\mathtt{E}$', -u'\U0001d675': '$\\mathtt{F}$', -u'\U0001d676': '$\\mathtt{G}$', -u'\U0001d677': '$\\mathtt{H}$', -u'\U0001d678': '$\\mathtt{I}$', -u'\U0001d679': '$\\mathtt{J}$', -u'\U0001d67a': '$\\mathtt{K}$', -u'\U0001d67b': '$\\mathtt{L}$', -u'\U0001d67c': '$\\mathtt{M}$', -u'\U0001d67d': '$\\mathtt{N}$', -u'\U0001d67e': '$\\mathtt{O}$', -u'\U0001d67f': '$\\mathtt{P}$', -u'\U0001d680': '$\\mathtt{Q}$', -u'\U0001d681': '$\\mathtt{R}$', -u'\U0001d682': '$\\mathtt{S}$', -u'\U0001d683': '$\\mathtt{T}$', -u'\U0001d684': '$\\mathtt{U}$', -u'\U0001d685': '$\\mathtt{V}$', -u'\U0001d686': '$\\mathtt{W}$', -u'\U0001d687': '$\\mathtt{X}$', -u'\U0001d688': '$\\mathtt{Y}$', -u'\U0001d689': '$\\mathtt{Z}$', -u'\U0001d68a': '$\\mathtt{a}$', -u'\U0001d68b': '$\\mathtt{b}$', -u'\U0001d68c': '$\\mathtt{c}$', -u'\U0001d68d': '$\\mathtt{d}$', -u'\U0001d68e': '$\\mathtt{e}$', -u'\U0001d68f': '$\\mathtt{f}$', -u'\U0001d690': '$\\mathtt{g}$', -u'\U0001d691': '$\\mathtt{h}$', -u'\U0001d692': '$\\mathtt{i}$', -u'\U0001d693': '$\\mathtt{j}$', -u'\U0001d694': '$\\mathtt{k}$', -u'\U0001d695': '$\\mathtt{l}$', -u'\U0001d696': '$\\mathtt{m}$', -u'\U0001d697': '$\\mathtt{n}$', -u'\U0001d698': '$\\mathtt{o}$', -u'\U0001d699': '$\\mathtt{p}$', -u'\U0001d69a': '$\\mathtt{q}$', -u'\U0001d69b': '$\\mathtt{r}$', -u'\U0001d69c': '$\\mathtt{s}$', -u'\U0001d69d': '$\\mathtt{t}$', -u'\U0001d69e': '$\\mathtt{u}$', -u'\U0001d69f': '$\\mathtt{v}$', -u'\U0001d6a0': '$\\mathtt{w}$', -u'\U0001d6a1': '$\\mathtt{x}$', -u'\U0001d6a2': '$\\mathtt{y}$', -u'\U0001d6a3': '$\\mathtt{z}$', -u'\U0001d6a8': '$\\mathbf{\\Alpha}$', -u'\U0001d6a9': '$\\mathbf{\\Beta}$', -u'\U0001d6aa': '$\\mathbf{\\Gamma}$', -u'\U0001d6ab': '$\\mathbf{\\Delta}$', -u'\U0001d6ac': '$\\mathbf{\\Epsilon}$', -u'\U0001d6ad': '$\\mathbf{\\Zeta}$', -u'\U0001d6ae': 
'$\\mathbf{\\Eta}$', -u'\U0001d6af': '$\\mathbf{\\Theta}$', -u'\U0001d6b0': '$\\mathbf{\\Iota}$', -u'\U0001d6b1': '$\\mathbf{\\Kappa}$', -u'\U0001d6b2': '$\\mathbf{\\Lambda}$', -u'\U0001d6b3': '$M$', -u'\U0001d6b4': '$N$', -u'\U0001d6b5': '$\\mathbf{\\Xi}$', -u'\U0001d6b6': '$O$', -u'\U0001d6b7': '$\\mathbf{\\Pi}$', -u'\U0001d6b8': '$\\mathbf{\\Rho}$', -u'\U0001d6b9': '{\\mathbf{\\vartheta}}', -u'\U0001d6ba': '$\\mathbf{\\Sigma}$', -u'\U0001d6bb': '$\\mathbf{\\Tau}$', -u'\U0001d6bc': '$\\mathbf{\\Upsilon}$', -u'\U0001d6bd': '$\\mathbf{\\Phi}$', -u'\U0001d6be': '$\\mathbf{\\Chi}$', -u'\U0001d6bf': '$\\mathbf{\\Psi}$', -u'\U0001d6c0': '$\\mathbf{\\Omega}$', -u'\U0001d6c1': '$\\mathbf{\\nabla}$', -u'\U0001d6c2': '$\\mathbf{\\Alpha}$', -u'\U0001d6c3': '$\\mathbf{\\Beta}$', -u'\U0001d6c4': '$\\mathbf{\\Gamma}$', -u'\U0001d6c5': '$\\mathbf{\\Delta}$', -u'\U0001d6c6': '$\\mathbf{\\Epsilon}$', -u'\U0001d6c7': '$\\mathbf{\\Zeta}$', -u'\U0001d6c8': '$\\mathbf{\\Eta}$', -u'\U0001d6c9': '$\\mathbf{\\theta}$', -u'\U0001d6ca': '$\\mathbf{\\Iota}$', -u'\U0001d6cb': '$\\mathbf{\\Kappa}$', -u'\U0001d6cc': '$\\mathbf{\\Lambda}$', -u'\U0001d6cd': '$M$', -u'\U0001d6ce': '$N$', -u'\U0001d6cf': '$\\mathbf{\\Xi}$', -u'\U0001d6d0': '$O$', -u'\U0001d6d1': '$\\mathbf{\\Pi}$', -u'\U0001d6d2': '$\\mathbf{\\Rho}$', -u'\U0001d6d3': '$\\mathbf{\\varsigma}$', -u'\U0001d6d4': '$\\mathbf{\\Sigma}$', -u'\U0001d6d5': '$\\mathbf{\\Tau}$', -u'\U0001d6d6': '$\\mathbf{\\Upsilon}$', -u'\U0001d6d7': '$\\mathbf{\\Phi}$', -u'\U0001d6d8': '$\\mathbf{\\Chi}$', -u'\U0001d6d9': '$\\mathbf{\\Psi}$', -u'\U0001d6da': '$\\mathbf{\\Omega}$', -u'\U0001d6db': '$\\partial$', -u'\U0001d6dc': '$\\in$', -u'\U0001d6dd': '{\\mathbf{\\vartheta}}', -u'\U0001d6de': '{\\mathbf{\\varkappa}}', -u'\U0001d6df': '{\\mathbf{\\phi}}', -u'\U0001d6e0': '{\\mathbf{\\varrho}}', -u'\U0001d6e1': '{\\mathbf{\\varpi}}', -u'\U0001d6e2': '$\\mathsl{\\Alpha}$', -u'\U0001d6e3': '$\\mathsl{\\Beta}$', -u'\U0001d6e4': '$\\mathsl{\\Gamma}$', -u'\U0001d6e5': '$\\mathsl{\\Delta}$', -u'\U0001d6e6': '$\\mathsl{\\Epsilon}$', -u'\U0001d6e7': '$\\mathsl{\\Zeta}$', -u'\U0001d6e8': '$\\mathsl{\\Eta}$', -u'\U0001d6e9': '$\\mathsl{\\Theta}$', -u'\U0001d6ea': '$\\mathsl{\\Iota}$', -u'\U0001d6eb': '$\\mathsl{\\Kappa}$', -u'\U0001d6ec': '$\\mathsl{\\Lambda}$', -u'\U0001d6ed': '$M$', -u'\U0001d6ee': '$N$', -u'\U0001d6ef': '$\\mathsl{\\Xi}$', -u'\U0001d6f0': '$O$', -u'\U0001d6f1': '$\\mathsl{\\Pi}$', -u'\U0001d6f2': '$\\mathsl{\\Rho}$', -u'\U0001d6f3': '{\\mathsl{\\vartheta}}', -u'\U0001d6f4': '$\\mathsl{\\Sigma}$', -u'\U0001d6f5': '$\\mathsl{\\Tau}$', -u'\U0001d6f6': '$\\mathsl{\\Upsilon}$', -u'\U0001d6f7': '$\\mathsl{\\Phi}$', -u'\U0001d6f8': '$\\mathsl{\\Chi}$', -u'\U0001d6f9': '$\\mathsl{\\Psi}$', -u'\U0001d6fa': '$\\mathsl{\\Omega}$', -u'\U0001d6fb': '$\\mathsl{\\nabla}$', -u'\U0001d6fc': '$\\mathsl{\\Alpha}$', -u'\U0001d6fd': '$\\mathsl{\\Beta}$', -u'\U0001d6fe': '$\\mathsl{\\Gamma}$', -u'\U0001d6ff': '$\\mathsl{\\Delta}$', -u'\U0001d700': '$\\mathsl{\\Epsilon}$', -u'\U0001d701': '$\\mathsl{\\Zeta}$', -u'\U0001d702': '$\\mathsl{\\Eta}$', -u'\U0001d703': '$\\mathsl{\\Theta}$', -u'\U0001d704': '$\\mathsl{\\Iota}$', -u'\U0001d705': '$\\mathsl{\\Kappa}$', -u'\U0001d706': '$\\mathsl{\\Lambda}$', -u'\U0001d707': '$M$', -u'\U0001d708': '$N$', -u'\U0001d709': '$\\mathsl{\\Xi}$', -u'\U0001d70a': '$O$', -u'\U0001d70b': '$\\mathsl{\\Pi}$', -u'\U0001d70c': '$\\mathsl{\\Rho}$', -u'\U0001d70d': '$\\mathsl{\\varsigma}$', -u'\U0001d70e': '$\\mathsl{\\Sigma}$', -u'\U0001d70f': '$\\mathsl{\\Tau}$', 
-u'\U0001d710': '$\\mathsl{\\Upsilon}$', -u'\U0001d711': '$\\mathsl{\\Phi}$', -u'\U0001d712': '$\\mathsl{\\Chi}$', -u'\U0001d713': '$\\mathsl{\\Psi}$', -u'\U0001d714': '$\\mathsl{\\Omega}$', -u'\U0001d715': '$\\partial$', -u'\U0001d716': '$\\in$', -u'\U0001d717': '{\\mathsl{\\vartheta}}', -u'\U0001d718': '{\\mathsl{\\varkappa}}', -u'\U0001d719': '{\\mathsl{\\phi}}', -u'\U0001d71a': '{\\mathsl{\\varrho}}', -u'\U0001d71b': '{\\mathsl{\\varpi}}', -u'\U0001d71c': '$\\mathbit{\\Alpha}$', -u'\U0001d71d': '$\\mathbit{\\Beta}$', -u'\U0001d71e': '$\\mathbit{\\Gamma}$', -u'\U0001d71f': '$\\mathbit{\\Delta}$', -u'\U0001d720': '$\\mathbit{\\Epsilon}$', -u'\U0001d721': '$\\mathbit{\\Zeta}$', -u'\U0001d722': '$\\mathbit{\\Eta}$', -u'\U0001d723': '$\\mathbit{\\Theta}$', -u'\U0001d724': '$\\mathbit{\\Iota}$', -u'\U0001d725': '$\\mathbit{\\Kappa}$', -u'\U0001d726': '$\\mathbit{\\Lambda}$', -u'\U0001d727': '$M$', -u'\U0001d728': '$N$', -u'\U0001d729': '$\\mathbit{\\Xi}$', -u'\U0001d72a': '$O$', -u'\U0001d72b': '$\\mathbit{\\Pi}$', -u'\U0001d72c': '$\\mathbit{\\Rho}$', -u'\U0001d72d': '{\\mathbit{O}}', -u'\U0001d72e': '$\\mathbit{\\Sigma}$', -u'\U0001d72f': '$\\mathbit{\\Tau}$', -u'\U0001d730': '$\\mathbit{\\Upsilon}$', -u'\U0001d731': '$\\mathbit{\\Phi}$', -u'\U0001d732': '$\\mathbit{\\Chi}$', -u'\U0001d733': '$\\mathbit{\\Psi}$', -u'\U0001d734': '$\\mathbit{\\Omega}$', -u'\U0001d735': '$\\mathbit{\\nabla}$', -u'\U0001d736': '$\\mathbit{\\Alpha}$', -u'\U0001d737': '$\\mathbit{\\Beta}$', -u'\U0001d738': '$\\mathbit{\\Gamma}$', -u'\U0001d739': '$\\mathbit{\\Delta}$', -u'\U0001d73a': '$\\mathbit{\\Epsilon}$', -u'\U0001d73b': '$\\mathbit{\\Zeta}$', -u'\U0001d73c': '$\\mathbit{\\Eta}$', -u'\U0001d73d': '$\\mathbit{\\Theta}$', -u'\U0001d73e': '$\\mathbit{\\Iota}$', -u'\U0001d73f': '$\\mathbit{\\Kappa}$', -u'\U0001d740': '$\\mathbit{\\Lambda}$', -u'\U0001d741': '$M$', -u'\U0001d742': '$N$', -u'\U0001d743': '$\\mathbit{\\Xi}$', -u'\U0001d744': '$O$', -u'\U0001d745': '$\\mathbit{\\Pi}$', -u'\U0001d746': '$\\mathbit{\\Rho}$', -u'\U0001d747': '$\\mathbit{\\varsigma}$', -u'\U0001d748': '$\\mathbit{\\Sigma}$', -u'\U0001d749': '$\\mathbit{\\Tau}$', -u'\U0001d74a': '$\\mathbit{\\Upsilon}$', -u'\U0001d74b': '$\\mathbit{\\Phi}$', -u'\U0001d74c': '$\\mathbit{\\Chi}$', -u'\U0001d74d': '$\\mathbit{\\Psi}$', -u'\U0001d74e': '$\\mathbit{\\Omega}$', -u'\U0001d74f': '$\\partial$', -u'\U0001d750': '$\\in$', -u'\U0001d751': '{\\mathbit{\\vartheta}}', -u'\U0001d752': '{\\mathbit{\\varkappa}}', -u'\U0001d753': '{\\mathbit{\\phi}}', -u'\U0001d754': '{\\mathbit{\\varrho}}', -u'\U0001d755': '{\\mathbit{\\varpi}}', -u'\U0001d756': '$\\mathsfbf{\\Alpha}$', -u'\U0001d757': '$\\mathsfbf{\\Beta}$', -u'\U0001d758': '$\\mathsfbf{\\Gamma}$', -u'\U0001d759': '$\\mathsfbf{\\Delta}$', -u'\U0001d75a': '$\\mathsfbf{\\Epsilon}$', -u'\U0001d75b': '$\\mathsfbf{\\Zeta}$', -u'\U0001d75c': '$\\mathsfbf{\\Eta}$', -u'\U0001d75d': '$\\mathsfbf{\\Theta}$', -u'\U0001d75e': '$\\mathsfbf{\\Iota}$', -u'\U0001d75f': '$\\mathsfbf{\\Kappa}$', -u'\U0001d760': '$\\mathsfbf{\\Lambda}$', -u'\U0001d761': '$M$', -u'\U0001d762': '$N$', -u'\U0001d763': '$\\mathsfbf{\\Xi}$', -u'\U0001d764': '$O$', -u'\U0001d765': '$\\mathsfbf{\\Pi}$', -u'\U0001d766': '$\\mathsfbf{\\Rho}$', -u'\U0001d767': '{\\mathsfbf{\\vartheta}}', -u'\U0001d768': '$\\mathsfbf{\\Sigma}$', -u'\U0001d769': '$\\mathsfbf{\\Tau}$', -u'\U0001d76a': '$\\mathsfbf{\\Upsilon}$', -u'\U0001d76b': '$\\mathsfbf{\\Phi}$', -u'\U0001d76c': '$\\mathsfbf{\\Chi}$', -u'\U0001d76d': '$\\mathsfbf{\\Psi}$', -u'\U0001d76e': 
'$\\mathsfbf{\\Omega}$', -u'\U0001d76f': '$\\mathsfbf{\\nabla}$', -u'\U0001d770': '$\\mathsfbf{\\Alpha}$', -u'\U0001d771': '$\\mathsfbf{\\Beta}$', -u'\U0001d772': '$\\mathsfbf{\\Gamma}$', -u'\U0001d773': '$\\mathsfbf{\\Delta}$', -u'\U0001d774': '$\\mathsfbf{\\Epsilon}$', -u'\U0001d775': '$\\mathsfbf{\\Zeta}$', -u'\U0001d776': '$\\mathsfbf{\\Eta}$', -u'\U0001d777': '$\\mathsfbf{\\Theta}$', -u'\U0001d778': '$\\mathsfbf{\\Iota}$', -u'\U0001d779': '$\\mathsfbf{\\Kappa}$', -u'\U0001d77a': '$\\mathsfbf{\\Lambda}$', -u'\U0001d77b': '$M$', -u'\U0001d77c': '$N$', -u'\U0001d77d': '$\\mathsfbf{\\Xi}$', -u'\U0001d77e': '$O$', -u'\U0001d77f': '$\\mathsfbf{\\Pi}$', -u'\U0001d780': '$\\mathsfbf{\\Rho}$', -u'\U0001d781': '$\\mathsfbf{\\varsigma}$', -u'\U0001d782': '$\\mathsfbf{\\Sigma}$', -u'\U0001d783': '$\\mathsfbf{\\Tau}$', -u'\U0001d784': '$\\mathsfbf{\\Upsilon}$', -u'\U0001d785': '$\\mathsfbf{\\Phi}$', -u'\U0001d786': '$\\mathsfbf{\\Chi}$', -u'\U0001d787': '$\\mathsfbf{\\Psi}$', -u'\U0001d788': '$\\mathsfbf{\\Omega}$', -u'\U0001d789': '$\\partial$', -u'\U0001d78a': '$\\in$', -u'\U0001d78b': '{\\mathsfbf{\\vartheta}}', -u'\U0001d78c': '{\\mathsfbf{\\varkappa}}', -u'\U0001d78d': '{\\mathsfbf{\\phi}}', -u'\U0001d78e': '{\\mathsfbf{\\varrho}}', -u'\U0001d78f': '{\\mathsfbf{\\varpi}}', -u'\U0001d790': '$\\mathsfbfsl{\\Alpha}$', -u'\U0001d791': '$\\mathsfbfsl{\\Beta}$', -u'\U0001d792': '$\\mathsfbfsl{\\Gamma}$', -u'\U0001d793': '$\\mathsfbfsl{\\Delta}$', -u'\U0001d794': '$\\mathsfbfsl{\\Epsilon}$', -u'\U0001d795': '$\\mathsfbfsl{\\Zeta}$', -u'\U0001d796': '$\\mathsfbfsl{\\Eta}$', -u'\U0001d797': '$\\mathsfbfsl{\\vartheta}$', -u'\U0001d798': '$\\mathsfbfsl{\\Iota}$', -u'\U0001d799': '$\\mathsfbfsl{\\Kappa}$', -u'\U0001d79a': '$\\mathsfbfsl{\\Lambda}$', -u'\U0001d79b': '$M$', -u'\U0001d79c': '$N$', -u'\U0001d79d': '$\\mathsfbfsl{\\Xi}$', -u'\U0001d79e': '$O$', -u'\U0001d79f': '$\\mathsfbfsl{\\Pi}$', -u'\U0001d7a0': '$\\mathsfbfsl{\\Rho}$', -u'\U0001d7a1': '{\\mathsfbfsl{\\vartheta}}', -u'\U0001d7a2': '$\\mathsfbfsl{\\Sigma}$', -u'\U0001d7a3': '$\\mathsfbfsl{\\Tau}$', -u'\U0001d7a4': '$\\mathsfbfsl{\\Upsilon}$', -u'\U0001d7a5': '$\\mathsfbfsl{\\Phi}$', -u'\U0001d7a6': '$\\mathsfbfsl{\\Chi}$', -u'\U0001d7a7': '$\\mathsfbfsl{\\Psi}$', -u'\U0001d7a8': '$\\mathsfbfsl{\\Omega}$', -u'\U0001d7a9': '$\\mathsfbfsl{\\nabla}$', -u'\U0001d7aa': '$\\mathsfbfsl{\\Alpha}$', -u'\U0001d7ab': '$\\mathsfbfsl{\\Beta}$', -u'\U0001d7ac': '$\\mathsfbfsl{\\Gamma}$', -u'\U0001d7ad': '$\\mathsfbfsl{\\Delta}$', -u'\U0001d7ae': '$\\mathsfbfsl{\\Epsilon}$', -u'\U0001d7af': '$\\mathsfbfsl{\\Zeta}$', -u'\U0001d7b0': '$\\mathsfbfsl{\\Eta}$', -u'\U0001d7b1': '$\\mathsfbfsl{\\vartheta}$', -u'\U0001d7b2': '$\\mathsfbfsl{\\Iota}$', -u'\U0001d7b3': '$\\mathsfbfsl{\\Kappa}$', -u'\U0001d7b4': '$\\mathsfbfsl{\\Lambda}$', -u'\U0001d7b5': '$M$', -u'\U0001d7b6': '$N$', -u'\U0001d7b7': '$\\mathsfbfsl{\\Xi}$', -u'\U0001d7b8': '$O$', -u'\U0001d7b9': '$\\mathsfbfsl{\\Pi}$', -u'\U0001d7ba': '$\\mathsfbfsl{\\Rho}$', -u'\U0001d7bb': '$\\mathsfbfsl{\\varsigma}$', -u'\U0001d7bc': '$\\mathsfbfsl{\\Sigma}$', -u'\U0001d7bd': '$\\mathsfbfsl{\\Tau}$', -u'\U0001d7be': '$\\mathsfbfsl{\\Upsilon}$', -u'\U0001d7bf': '$\\mathsfbfsl{\\Phi}$', -u'\U0001d7c0': '$\\mathsfbfsl{\\Chi}$', -u'\U0001d7c1': '$\\mathsfbfsl{\\Psi}$', -u'\U0001d7c2': '$\\mathsfbfsl{\\Omega}$', -u'\U0001d7c3': '$\\partial$', -u'\U0001d7c4': '$\\in$', -u'\U0001d7c5': '{\\mathsfbfsl{\\vartheta}}', -u'\U0001d7c6': '{\\mathsfbfsl{\\varkappa}}', -u'\U0001d7c7': '{\\mathsfbfsl{\\phi}}', -u'\U0001d7c8': 
'{\\mathsfbfsl{\\varrho}}', -u'\U0001d7c9': '{\\mathsfbfsl{\\varpi}}', -u'\U0001d7ce': '$\\mathbf{0}$', -u'\U0001d7cf': '$\\mathbf{1}$', -u'\U0001d7d0': '$\\mathbf{2}$', -u'\U0001d7d1': '$\\mathbf{3}$', -u'\U0001d7d2': '$\\mathbf{4}$', -u'\U0001d7d3': '$\\mathbf{5}$', -u'\U0001d7d4': '$\\mathbf{6}$', -u'\U0001d7d5': '$\\mathbf{7}$', -u'\U0001d7d6': '$\\mathbf{8}$', -u'\U0001d7d7': '$\\mathbf{9}$', -u'\U0001d7d8': '$\\mathbb{0}$', -u'\U0001d7d9': '$\\mathbb{1}$', -u'\U0001d7da': '$\\mathbb{2}$', -u'\U0001d7db': '$\\mathbb{3}$', -u'\U0001d7dc': '$\\mathbb{4}$', -u'\U0001d7dd': '$\\mathbb{5}$', -u'\U0001d7de': '$\\mathbb{6}$', -u'\U0001d7df': '$\\mathbb{7}$', -u'\U0001d7e0': '$\\mathbb{8}$', -u'\U0001d7e1': '$\\mathbb{9}$', -u'\U0001d7e2': '$\\mathsf{0}$', -u'\U0001d7e3': '$\\mathsf{1}$', -u'\U0001d7e4': '$\\mathsf{2}$', -u'\U0001d7e5': '$\\mathsf{3}$', -u'\U0001d7e6': '$\\mathsf{4}$', -u'\U0001d7e7': '$\\mathsf{5}$', -u'\U0001d7e8': '$\\mathsf{6}$', -u'\U0001d7e9': '$\\mathsf{7}$', -u'\U0001d7ea': '$\\mathsf{8}$', -u'\U0001d7eb': '$\\mathsf{9}$', -u'\U0001d7ec': '$\\mathsfbf{0}$', -u'\U0001d7ed': '$\\mathsfbf{1}$', -u'\U0001d7ee': '$\\mathsfbf{2}$', -u'\U0001d7ef': '$\\mathsfbf{3}$', -u'\U0001d7f0': '$\\mathsfbf{4}$', -u'\U0001d7f1': '$\\mathsfbf{5}$', -u'\U0001d7f2': '$\\mathsfbf{6}$', -u'\U0001d7f3': '$\\mathsfbf{7}$', -u'\U0001d7f4': '$\\mathsfbf{8}$', -u'\U0001d7f5': '$\\mathsfbf{9}$', -u'\U0001d7f6': '$\\mathtt{0}$', -u'\U0001d7f7': '$\\mathtt{1}$', -u'\U0001d7f8': '$\\mathtt{2}$', -u'\U0001d7f9': '$\\mathtt{3}$', -u'\U0001d7fa': '$\\mathtt{4}$', -u'\U0001d7fb': '$\\mathtt{5}$', -u'\U0001d7fc': '$\\mathtt{6}$', -u'\U0001d7fd': '$\\mathtt{7}$', -u'\U0001d7fe': '$\\mathtt{8}$', -u'\U0001d7ff': '$\\mathtt{9}$'} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/null.py --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/null.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,21 +0,0 @@ -# $Id: null.py 4564 2006-05-21 20:44:42Z wiemann $ -# Author: David Goodger -# Copyright: This module has been placed in the public domain. - -""" -A do-nothing Writer. -""" - -from docutils import writers - - -class Writer(writers.UnfilteredWriter): - - supported = ('null',) - """Formats this writer supports.""" - - config_section = 'null writer' - config_section_dependencies = ('writers',) - - def translate(self): - pass diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/pep_html/__init__.py --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/pep_html/__init__.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,105 +0,0 @@ -# $Id: __init__.py 4564 2006-05-21 20:44:42Z wiemann $ -# Author: David Goodger -# Copyright: This module has been placed in the public domain. - -""" -PEP HTML Writer. 
-""" - -__docformat__ = 'reStructuredText' - - -import sys -import os -import os.path -import codecs -import docutils -from docutils import frontend, nodes, utils, writers -from docutils.writers import html4css1 - - -class Writer(html4css1.Writer): - - default_stylesheet = 'pep.css' - - default_stylesheet_path = utils.relative_path( - os.path.join(os.getcwd(), 'dummy'), - os.path.join(os.path.dirname(__file__), default_stylesheet)) - - default_template = 'template.txt' - - default_template_path = utils.relative_path( - os.path.join(os.getcwd(), 'dummy'), - os.path.join(os.path.dirname(__file__), default_template)) - - settings_spec = html4css1.Writer.settings_spec + ( - 'PEP/HTML-Specific Options', - 'For the PEP/HTML writer, the default value for the --stylesheet-path ' - 'option is "%s", and the default value for --template is "%s". ' - 'See HTML-Specific Options above.' - % (default_stylesheet_path, default_template_path), - (('Python\'s home URL. Default is "http://www.python.org".', - ['--python-home'], - {'default': 'http://www.python.org', 'metavar': ''}), - ('Home URL prefix for PEPs. Default is "." (current directory).', - ['--pep-home'], - {'default': '.', 'metavar': ''}), - # For testing. - (frontend.SUPPRESS_HELP, - ['--no-random'], - {'action': 'store_true', 'validator': frontend.validate_boolean}),)) - - settings_default_overrides = {'stylesheet_path': default_stylesheet_path, - 'template': default_template_path,} - - relative_path_settings = (html4css1.Writer.relative_path_settings - + ('template',)) - - config_section = 'pep_html writer' - config_section_dependencies = ('writers', 'html4css1 writer') - - def __init__(self): - html4css1.Writer.__init__(self) - self.translator_class = HTMLTranslator - - def interpolation_dict(self): - subs = html4css1.Writer.interpolation_dict(self) - settings = self.document.settings - pyhome = settings.python_home - subs['pyhome'] = pyhome - subs['pephome'] = settings.pep_home - if pyhome == '..': - subs['pepindex'] = '.' - else: - subs['pepindex'] = pyhome + '/dev/peps' - index = self.document.first_child_matching_class(nodes.field_list) - header = self.document[index] - self.pepnum = header[0][1].astext() - subs['pep'] = self.pepnum - if settings.no_random: - subs['banner'] = 0 - else: - import random - subs['banner'] = random.randrange(64) - try: - subs['pepnum'] = '%04i' % int(self.pepnum) - except ValueError: - subs['pepnum'] = pepnum - self.title = header[1][1].astext() - subs['title'] = self.title - subs['body'] = ''.join( - self.body_pre_docinfo + self.docinfo + self.body) - return subs - - def assemble_parts(self): - html4css1.Writer.assemble_parts(self) - self.parts['title'] = [self.title] - self.parts['pepnum'] = self.pepnum - - -class HTMLTranslator(html4css1.HTMLTranslator): - - def depart_field_list(self, node): - html4css1.HTMLTranslator.depart_field_list(self, node) - if 'rfc2822' in node['classes']: - self.body.append('
    \n') diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/pep_html/pep.css --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/pep_html/pep.css Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,344 +0,0 @@ -/* -:Author: David Goodger -:Contact: goodger@python.org -:date: $Date: 2006-05-21 22:44:42 +0200 (Sun, 21 May 2006) $ -:version: $Revision: 4564 $ -:copyright: This stylesheet has been placed in the public domain. - -Default cascading style sheet for the PEP HTML output of Docutils. -*/ - -/* "! important" is used here to override other ``margin-top`` and - ``margin-bottom`` styles that are later in the stylesheet or - more specific. See http://www.w3.org/TR/CSS1#the-cascade */ -.first { - margin-top: 0 ! important } - -.last, .with-subtitle { - margin-bottom: 0 ! important } - -.hidden { - display: none } - -.navigation { - width: 100% ; - background: #99ccff ; - margin-top: 0px ; - margin-bottom: 0px } - -.navigation .navicon { - width: 150px ; - height: 35px } - -.navigation .textlinks { - padding-left: 1em ; - text-align: left } - -.navigation td, .navigation th { - padding-left: 0em ; - padding-right: 0em ; - vertical-align: middle } - -.rfc2822 { - margin-top: 0.5em ; - margin-left: 0.5em ; - margin-right: 0.5em ; - margin-bottom: 0em } - -.rfc2822 td { - text-align: left } - -.rfc2822 th.field-name { - text-align: right ; - font-family: sans-serif ; - padding-right: 0.5em ; - font-weight: bold ; - margin-bottom: 0em } - -a.toc-backref { - text-decoration: none ; - color: black } - -blockquote.epigraph { - margin: 2em 5em ; } - -body { - margin: 0px ; - margin-bottom: 1em ; - padding: 0px } - -dl.docutils dd { - margin-bottom: 0.5em } - -div.section { - margin-left: 1em ; - margin-right: 1em ; - margin-bottom: 1.5em } - -div.section div.section { - margin-left: 0em ; - margin-right: 0em ; - margin-top: 1.5em } - -div.abstract { - margin: 2em 5em } - -div.abstract p.topic-title { - font-weight: bold ; - text-align: center } - -div.admonition, div.attention, div.caution, div.danger, div.error, -div.hint, div.important, div.note, div.tip, div.warning { - margin: 2em ; - border: medium outset ; - padding: 1em } - -div.admonition p.admonition-title, div.hint p.admonition-title, -div.important p.admonition-title, div.note p.admonition-title, -div.tip p.admonition-title { - font-weight: bold ; - font-family: sans-serif } - -div.attention p.admonition-title, div.caution p.admonition-title, -div.danger p.admonition-title, div.error p.admonition-title, -div.warning p.admonition-title { - color: red ; - font-weight: bold ; - font-family: sans-serif } - -/* Uncomment (and remove this text!) to get reduced vertical space in - compound paragraphs. 
-div.compound .compound-first, div.compound .compound-middle { - margin-bottom: 0.5em } - -div.compound .compound-last, div.compound .compound-middle { - margin-top: 0.5em } -*/ - -div.dedication { - margin: 2em 5em ; - text-align: center ; - font-style: italic } - -div.dedication p.topic-title { - font-weight: bold ; - font-style: normal } - -div.figure { - margin-left: 2em ; - margin-right: 2em } - -div.footer, div.header { - clear: both; - font-size: smaller } - -div.footer { - margin-left: 1em ; - margin-right: 1em } - -div.line-block { - display: block ; - margin-top: 1em ; - margin-bottom: 1em } - -div.line-block div.line-block { - margin-top: 0 ; - margin-bottom: 0 ; - margin-left: 1.5em } - -div.sidebar { - margin-left: 1em ; - border: medium outset ; - padding: 1em ; - background-color: #ffffee ; - width: 40% ; - float: right ; - clear: right } - -div.sidebar p.rubric { - font-family: sans-serif ; - font-size: medium } - -div.system-messages { - margin: 5em } - -div.system-messages h1 { - color: red } - -div.system-message { - border: medium outset ; - padding: 1em } - -div.system-message p.system-message-title { - color: red ; - font-weight: bold } - -div.topic { - margin: 2em } - -h1.section-subtitle, h2.section-subtitle, h3.section-subtitle, -h4.section-subtitle, h5.section-subtitle, h6.section-subtitle { - margin-top: 0.4em } - -h1 { - font-family: sans-serif ; - font-size: large } - -h2 { - font-family: sans-serif ; - font-size: medium } - -h3 { - font-family: sans-serif ; - font-size: small } - -h4 { - font-family: sans-serif ; - font-style: italic ; - font-size: small } - -h5 { - font-family: sans-serif; - font-size: x-small } - -h6 { - font-family: sans-serif; - font-style: italic ; - font-size: x-small } - -hr.docutils { - width: 75% } - -img.align-left { - clear: left } - -img.align-right { - clear: right } - -img.borderless { - border: 0 } - -ol.simple, ul.simple { - margin-bottom: 1em } - -ol.arabic { - list-style: decimal } - -ol.loweralpha { - list-style: lower-alpha } - -ol.upperalpha { - list-style: upper-alpha } - -ol.lowerroman { - list-style: lower-roman } - -ol.upperroman { - list-style: upper-roman } - -p.attribution { - text-align: right ; - margin-left: 50% } - -p.caption { - font-style: italic } - -p.credits { - font-style: italic ; - font-size: smaller } - -p.label { - white-space: nowrap } - -p.rubric { - font-weight: bold ; - font-size: larger ; - color: maroon ; - text-align: center } - -p.sidebar-title { - font-family: sans-serif ; - font-weight: bold ; - font-size: larger } - -p.sidebar-subtitle { - font-family: sans-serif ; - font-weight: bold } - -p.topic-title { - font-family: sans-serif ; - font-weight: bold } - -pre.address { - margin-bottom: 0 ; - margin-top: 0 ; - font-family: serif ; - font-size: 100% } - -pre.literal-block, pre.doctest-block { - margin-left: 2em ; - margin-right: 2em } - -span.classifier { - font-family: sans-serif ; - font-style: oblique } - -span.classifier-delimiter { - font-family: sans-serif ; - font-weight: bold } - -span.interpreted { - font-family: sans-serif } - -span.option { - white-space: nowrap } - -span.option-argument { - font-style: italic } - -span.pre { - white-space: pre } - -span.problematic { - color: red } - -span.section-subtitle { - /* font-size relative to parent (h1..h6 element) */ - font-size: 80% } - -table.citation { - border-left: solid 1px gray; - margin-left: 1px } - -table.docinfo { - margin: 2em 4em } - -table.docutils { - margin-top: 0.5em ; - margin-bottom: 0.5em } - -table.footnote { - 
border-left: solid 1px black; - margin-left: 1px } - -table.docutils td, table.docutils th, -table.docinfo td, table.docinfo th { - padding-left: 0.5em ; - padding-right: 0.5em ; - vertical-align: top } - -td.num { - text-align: right } - -th.field-name { - font-weight: bold ; - text-align: left ; - white-space: nowrap ; - padding-left: 0 } - -h1 tt.docutils, h2 tt.docutils, h3 tt.docutils, -h4 tt.docutils, h5 tt.docutils, h6 tt.docutils { - font-size: 100% } - -ul.auto-toc { - list-style-type: none } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/pep_html/template.txt --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/pep_html/template.txt Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,29 +0,0 @@ - - - - - - - - PEP %(pep)s -- %(title)s - %(stylesheet)s - - - - - -
    -%(body)s -%(body_suffix)s diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/pseudoxml.py --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/pseudoxml.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,31 +0,0 @@ -# $Id: pseudoxml.py 4564 2006-05-21 20:44:42Z wiemann $ -# Author: David Goodger -# Copyright: This module has been placed in the public domain. - -""" -Simple internal document tree Writer, writes indented pseudo-XML. -""" - -__docformat__ = 'reStructuredText' - - -from docutils import writers - - -class Writer(writers.Writer): - - supported = ('pprint', 'pformat', 'pseudoxml') - """Formats this writer supports.""" - - config_section = 'pseudoxml writer' - config_section_dependencies = ('writers',) - - output = None - """Final translated form of `document`.""" - - def translate(self): - self.output = self.document.pformat() - - def supports(self, format): - """This writer supports all format-specific elements.""" - return 1 diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/__init__.py --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/__init__.py Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,337 +0,0 @@ -# $Id: __init__.py 4883 2007-01-16 01:51:28Z wiemann $ -# Authors: Chris Liechti ; -# David Goodger -# Copyright: This module has been placed in the public domain. - -""" -S5/HTML Slideshow Writer. -""" - -__docformat__ = 'reStructuredText' - - -import sys -import os -import re -import docutils -from docutils import frontend, nodes, utils -from docutils.writers import html4css1 -from docutils.parsers.rst import directives - -themes_dir_path = utils.relative_path( - os.path.join(os.getcwd(), 'dummy'), - os.path.join(os.path.dirname(__file__), 'themes')) - -def find_theme(name): - # Where else to look for a theme? - # Check working dir? Destination dir? Config dir? Plugins dir? - path = os.path.join(themes_dir_path, name) - if not os.path.isdir(path): - raise docutils.ApplicationError( - 'Theme directory not found: %r (path: %r)' % (name, path)) - return path - - -class Writer(html4css1.Writer): - - settings_spec = html4css1.Writer.settings_spec + ( - 'S5 Slideshow Specific Options', - 'For the S5/HTML writer, the --no-toc-backlinks option ' - '(defined in General Docutils Options above) is the default, ' - 'and should not be changed.', - (('Specify an installed S5 theme by name. Overrides --theme-url. ' - 'The default theme name is "default". The theme files will be ' - 'copied into a "ui/" directory, in the same directory as the ' - 'destination file (output HTML). Note that existing theme files ' - 'will not be overwritten (unless --overwrite-theme-files is used).', - ['--theme'], - {'default': 'default', 'metavar': '', - 'overrides': 'theme_url'}), - ('Specify an S5 theme URL. The destination file (output HTML) will ' - 'link to this theme; nothing will be copied. Overrides --theme.', - ['--theme-url'], - {'metavar': '', 'overrides': 'theme'}), - ('Allow existing theme files in the ``ui/`` directory to be ' - 'overwritten. The default is not to overwrite theme files.', - ['--overwrite-theme-files'], - {'action': 'store_true', 'validator': frontend.validate_boolean}), - ('Keep existing theme files in the ``ui/`` directory; do not ' - 'overwrite any. 
This is the default.', - ['--keep-theme-files'], - {'dest': 'overwrite_theme_files', 'action': 'store_false'}), - ('Set the initial view mode to "slideshow" [default] or "outline".', - ['--view-mode'], - {'choices': ['slideshow', 'outline'], 'default': 'slideshow', - 'metavar': ''}), - ('Normally hide the presentation controls in slideshow mode. ' - 'This is the default.', - ['--hidden-controls'], - {'action': 'store_true', 'default': True, - 'validator': frontend.validate_boolean}), - ('Always show the presentation controls in slideshow mode. ' - 'The default is to hide the controls.', - ['--visible-controls'], - {'dest': 'hidden_controls', 'action': 'store_false'}), - ('Enable the current slide indicator ("1 / 15"). ' - 'The default is to disable it.', - ['--current-slide'], - {'action': 'store_true', 'validator': frontend.validate_boolean}), - ('Disable the current slide indicator. This is the default.', - ['--no-current-slide'], - {'dest': 'current_slide', 'action': 'store_false'}),)) - - settings_default_overrides = {'toc_backlinks': 0} - - config_section = 's5_html writer' - config_section_dependencies = ('writers', 'html4css1 writer') - - def __init__(self): - html4css1.Writer.__init__(self) - self.translator_class = S5HTMLTranslator - - -class S5HTMLTranslator(html4css1.HTMLTranslator): - - s5_stylesheet_template = """\ - - - - - - - - -\n""" - # The script element must go in front of the link elements to - # avoid a flash of unstyled content (FOUC), reproducible with - # Firefox. - - disable_current_slide = """ -\n""" - - layout_template = """\ -
    -
    -
    - - -
    \n""" -#
    -#
    -#
    -#
    - - default_theme = 'default' - """Name of the default theme.""" - - base_theme_file = '__base__' - """Name of the file containing the name of the base theme.""" - - direct_theme_files = ( - 'slides.css', 'outline.css', 'print.css', 'opera.css', 'slides.js') - """Names of theme files directly linked to in the output HTML""" - - indirect_theme_files = ( - 's5-core.css', 'framing.css', 'pretty.css', 'blank.gif', 'iepngfix.htc') - """Names of files used indirectly; imported or used by files in - `direct_theme_files`.""" - - required_theme_files = indirect_theme_files + direct_theme_files - """Names of mandatory theme files.""" - - def __init__(self, *args): - html4css1.HTMLTranslator.__init__(self, *args) - #insert S5-specific stylesheet and script stuff: - self.theme_file_path = None - self.setup_theme() - view_mode = self.document.settings.view_mode - control_visibility = ('visible', 'hidden')[self.document.settings - .hidden_controls] - self.stylesheet.append(self.s5_stylesheet_template - % {'path': self.theme_file_path, - 'view_mode': view_mode, - 'control_visibility': control_visibility}) - if not self.document.settings.current_slide: - self.stylesheet.append(self.disable_current_slide) - self.add_meta('\n') - self.s5_footer = [] - self.s5_header = [] - self.section_count = 0 - self.theme_files_copied = None - - def setup_theme(self): - if self.document.settings.theme: - self.copy_theme() - elif self.document.settings.theme_url: - self.theme_file_path = self.document.settings.theme_url - else: - raise docutils.ApplicationError( - 'No theme specified for S5/HTML writer.') - - def copy_theme(self): - """ - Locate & copy theme files. - - A theme may be explicitly based on another theme via a '__base__' - file. The default base theme is 'default'. Files are accumulated - from the specified theme, any base themes, and 'default'. - """ - settings = self.document.settings - path = find_theme(settings.theme) - theme_paths = [path] - self.theme_files_copied = {} - required_files_copied = {} - # This is a link (URL) in HTML, so we use "/", not os.sep: - self.theme_file_path = '%s/%s' % ('ui', settings.theme) - if settings._destination: - dest = os.path.join( - os.path.dirname(settings._destination), 'ui', settings.theme) - if not os.path.isdir(dest): - os.makedirs(dest) - else: - # no destination, so we can't copy the theme - return - default = 0 - while path: - for f in os.listdir(path): # copy all files from each theme - if f == self.base_theme_file: - continue # ... except the "__base__" file - if ( self.copy_file(f, path, dest) - and f in self.required_theme_files): - required_files_copied[f] = 1 - if default: - break # "default" theme has no base theme - # Find the "__base__" file in theme directory: - base_theme_file = os.path.join(path, self.base_theme_file) - # If it exists, read it and record the theme path: - if os.path.isfile(base_theme_file): - lines = open(base_theme_file).readlines() - for line in lines: - line = line.strip() - if line and not line.startswith('#'): - path = find_theme(line) - if path in theme_paths: # check for duplicates (cycles) - path = None # if found, use default base - else: - theme_paths.append(path) - break - else: # no theme name found - path = None # use default base - else: # no base theme file found - path = None # use default base - if not path: - path = find_theme(self.default_theme) - theme_paths.append(path) - default = 1 - if len(required_files_copied) != len(self.required_theme_files): - # Some required files weren't found & couldn't be copied. 
- required = list(self.required_theme_files) - for f in required_files_copied.keys(): - required.remove(f) - raise docutils.ApplicationError( - 'Theme files not found: %s' - % ', '.join(['%r' % f for f in required])) - - files_to_skip_pattern = re.compile(r'~$|\.bak$|#$|\.cvsignore$') - - def copy_file(self, name, source_dir, dest_dir): - """ - Copy file `name` from `source_dir` to `dest_dir`. - Return 1 if the file exists in either `source_dir` or `dest_dir`. - """ - source = os.path.join(source_dir, name) - dest = os.path.join(dest_dir, name) - if self.theme_files_copied.has_key(dest): - return 1 - else: - self.theme_files_copied[dest] = 1 - if os.path.isfile(source): - if self.files_to_skip_pattern.search(source): - return None - settings = self.document.settings - if os.path.exists(dest) and not settings.overwrite_theme_files: - settings.record_dependencies.add(dest) - else: - src_file = open(source, 'rb') - src_data = src_file.read() - src_file.close() - dest_file = open(dest, 'wb') - dest_dir = dest_dir.replace(os.sep, '/') - dest_file.write(src_data.replace( - 'ui/default', dest_dir[dest_dir.rfind('ui/'):])) - dest_file.close() - settings.record_dependencies.add(source) - return 1 - if os.path.isfile(dest): - return 1 - - def depart_document(self, node): - header = ''.join(self.s5_header) - footer = ''.join(self.s5_footer) - title = ''.join(self.html_title).replace('
<h1 class="title">', '<h1>
    ') - layout = self.layout_template % {'header': header, - 'title': title, - 'footer': footer} - self.fragment.extend(self.body) - self.body_prefix.extend(layout) - self.body_prefix.append('
<div class="presentation">\n') - self.body_prefix.append( - self.starttag({'classes': ['slide'], 'ids': ['slide0']}, 'div')) - if not self.section_count: - self.body.append('</div>
    \n') - self.body_suffix.insert(0, '
</div>
    \n') - # skip content-type meta tag with interpolated charset value: - self.html_head.extend(self.head[1:]) - self.html_body.extend(self.body_prefix[1:] + self.body_pre_docinfo - + self.docinfo + self.body - + self.body_suffix[:-1]) - - def depart_footer(self, node): - start = self.context.pop() - self.s5_footer.append('
<h2>
    ') - self.s5_footer.extend(self.body[start:]) - self.s5_footer.append('
</h2>
    ') - del self.body[start:] - - def depart_header(self, node): - start = self.context.pop() - header = ['\n') - del self.body[start:] - self.s5_header.extend(header) - - def visit_section(self, node): - if not self.section_count: - self.body.append('\n\n') - self.section_count += 1 - self.section_level += 1 - if self.section_level > 1: - # dummy for matching div's - self.body.append(self.starttag(node, 'div', CLASS='section')) - else: - self.body.append(self.starttag(node, 'div', CLASS='slide')) - - def visit_subtitle(self, node): - if isinstance(node.parent, nodes.section): - level = self.section_level + self.initial_header_level - 1 - if level == 1: - level = 2 - tag = 'h%s' % level - self.body.append(self.starttag(node, tag, '')) - self.context.append('\n' % tag) - else: - html4css1.HTMLTranslator.visit_subtitle(self, node) - - def visit_title(self, node): - html4css1.HTMLTranslator.visit_title(self, node) diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/README.txt --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/README.txt Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,6 +0,0 @@ -Except where otherwise noted (default/iepngfix.htc), all files in this -directory have been released into the Public Domain. - -These files are based on files from S5 1.1, released into the Public -Domain by Eric Meyer. For further details, please see -http://www.meyerweb.com/eric/tools/s5/credits.html. diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/big-black/__base__ --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/big-black/__base__ Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,2 +0,0 @@ -# base theme of this theme: -big-white diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/big-black/framing.css --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/big-black/framing.css Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,25 +0,0 @@ -/* The following styles size, place, and layer the slide components. - Edit these if you want to change the overall slide layout. - The commented lines can be uncommented (and modified, if necessary) - to help you with the rearrangement process. 
*/ - -/* target = 1024x768 */ - -div#header, div#footer, .slide {width: 100%; top: 0; left: 0;} -div#header {top: 0; z-index: 1;} -div#footer {display:none;} -.slide {top: 0; width: 92%; padding: 0.1em 4% 4%; z-index: 2;} -/* list-style: none;} */ -div#controls {left: 50%; bottom: 0; width: 50%; z-index: 100;} -div#controls form {position: absolute; bottom: 0; right: 0; width: 100%; - margin: 0;} -#currentSlide {position: absolute; width: 10%; left: 45%; bottom: 1em; - z-index: 10;} -html>body #currentSlide {position: fixed;} - -/* -div#header {background: #FCC;} -div#footer {background: #CCF;} -div#controls {background: #BBD;} -div#currentSlide {background: #FFC;} -*/ diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/big-black/pretty.css --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/big-black/pretty.css Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,109 +0,0 @@ -/* This file has been placed in the public domain. */ -/* Following are the presentation styles -- edit away! */ - -html, body {margin: 0; padding: 0;} -body {background: black; color: white;} -:link, :visited {text-decoration: none; color: cyan;} -#controls :active {color: #888 !important;} -#controls :focus {outline: 1px dotted #CCC;} - -blockquote {padding: 0 2em 0.5em; margin: 0 1.5em 0.5em;} -blockquote p {margin: 0;} - -kbd {font-weight: bold; font-size: 1em;} -sup {font-size: smaller; line-height: 1px;} - -.slide pre {padding: 0; margin-left: 0; margin-right: 0; font-size: 90%;} -.slide ul ul li {list-style: square;} -.slide img.leader {display: block; margin: 0 auto;} -.slide tt {font-size: 90%;} - -.slide {font-size: 3em; font-family: sans-serif; font-weight: bold;} -.slide h1 {padding-top: 0; z-index: 1; margin: 0; font-size: 120%;} -.slide h2 {font-size: 110%;} -.slide h3 {font-size: 105%;} -h1 abbr {font-variant: small-caps;} - -div#controls {position: absolute; left: 50%; bottom: 0; - width: 50%; text-align: right; font: bold 0.9em sans-serif;} -html>body div#controls {position: fixed; padding: 0 0 1em 0; top: auto;} -div#controls form {position: absolute; bottom: 0; right: 0; width: 100%; - margin: 0; padding: 0;} -#controls #navLinks a {padding: 0; margin: 0 0.5em; - border: none; color: #888; cursor: pointer;} -#controls #navList {height: 1em;} -#controls #navList #jumplist {position: absolute; bottom: 0; right: 0; - background: black; color: #CCC;} - -#currentSlide {text-align: center; font-size: 0.5em; color: #AAA; - font-family: sans-serif; font-weight: bold;} - -#slide0 h1 {position: static; margin: 0 0 0.5em; padding-top: 0.3em; top: 0; - font-size: 150%; white-space: normal; background: transparent;} -#slide0 h2 {font: 110%; font-style: italic; color: gray;} -#slide0 h3 {margin-top: 1.5em; font-size: 1.5em;} -#slide0 h4 {margin-top: 0; font-size: 1em;} - -ul.urls {list-style: none; display: inline; margin: 0;} -.urls li {display: inline; margin: 0;} -.external {border-bottom: 1px dotted gray;} -html>body .external {border-bottom: none;} -.external:after {content: " \274F"; font-size: smaller; color: #FCC;} - -.incremental, .incremental *, .incremental *:after { - color: black; visibility: visible; border: 0;} -img.incremental {visibility: hidden;} -.slide .current {color: lime;} - -.slide-display {display: inline ! 
important;} - -.huge {font-size: 150%;} -.big {font-size: 120%;} -.small {font-size: 75%;} -.tiny {font-size: 50%;} -.huge tt, .big tt, .small tt, .tiny tt {font-size: 115%;} -.huge pre, .big pre, .small pre, .tiny pre {font-size: 115%;} - -.maroon {color: maroon;} -.red {color: red;} -.magenta {color: magenta;} -.fuchsia {color: fuchsia;} -.pink {color: #FAA;} -.orange {color: orange;} -.yellow {color: yellow;} -.lime {color: lime;} -.green {color: green;} -.olive {color: olive;} -.teal {color: teal;} -.cyan {color: cyan;} -.aqua {color: aqua;} -.blue {color: blue;} -.navy {color: navy;} -.purple {color: purple;} -.black {color: black;} -.gray {color: gray;} -.silver {color: silver;} -.white {color: white;} - -.left {text-align: left ! important;} -.center {text-align: center ! important;} -.right {text-align: right ! important;} - -.animation {position: relative; margin: 1em 0; padding: 0;} -.animation img {position: absolute;} - -/* Docutils-specific overrides */ - -.slide table.docinfo {margin: 0.5em 0 0.5em 1em;} - -div.sidebar {background-color: black;} - -pre.literal-block, pre.doctest-block {background-color: black;} - -tt.docutils {background-color: black;} - -/* diagnostics */ -/* -li:after {content: " [" attr(class) "]"; color: #F88;} -div:before {content: "[" attr(class) "]"; color: #F88;} -*/ diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/big-white/framing.css --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/big-white/framing.css Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,24 +0,0 @@ -/* This file has been placed in the public domain. */ -/* The following styles size, place, and layer the slide components. - Edit these if you want to change the overall slide layout. - The commented lines can be uncommented (and modified, if necessary) - to help you with the rearrangement process. */ - -/* target = 1024x768 */ - -div#header, div#footer, .slide {width: 100%; top: 0; left: 0;} -div#footer {display:none;} -.slide {top: 0; width: 92%; padding: 0.25em 4% 4%; z-index: 2;} -div#controls {left: 50%; bottom: 0; width: 50%; z-index: 100;} -div#controls form {position: absolute; bottom: 0; right: 0; width: 100%; - margin: 0;} -#currentSlide {position: absolute; width: 10%; left: 45%; bottom: 1em; - z-index: 10;} -html>body #currentSlide {position: fixed;} - -/* -div#header {background: #FCC;} -div#footer {background: #CCF;} -div#controls {background: #BBD;} -div#currentSlide {background: #FFC;} -*/ diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/big-white/pretty.css --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/big-white/pretty.css Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,107 +0,0 @@ -/* This file has been placed in the public domain. */ -/* Following are the presentation styles -- edit away! 
*/ - -html, body {margin: 0; padding: 0;} -body {background: white; color: black;} -:link, :visited {text-decoration: none; color: #00C;} -#controls :active {color: #88A !important;} -#controls :focus {outline: 1px dotted #227;} - -blockquote {padding: 0 2em 0.5em; margin: 0 1.5em 0.5em;} -blockquote p {margin: 0;} - -kbd {font-weight: bold; font-size: 1em;} -sup {font-size: smaller; line-height: 1px;} - -.slide pre {padding: 0; margin-left: 0; margin-right: 0; font-size: 90%;} -.slide ul ul li {list-style: square;} -.slide img.leader {display: block; margin: 0 auto;} -.slide tt {font-size: 90%;} - -.slide {font-size: 3em; font-family: sans-serif; font-weight: bold;} -.slide h1 {padding-top: 0; z-index: 1; margin: 0; font-size: 120%;} -.slide h2 {font-size: 110%;} -.slide h3 {font-size: 105%;} -h1 abbr {font-variant: small-caps;} - -div#controls {position: absolute; left: 50%; bottom: 0; - width: 50%; text-align: right; font: bold 0.9em sans-serif;} -html>body div#controls {position: fixed; padding: 0 0 1em 0; top: auto;} -div#controls form {position: absolute; bottom: 0; right: 0; width: 100%; - margin: 0; padding: 0;} -#controls #navLinks a {padding: 0; margin: 0 0.5em; - border: none; color: #005; cursor: pointer;} -#controls #navList {height: 1em;} -#controls #navList #jumplist {position: absolute; bottom: 0; right: 0; - background: #DDD; color: #227;} - -#currentSlide {text-align: center; font-size: 0.5em; color: #444; - font-family: sans-serif; font-weight: bold;} - -#slide0 h1 {position: static; margin: 0 0 0.5em; padding-top: 0.3em; top: 0; - font-size: 150%; white-space: normal; background: transparent;} -#slide0 h2 {font: 110%; font-style: italic; color: gray;} -#slide0 h3 {margin-top: 1.5em; font-size: 1.5em;} -#slide0 h4 {margin-top: 0; font-size: 1em;} - -ul.urls {list-style: none; display: inline; margin: 0;} -.urls li {display: inline; margin: 0;} -.external {border-bottom: 1px dotted gray;} -html>body .external {border-bottom: none;} -.external:after {content: " \274F"; font-size: smaller; color: #77B;} - -.incremental, .incremental *, .incremental *:after { - color: white; visibility: visible; border: 0;} -img.incremental {visibility: hidden;} -.slide .current {color: green;} - -.slide-display {display: inline ! important;} - -.huge {font-size: 150%;} -.big {font-size: 120%;} -.small {font-size: 75%;} -.tiny {font-size: 50%;} -.huge tt, .big tt, .small tt, .tiny tt {font-size: 115%;} -.huge pre, .big pre, .small pre, .tiny pre {font-size: 115%;} - -.maroon {color: maroon;} -.red {color: red;} -.magenta {color: magenta;} -.fuchsia {color: fuchsia;} -.pink {color: #FAA;} -.orange {color: orange;} -.yellow {color: yellow;} -.lime {color: lime;} -.green {color: green;} -.olive {color: olive;} -.teal {color: teal;} -.cyan {color: cyan;} -.aqua {color: aqua;} -.blue {color: blue;} -.navy {color: navy;} -.purple {color: purple;} -.black {color: black;} -.gray {color: gray;} -.silver {color: silver;} -.white {color: white;} - -.left {text-align: left ! important;} -.center {text-align: center ! important;} -.right {text-align: right ! 
important;} - -.animation {position: relative; margin: 1em 0; padding: 0;} -.animation img {position: absolute;} - -/* Docutils-specific overrides */ - -.slide table.docinfo {margin: 0.5em 0 0.5em 1em;} - -pre.literal-block, pre.doctest-block {background-color: white;} - -tt.docutils {background-color: white;} - -/* diagnostics */ -/* -li:after {content: " [" attr(class) "]"; color: #F88;} -div:before {content: "[" attr(class) "]"; color: #F88;} -*/ diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/blank.gif Binary file buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/blank.gif has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/framing.css --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/framing.css Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,25 +0,0 @@ -/* This file has been placed in the public domain. */ -/* The following styles size, place, and layer the slide components. - Edit these if you want to change the overall slide layout. - The commented lines can be uncommented (and modified, if necessary) - to help you with the rearrangement process. */ - -/* target = 1024x768 */ - -div#header, div#footer, .slide {width: 100%; top: 0; left: 0;} -div#header {position: fixed; top: 0; height: 3em; z-index: 1;} -div#footer {top: auto; bottom: 0; height: 2.5em; z-index: 5;} -.slide {top: 0; width: 92%; padding: 2.5em 4% 4%; z-index: 2;} -div#controls {left: 50%; bottom: 0; width: 50%; z-index: 100;} -div#controls form {position: absolute; bottom: 0; right: 0; width: 100%; - margin: 0;} -#currentSlide {position: absolute; width: 10%; left: 45%; bottom: 1em; - z-index: 10;} -html>body #currentSlide {position: fixed;} - -/* -div#header {background: #FCC;} -div#footer {background: #CCF;} -div#controls {background: #BBD;} -div#currentSlide {background: #FFC;} -*/ diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/iepngfix.htc --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/iepngfix.htc Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,42 +0,0 @@ - - - - - \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/opera.css --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/opera.css Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,8 +0,0 @@ -/* This file has been placed in the public domain. 
*/ -/* DO NOT CHANGE THESE unless you really want to break Opera Show */ -.slide { - visibility: visible !important; - position: static !important; - page-break-before: always; -} -#slide0 {page-break-before: avoid;} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/outline.css --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/outline.css Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,16 +0,0 @@ -/* This file has been placed in the public domain. */ -/* Don't change this unless you want the layout stuff to show up in the - outline view! */ - -.layout div, #footer *, #controlForm * {display: none;} -#footer, #controls, #controlForm, #navLinks, #toggle { - display: block; visibility: visible; margin: 0; padding: 0;} -#toggle {float: right; padding: 0.5em;} -html>body #toggle {position: fixed; top: 0; right: 0;} - -/* making the outline look pretty-ish */ - -#slide0 h1, #slide0 h2, #slide0 h3, #slide0 h4 {border: none; margin: 0;} -#toggle {border: 1px solid; border-width: 0 0 1px 1px; background: #FFF;} - -.outline {display: inline ! important;} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/pretty.css --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/pretty.css Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,120 +0,0 @@ -/* This file has been placed in the public domain. */ -/* Following are the presentation styles -- edit away! */ - -html, body {margin: 0; padding: 0;} -body {background: white; color: black;} -/* Replace the background style above with the style below (and again for - div#header) for a graphic: */ -/* background: white url(bodybg.gif) -16px 0 no-repeat; */ -:link, :visited {text-decoration: none; color: #00C;} -#controls :active {color: #88A !important;} -#controls :focus {outline: 1px dotted #227;} -h1, h2, h3, h4 {font-size: 100%; margin: 0; padding: 0; font-weight: inherit;} - -blockquote {padding: 0 2em 0.5em; margin: 0 1.5em 0.5em;} -blockquote p {margin: 0;} - -kbd {font-weight: bold; font-size: 1em;} -sup {font-size: smaller; line-height: 1px;} - -.slide pre {padding: 0; margin-left: 0; margin-right: 0; font-size: 90%;} -.slide ul ul li {list-style: square;} -.slide img.leader {display: block; margin: 0 auto;} -.slide tt {font-size: 90%;} - -div#header, div#footer {background: #005; color: #AAB; font-family: sans-serif;} -/* background: #005 url(bodybg.gif) -16px 0 no-repeat; */ -div#footer {font-size: 0.5em; font-weight: bold; padding: 1em 0;} -#footer h1 {display: block; padding: 0 1em;} -#footer h2 {display: block; padding: 0.8em 1em 0;} - -.slide {font-size: 1.2em;} -.slide h1 {position: absolute; top: 0.45em; z-index: 1; - margin: 0; padding-left: 0.7em; white-space: nowrap; - font: bold 150% sans-serif; color: #DDE; background: #005;} -.slide h2 {font: bold 120%/1em sans-serif; padding-top: 0.5em;} -.slide h3 {font: bold 100% sans-serif; padding-top: 0.5em;} -h1 abbr {font-variant: small-caps;} - -div#controls {position: absolute; left: 50%; bottom: 0; - width: 50%; text-align: right; font: bold 0.9em sans-serif;} -html>body div#controls {position: fixed; padding: 0 0 1em 0; top: auto;} -div#controls form {position: absolute; bottom: 0; right: 0; width: 100%; - margin: 0; 
padding: 0;} -#controls #navLinks a {padding: 0; margin: 0 0.5em; - background: #005; border: none; color: #779; cursor: pointer;} -#controls #navList {height: 1em;} -#controls #navList #jumplist {position: absolute; bottom: 0; right: 0; - background: #DDD; color: #227;} - -#currentSlide {text-align: center; font-size: 0.5em; color: #449; - font-family: sans-serif; font-weight: bold;} - -#slide0 {padding-top: 1.5em} -#slide0 h1 {position: static; margin: 1em 0 0; padding: 0; color: #000; - font: bold 2em sans-serif; white-space: normal; background: transparent;} -#slide0 h2 {font: bold italic 1em sans-serif; margin: 0.25em;} -#slide0 h3 {margin-top: 1.5em; font-size: 1.5em;} -#slide0 h4 {margin-top: 0; font-size: 1em;} - -ul.urls {list-style: none; display: inline; margin: 0;} -.urls li {display: inline; margin: 0;} -.external {border-bottom: 1px dotted gray;} -html>body .external {border-bottom: none;} -.external:after {content: " \274F"; font-size: smaller; color: #77B;} - -.incremental, .incremental *, .incremental *:after {visibility: visible; - color: white; border: 0;} -img.incremental {visibility: hidden;} -.slide .current {color: green;} - -.slide-display {display: inline ! important;} - -.huge {font-family: sans-serif; font-weight: bold; font-size: 150%;} -.big {font-family: sans-serif; font-weight: bold; font-size: 120%;} -.small {font-size: 75%;} -.tiny {font-size: 50%;} -.huge tt, .big tt, .small tt, .tiny tt {font-size: 115%;} -.huge pre, .big pre, .small pre, .tiny pre {font-size: 115%;} - -.maroon {color: maroon;} -.red {color: red;} -.magenta {color: magenta;} -.fuchsia {color: fuchsia;} -.pink {color: #FAA;} -.orange {color: orange;} -.yellow {color: yellow;} -.lime {color: lime;} -.green {color: green;} -.olive {color: olive;} -.teal {color: teal;} -.cyan {color: cyan;} -.aqua {color: aqua;} -.blue {color: blue;} -.navy {color: navy;} -.purple {color: purple;} -.black {color: black;} -.gray {color: gray;} -.silver {color: silver;} -.white {color: white;} - -.left {text-align: left ! important;} -.center {text-align: center ! important;} -.right {text-align: right ! important;} - -.animation {position: relative; margin: 1em 0; padding: 0;} -.animation img {position: absolute;} - -/* Docutils-specific overrides */ - -.slide table.docinfo {margin: 1em 0 0.5em 2em;} - -pre.literal-block, pre.doctest-block {background-color: white;} - -tt.docutils {background-color: white;} - -/* diagnostics */ -/* -li:after {content: " [" attr(class) "]"; color: #F88;} -div:before {content: "[" attr(class) "]"; color: #F88;} -*/ diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/print.css --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/print.css Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,24 +0,0 @@ -/* This file has been placed in the public domain. */ -/* The following rule is necessary to have all slides appear in print! - DO NOT REMOVE IT! 
*/ -.slide, ul {page-break-inside: avoid; visibility: visible !important;} -h1 {page-break-after: avoid;} - -body {font-size: 12pt; background: white;} -* {color: black;} - -#slide0 h1 {font-size: 200%; border: none; margin: 0.5em 0 0.25em;} -#slide0 h3 {margin: 0; padding: 0;} -#slide0 h4 {margin: 0 0 0.5em; padding: 0;} -#slide0 {margin-bottom: 3em;} - -#header {display: none;} -#footer h1 {margin: 0; border-bottom: 1px solid; color: gray; - font-style: italic;} -#footer h2, #controls {display: none;} - -.print {display: inline ! important;} - -/* The following rule keeps the layout stuff out of print. - Remove at your own risk! */ -.layout, .layout * {display: none !important;} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/s5-core.css --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/s5-core.css Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,11 +0,0 @@ -/* This file has been placed in the public domain. */ -/* Do not edit or override these styles! - The system will likely break if you do. */ - -div#header, div#footer, div#controls, .slide {position: absolute;} -html>body div#header, html>body div#footer, - html>body div#controls, html>body .slide {position: fixed;} -.handout {display: none;} -.layout {display: block;} -.slide, .hideme, .incremental {visibility: hidden;} -#slide0 {visibility: visible;} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/slides.css --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/slides.css Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,10 +0,0 @@ -/* This file has been placed in the public domain. */ - -/* required to make the slide show run at all */ -@import url(s5-core.css); - -/* sets basic placement and size of slide components */ -@import url(framing.css); - -/* styles that make the slides look good */ -@import url(pretty.css); diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/slides.js --- a/buildframework/helium/external/python/lib/2.5/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/slides.js Wed Oct 28 14:39:48 2009 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,558 +0,0 @@ -// S5 v1.1 slides.js -- released into the Public Domain -// Modified for Docutils (http://docutils.sf.net) by David Goodger -// -// Please see http://www.meyerweb.com/eric/tools/s5/credits.html for -// information about all the wonderful and talented contributors to this code! - -var undef; -var slideCSS = ''; -var snum = 0; -var smax = 1; -var slideIDs = new Array(); -var incpos = 0; -var number = undef; -var s5mode = true; -var defaultView = 'slideshow'; -var controlVis = 'visible'; - -var isIE = navigator.appName == 'Microsoft Internet Explorer' ? 1 : 0; -var isOp = navigator.userAgent.indexOf('Opera') > -1 ? 1 : 0; -var isGe = navigator.userAgent.indexOf('Gecko') > -1 && navigator.userAgent.indexOf('Safari') < 1 ? 
1 : 0; - -function hasClass(object, className) { - if (!object.className) return false; - return (object.className.search('(^|\\s)' + className + '(\\s|$)') != -1); -} - -function hasValue(object, value) { - if (!object) return false; - return (object.search('(^|\\s)' + value + '(\\s|$)') != -1); -} - -function removeClass(object,className) { - if (!object) return; - object.className = object.className.replace(new RegExp('(^|\\s)'+className+'(\\s|$)'), RegExp.$1+RegExp.$2); -} - -function addClass(object,className) { - if (!object || hasClass(object, className)) return; - if (object.className) { - object.className += ' '+className; - } else { - object.className = className; - } -} - -function GetElementsWithClassName(elementName,className) { - var allElements = document.getElementsByTagName(elementName); - var elemColl = new Array(); - for (var i = 0; i< allElements.length; i++) { - if (hasClass(allElements[i], className)) { - elemColl[elemColl.length] = allElements[i]; - } - } - return elemColl; -} - -function isParentOrSelf(element, id) { - if (element == null || element.nodeName=='BODY') return false; - else if (element.id == id) return true; - else return isParentOrSelf(element.parentNode, id); -} - -function nodeValue(node) { - var result = ""; - if (node.nodeType == 1) { - var children = node.childNodes; - for (var i = 0; i < children.length; ++i) { - result += nodeValue(children[i]); - } - } - else if (node.nodeType == 3) { - result = node.nodeValue; - } - return(result); -} - -function slideLabel() { - var slideColl = GetElementsWithClassName('*','slide'); - var list = document.getElementById('jumplist'); - smax = slideColl.length; - for (var n = 0; n < smax; n++) { - var obj = slideColl[n]; - - var did = 'slide' + n.toString(); - if (obj.getAttribute('id')) { - slideIDs[n] = obj.getAttribute('id'); - } - else { - obj.setAttribute('id',did); - slideIDs[n] = did; - } - if (isOp) continue; - - var otext = ''; - var menu = obj.firstChild; - if (!menu) continue; // to cope with empty slides - while (menu && menu.nodeType == 3) { - menu = menu.nextSibling; - } - if (!menu) continue; // to cope with slides with only text nodes - - var menunodes = menu.childNodes; - for (var o = 0; o < menunodes.length; o++) { - otext += nodeValue(menunodes[o]); - } - list.options[list.length] = new Option(n + ' : ' + otext, n); - } -} - -function currentSlide() { - var cs; - var footer_nodes; - var vis = 'visible'; - if (document.getElementById) { - cs = document.getElementById('currentSlide'); - footer_nodes = document.getElementById('footer').childNodes; - } else { - cs = document.currentSlide; - footer = document.footer.childNodes; - } - cs.innerHTML = '' + snum + '<\/span> ' + - '\/<\/span> ' + - '' + (smax-1) + '<\/span>'; - if (snum == 0) { - vis = 'hidden'; - } - cs.style.visibility = vis; - for (var i = 0; i < footer_nodes.length; i++) { - if (footer_nodes[i].nodeType == 1) { - footer_nodes[i].style.visibility = vis; - } - } -} - -function go(step) { - if (document.getElementById('slideProj').disabled || step == 0) return; - var jl = document.getElementById('jumplist'); - var cid = slideIDs[snum]; - var ce = document.getElementById(cid); - if (incrementals[snum].length > 0) { - for (var i = 0; i < incrementals[snum].length; i++) { - removeClass(incrementals[snum][i], 'current'); - removeClass(incrementals[snum][i], 'incremental'); - } - } - if (step != 'j') { - snum += step; - lmax = smax - 1; - if (snum > lmax) snum = lmax; - if (snum < 0) snum = 0; - } else - snum = parseInt(jl.value); - var 
nid = slideIDs[snum]; - var ne = document.getElementById(nid); - if (!ne) { - ne = document.getElementById(slideIDs[0]); - snum = 0; - } - if (step < 0) {incpos = incrementals[snum].length} else {incpos = 0;} - if (incrementals[snum].length > 0 && incpos == 0) { - for (var i = 0; i < incrementals[snum].length; i++) { - if (hasClass(incrementals[snum][i], 'current')) - incpos = i + 1; - else - addClass(incrementals[snum][i], 'incremental'); - } - } - if (incrementals[snum].length > 0 && incpos > 0) - addClass(incrementals[snum][incpos - 1], 'current'); - ce.style.visibility = 'hidden'; - ne.style.visibility = 'visible'; - jl.selectedIndex = snum; - currentSlide(); - number = 0; -} - -function goTo(target) { - if (target >= smax || target == snum) return; - go(target - snum); -} - -function subgo(step) { - if (step > 0) { - removeClass(incrementals[snum][incpos - 1],'current'); - removeClass(incrementals[snum][incpos], 'incremental'); - addClass(incrementals[snum][incpos],'current'); - incpos++; - } else { - incpos--; - removeClass(incrementals[snum][incpos],'current'); - addClass(incrementals[snum][incpos], 'incremental'); - addClass(incrementals[snum][incpos - 1],'current'); - } -} - -function toggle() { - var slideColl = GetElementsWithClassName('*','slide'); - var slides = document.getElementById('slideProj'); - var outline = document.getElementById('outlineStyle'); - if (!slides.disabled) { - slides.disabled = true; - outline.disabled = false; - s5mode = false; - fontSize('1em'); - for (var n = 0; n < smax; n++) { - var slide = slideColl[n]; - slide.style.visibility = 'visible'; - } - } else { - slides.disabled = false; - outline.disabled = true; - s5mode = true; - fontScale(); - for (var n = 0; n < smax; n++) { - var slide = slideColl[n]; - slide.style.visibility = 'hidden'; - } - slideColl[snum].style.visibility = 'visible'; - } -} - -function showHide(action) { - var obj = GetElementsWithClassName('*','hideme')[0]; - switch (action) { - case 's': obj.style.visibility = 'visible'; break; - case 'h': obj.style.visibility = 'hidden'; break; - case 'k': - if (obj.style.visibility != 'visible') { - obj.style.visibility = 'visible'; - } else { - obj.style.visibility = 'hidden'; - } - break; - } -} - -// 'keys' code adapted from MozPoint (http://mozpoint.mozdev.org/) -function keys(key) { - if (!key) { - key = event; - key.which = key.keyCode; - } - if (key.which == 84) { - toggle(); - return; - } - if (s5mode) { - switch (key.which) { - case 10: // return - case 13: // enter - if (window.event && isParentOrSelf(window.event.srcElement, 'controls')) return; - if (key.target && isParentOrSelf(key.target, 'controls')) return; - if(number != undef) { - goTo(number); - break; - } - case 32: // spacebar - case 34: // page down - case 39: // rightkey - case 40: // downkey - if(number != undef) { - go(number); - } else if (!incrementals[snum] || incpos >= incrementals[snum].length) { - go(1); - } else { - subgo(1); - } - break; - case 33: // page up - case 37: // leftkey - case 38: // upkey - if(number != undef) { - go(-1 * number); - } else if (!incrementals[snum] || incpos <= 0) { - go(-1); - } else { - subgo(-1); - } - break; - case 36: // home - goTo(0); - break; - case 35: // end - goTo(smax-1); - break; - case 67: // c - showHide('k'); - break; - } - if (key.which < 48 || key.which > 57) { - number = undef; - } else { - if (window.event && isParentOrSelf(window.event.srcElement, 'controls')) return; - if (key.target && isParentOrSelf(key.target, 'controls')) return; - number = (((number != 
undef) ? number : 0) * 10) + (key.which - 48); - } - } - return false; -} - -function clicker(e) { - number = undef; - var target; - if (window.event) { - target = window.event.srcElement; - e = window.event; - } else target = e.target; - if (target.href != null || hasValue(target.rel, 'external') || isParentOrSelf(target, 'controls') || isParentOrSelf(target,'embed') || isParentOrSelf(target, 'object')) return true; - if (!e.which || e.which == 1) { - if (!incrementals[snum] || incpos >= incrementals[snum].length) { - go(1); - } else { - subgo(1); - } - } -} - -function findSlide(hash) { - var target = document.getElementById(hash); - if (target) { - for (var i = 0; i < slideIDs.length; i++) { - if (target.id == slideIDs[i]) return i; - } - } - return null; -} - -function slideJump() { - if (window.location.hash == null || window.location.hash == '') { - currentSlide(); - return; - } - if (window.location.hash == null) return; - var dest = null; - dest = findSlide(window.location.hash.slice(1)); - if (dest == null) { - dest = 0; - } - go(dest - snum); -} - -function fixLinks() { - var thisUri = window.location.href; - thisUri = thisUri.slice(0, thisUri.length - window.location.hash.length); - var aelements = document.getElementsByTagName('A'); - for (var i = 0; i < aelements.length; i++) { - var a = aelements[i].href; - var slideID = a.match('\#.+'); - if ((slideID) && (slideID[0].slice(0,1) == '#')) { - var dest = findSlide(slideID[0].slice(1)); - if (dest != null) { - if (aelements[i].addEventListener) { - aelements[i].addEventListener("click", new Function("e", - "if (document.getElementById('slideProj').disabled) return;" + - "go("+dest+" - snum); " + - "if (e.preventDefault) e.preventDefault();"), true); - } else if (aelements[i].attachEvent) { - aelements[i].attachEvent("onclick", new Function("", - "if (document.getElementById('slideProj').disabled) return;" + - "go("+dest+" - snum); " + - "event.returnValue = false;")); - } - } - } - } -} - -function externalLinks() { - if (!document.getElementsByTagName) return; - var anchors = document.getElementsByTagName('a'); - for (var i=0; i' + - '\n') + self.footer.extend(footer) + self.body_suffix[:0] = footer + del self.body[start:] + + def visit_footnote(self, node): + self.body.append(self.starttag(node, 'table', + CLASS='docutils footnote', + frame="void", rules="none")) + self.body.append('\n' + '\n' + '') + self.footnote_backrefs(node) + + def footnote_backrefs(self, node): + backlinks = [] + backrefs = node['backrefs'] + if self.settings.footnote_backlinks and backrefs: + if len(backrefs) == 1: + self.context.append('') + self.context.append('') + self.context.append('' + % backrefs[0]) + else: + i = 1 + for backref in backrefs: + backlinks.append('%s' + % (backref, i)) + i += 1 + self.context.append('(%s) ' % ', '.join(backlinks)) + self.context += ['', ''] + else: + self.context.append('') + self.context += ['', ''] + # If the node does not only consist of a label. + if len(node) > 1: + # If there are preceding backlinks, we do not set class + # 'first', because we need to retain the top-margin. 
+ if not backlinks: + node[1]['classes'].append('first') + node[-1]['classes'].append('last') + + def depart_footnote(self, node): + self.body.append('\n' + '\n\n') + + def visit_footnote_reference(self, node): + href = '#' + node['refid'] + format = self.settings.footnote_references + if format == 'brackets': + suffix = '[' + self.context.append(']') + else: + assert format == 'superscript' + suffix = '' + self.context.append('') + self.body.append(self.starttag(node, 'a', suffix, + CLASS='footnote-reference', href=href)) + + def depart_footnote_reference(self, node): + self.body.append(self.context.pop() + '') + + def visit_generated(self, node): + pass + + def depart_generated(self, node): + pass + + def visit_header(self, node): + self.context.append(len(self.body)) + + def depart_header(self, node): + start = self.context.pop() + header = [self.starttag(node, 'div', CLASS='header')] + header.extend(self.body[start:]) + header.append('\n
    \n\n') + self.body_prefix.extend(header) + self.header.extend(header) + del self.body[start:] + + def visit_image(self, node): + atts = {} + atts['src'] = node['uri'] + if node.has_key('width'): + atts['width'] = node['width'] + if node.has_key('height'): + atts['height'] = node['height'] + if node.has_key('scale'): + if Image and not (node.has_key('width') + and node.has_key('height')): + try: + im = Image.open(str(atts['src'])) + except (IOError, # Source image can't be found or opened + UnicodeError): # PIL doesn't like Unicode paths. + pass + else: + if not atts.has_key('width'): + atts['width'] = str(im.size[0]) + if not atts.has_key('height'): + atts['height'] = str(im.size[1]) + del im + for att_name in 'width', 'height': + if atts.has_key(att_name): + match = re.match(r'([0-9.]+)(\S*)$', atts[att_name]) + assert match + atts[att_name] = '%s%s' % ( + float(match.group(1)) * (float(node['scale']) / 100), + match.group(2)) + style = [] + for att_name in 'width', 'height': + if atts.has_key(att_name): + if re.match(r'^[0-9.]+$', atts[att_name]): + # Interpret unitless values as pixels. + atts[att_name] += 'px' + style.append('%s: %s;' % (att_name, atts[att_name])) + del atts[att_name] + if style: + atts['style'] = ' '.join(style) + atts['alt'] = node.get('alt', atts['src']) + if (isinstance(node.parent, nodes.TextElement) or + (isinstance(node.parent, nodes.reference) and + not isinstance(node.parent.parent, nodes.TextElement))): + # Inline context or surrounded by .... + suffix = '' + else: + suffix = '\n' + if node.has_key('align'): + if node['align'] == 'center': + # "align" attribute is set in surrounding "div" element. + self.body.append('
    ') + self.context.append('
    \n') + suffix = '' + else: + # "align" attribute is set in "img" element. + atts['align'] = node['align'] + self.context.append('') + atts['class'] = 'align-%s' % node['align'] + else: + self.context.append('') + self.body.append(self.emptytag(node, 'img', suffix, **atts)) + + def depart_image(self, node): + self.body.append(self.context.pop()) + + def visit_inline(self, node): + self.body.append(self.starttag(node, 'span', '')) + + def depart_inline(self, node): + self.body.append('
    ') + + def visit_label(self, node): + # Context added in footnote_backrefs. + self.body.append(self.starttag(node, 'td', '%s[' % self.context.pop(), + CLASS='label')) + + def depart_label(self, node): + # Context added in footnote_backrefs. + self.body.append(']%s%s' % (self.context.pop(), self.context.pop())) + + def visit_legend(self, node): + self.body.append(self.starttag(node, 'div', CLASS='legend')) + + def depart_legend(self, node): + self.body.append('\n') + + def visit_line(self, node): + self.body.append(self.starttag(node, 'div', suffix='', CLASS='line')) + if not len(node): + self.body.append('
    ') + + def depart_line(self, node): + self.body.append('\n') + + def visit_line_block(self, node): + self.body.append(self.starttag(node, 'div', CLASS='line-block')) + + def depart_line_block(self, node): + self.body.append('\n') + + def visit_list_item(self, node): + self.body.append(self.starttag(node, 'li', '')) + if len(node): + node[0]['classes'].append('first') + + def depart_list_item(self, node): + self.body.append('\n') + + def visit_literal(self, node): + """Process text to prevent tokens from wrapping.""" + self.body.append( + self.starttag(node, 'tt', '', CLASS='docutils literal')) + text = node.astext() + for token in self.words_and_spaces.findall(text): + if token.strip(): + # Protect text like "--an-option" from bad line wrapping: + self.body.append('%s' + % self.encode(token)) + elif token in ('\n', ' '): + # Allow breaks at whitespace: + self.body.append(token) + else: + # Protect runs of multiple spaces; the last space can wrap: + self.body.append(' ' * (len(token) - 1) + ' ') + self.body.append('') + # Content already processed: + raise nodes.SkipNode + + def visit_literal_block(self, node): + self.body.append(self.starttag(node, 'pre', CLASS='literal-block')) + + def depart_literal_block(self, node): + self.body.append('\n\n') + + def visit_meta(self, node): + meta = self.emptytag(node, 'meta', **node.non_default_attributes()) + self.add_meta(meta) + + def depart_meta(self, node): + pass + + def add_meta(self, tag): + self.meta.append(tag) + self.head.append(tag) + + def visit_option(self, node): + if self.context[-1]: + self.body.append(', ') + self.body.append(self.starttag(node, 'span', '', CLASS='option')) + + def depart_option(self, node): + self.body.append('
    ') + self.context[-1] += 1 + + def visit_option_argument(self, node): + self.body.append(node.get('delimiter', ' ')) + self.body.append(self.starttag(node, 'var', '')) + + def depart_option_argument(self, node): + self.body.append('') + + def visit_option_group(self, node): + atts = {} + if ( self.settings.option_limit + and len(node.astext()) > self.settings.option_limit): + atts['colspan'] = 2 + self.context.append('\n ') + else: + self.context.append('') + self.body.append( + self.starttag(node, 'td', CLASS='option-group', **atts)) + self.body.append('') + self.context.append(0) # count number of options + + def depart_option_group(self, node): + self.context.pop() + self.body.append('\n') + self.body.append(self.context.pop()) + + def visit_option_list(self, node): + self.body.append( + self.starttag(node, 'table', CLASS='docutils option-list', + frame="void", rules="none")) + self.body.append('\n' + '\n' + '\n') + + def depart_option_list(self, node): + self.body.append('\n\n') + + def visit_option_list_item(self, node): + self.body.append(self.starttag(node, 'tr', '')) + + def depart_option_list_item(self, node): + self.body.append('\n') + + def visit_option_string(self, node): + pass + + def depart_option_string(self, node): + pass + + def visit_organization(self, node): + self.visit_docinfo_item(node, 'organization') + + def depart_organization(self, node): + self.depart_docinfo_item() + + def should_be_compact_paragraph(self, node): + """ + Determine if the
<p>
    tags around paragraph ``node`` can be omitted. + """ + if (isinstance(node.parent, nodes.document) or + isinstance(node.parent, nodes.compound)): + # Never compact paragraphs in document or compound. + return 0 + for key, value in node.attlist(): + if (node.is_not_default(key) and + not (key == 'classes' and value in + ([], ['first'], ['last'], ['first', 'last']))): + # Attribute which needs to survive. + return 0 + first = isinstance(node.parent[0], nodes.label) # skip label + for child in node.parent.children[first:]: + # only first paragraph can be compact + if isinstance(child, nodes.Invisible): + continue + if child is node: + break + return 0 + parent_length = len([n for n in node.parent if not isinstance( + n, (nodes.Invisible, nodes.label))]) + if ( self.compact_simple + or self.compact_field_list + or self.compact_p and parent_length == 1): + return 1 + return 0 + + def visit_paragraph(self, node): + if self.should_be_compact_paragraph(node): + self.context.append('') + else: + self.body.append(self.starttag(node, 'p', '')) + self.context.append('
</p>
    \n') + + def depart_paragraph(self, node): + self.body.append(self.context.pop()) + + def visit_problematic(self, node): + if node.hasattr('refid'): + self.body.append('' % node['refid']) + self.context.append('') + else: + self.context.append('') + self.body.append(self.starttag(node, 'span', '', CLASS='problematic')) + + def depart_problematic(self, node): + self.body.append('
    ') + self.body.append(self.context.pop()) + + def visit_raw(self, node): + if 'html' in node.get('format', '').split(): + t = isinstance(node.parent, nodes.TextElement) and 'span' or 'div' + if node['classes']: + self.body.append(self.starttag(node, t, suffix='')) + self.body.append(node.astext()) + if node['classes']: + self.body.append('' % t) + # Keep non-HTML raw text out of output: + raise nodes.SkipNode + + def visit_reference(self, node): + atts = {'class': 'reference'} + if node.has_key('refuri'): + atts['href'] = node['refuri'] + if ( self.settings.cloak_email_addresses + and atts['href'].startswith('mailto:')): + atts['href'] = self.cloak_mailto(atts['href']) + self.in_mailto = 1 + atts['class'] += ' external' + else: + assert node.has_key('refid'), \ + 'References must have "refuri" or "refid" attribute.' + atts['href'] = '#' + node['refid'] + atts['class'] += ' internal' + if not isinstance(node.parent, nodes.TextElement): + assert len(node) == 1 and isinstance(node[0], nodes.image) + atts['class'] += ' image-reference' + self.body.append(self.starttag(node, 'a', '', **atts)) + + def depart_reference(self, node): + self.body.append('') + if not isinstance(node.parent, nodes.TextElement): + self.body.append('\n') + self.in_mailto = 0 + + def visit_revision(self, node): + self.visit_docinfo_item(node, 'revision', meta=None) + + def depart_revision(self, node): + self.depart_docinfo_item() + + def visit_row(self, node): + self.body.append(self.starttag(node, 'tr', '')) + node.column = 0 + + def depart_row(self, node): + self.body.append('\n') + + def visit_rubric(self, node): + self.body.append(self.starttag(node, 'p', '', CLASS='rubric')) + + def depart_rubric(self, node): + self.body.append('
</p>
    \n') + + def visit_section(self, node): + self.section_level += 1 + self.body.append( + self.starttag(node, 'div', CLASS='section')) + + def depart_section(self, node): + self.section_level -= 1 + self.body.append('\n') + + def visit_sidebar(self, node): + self.body.append( + self.starttag(node, 'div', CLASS='sidebar')) + self.set_first_last(node) + self.in_sidebar = 1 + + def depart_sidebar(self, node): + self.body.append('\n') + self.in_sidebar = None + + def visit_status(self, node): + self.visit_docinfo_item(node, 'status', meta=None) + + def depart_status(self, node): + self.depart_docinfo_item() + + def visit_strong(self, node): + self.body.append('') + + def depart_strong(self, node): + self.body.append('') + + def visit_subscript(self, node): + self.body.append(self.starttag(node, 'sub', '')) + + def depart_subscript(self, node): + self.body.append('') + + def visit_substitution_definition(self, node): + """Internal only.""" + raise nodes.SkipNode + + def visit_substitution_reference(self, node): + self.unimplemented_visit(node) + + def visit_subtitle(self, node): + if isinstance(node.parent, nodes.sidebar): + self.body.append(self.starttag(node, 'p', '', + CLASS='sidebar-subtitle')) + self.context.append('
</p>
    \n') + elif isinstance(node.parent, nodes.document): + self.body.append(self.starttag(node, 'h2', '', CLASS='subtitle')) + self.context.append('\n') + self.in_document_title = len(self.body) + elif isinstance(node.parent, nodes.section): + tag = 'h%s' % (self.section_level + self.initial_header_level - 1) + self.body.append( + self.starttag(node, tag, '', CLASS='section-subtitle') + + self.starttag({}, 'span', '', CLASS='section-subtitle')) + self.context.append('\n' % tag) + + def depart_subtitle(self, node): + self.body.append(self.context.pop()) + if self.in_document_title: + self.subtitle = self.body[self.in_document_title:-1] + self.in_document_title = 0 + self.body_pre_docinfo.extend(self.body) + self.html_subtitle.extend(self.body) + del self.body[:] + + def visit_superscript(self, node): + self.body.append(self.starttag(node, 'sup', '')) + + def depart_superscript(self, node): + self.body.append('') + + def visit_system_message(self, node): + self.body.append(self.starttag(node, 'div', CLASS='system-message')) + self.body.append('
<p class="system-message-title">
    ') + backref_text = '' + if len(node['backrefs']): + backrefs = node['backrefs'] + if len(backrefs) == 1: + backref_text = ('; backlink' + % backrefs[0]) + else: + i = 1 + backlinks = [] + for backref in backrefs: + backlinks.append('%s' % (backref, i)) + i += 1 + backref_text = ('; backlinks: %s' + % ', '.join(backlinks)) + if node.hasattr('line'): + line = ', line %s' % node['line'] + else: + line = '' + self.body.append('System Message: %s/%s ' + '(%s%s)%s
</p>
    \n' + % (node['type'], node['level'], + self.encode(node['source']), line, backref_text)) + + def depart_system_message(self, node): + self.body.append('\n') + + def visit_table(self, node): + self.body.append( + self.starttag(node, 'table', CLASS='docutils', border="1")) + + def depart_table(self, node): + self.body.append('\n') + + def visit_target(self, node): + if not (node.has_key('refuri') or node.has_key('refid') + or node.has_key('refname')): + self.body.append(self.starttag(node, 'span', '', CLASS='target')) + self.context.append('') + else: + self.context.append('') + + def depart_target(self, node): + self.body.append(self.context.pop()) + + def visit_tbody(self, node): + self.write_colspecs() + self.body.append(self.context.pop()) # '\n' or '' + self.body.append(self.starttag(node, 'tbody', valign='top')) + + def depart_tbody(self, node): + self.body.append('\n') + + def visit_term(self, node): + self.body.append(self.starttag(node, 'dt', '')) + + def depart_term(self, node): + """ + Leave the end tag to `self.visit_definition()`, in case there's a + classifier. + """ + pass + + def visit_tgroup(self, node): + # Mozilla needs : + self.body.append(self.starttag(node, 'colgroup')) + # Appended by thead or tbody: + self.context.append('\n') + node.stubs = [] + + def depart_tgroup(self, node): + pass + + def visit_thead(self, node): + self.write_colspecs() + self.body.append(self.context.pop()) # '\n' + # There may or may not be a ; this is for to use: + self.context.append('') + self.body.append(self.starttag(node, 'thead', valign='bottom')) + + def depart_thead(self, node): + self.body.append('\n') + + def visit_title(self, node): + """Only 6 section levels are supported by HTML.""" + check_id = 0 + close_tag = '
</p>
    \n' + if isinstance(node.parent, nodes.topic): + self.body.append( + self.starttag(node, 'p', '', CLASS='topic-title first')) + elif isinstance(node.parent, nodes.sidebar): + self.body.append( + self.starttag(node, 'p', '', CLASS='sidebar-title')) + elif isinstance(node.parent, nodes.Admonition): + self.body.append( + self.starttag(node, 'p', '', CLASS='admonition-title')) + elif isinstance(node.parent, nodes.table): + self.body.append( + self.starttag(node, 'caption', '')) + close_tag = '\n' + elif isinstance(node.parent, nodes.document): + self.body.append(self.starttag(node, 'h1', '', CLASS='title')) + close_tag = '\n' + self.in_document_title = len(self.body) + else: + assert isinstance(node.parent, nodes.section) + h_level = self.section_level + self.initial_header_level - 1 + atts = {} + if (len(node.parent) >= 2 and + isinstance(node.parent[1], nodes.subtitle)): + atts['CLASS'] = 'with-subtitle' + self.body.append( + self.starttag(node, 'h%s' % h_level, '', **atts)) + atts = {} + if node.hasattr('refid'): + atts['class'] = 'toc-backref' + atts['href'] = '#' + node['refid'] + if atts: + self.body.append(self.starttag({}, 'a', '', **atts)) + close_tag = '\n' % (h_level) + else: + close_tag = '\n' % (h_level) + self.context.append(close_tag) + + def depart_title(self, node): + self.body.append(self.context.pop()) + if self.in_document_title: + self.title = self.body[self.in_document_title:-1] + self.in_document_title = 0 + self.body_pre_docinfo.extend(self.body) + self.html_title.extend(self.body) + del self.body[:] + + def visit_title_reference(self, node): + self.body.append(self.starttag(node, 'cite', '')) + + def depart_title_reference(self, node): + self.body.append('') + + def visit_topic(self, node): + self.body.append(self.starttag(node, 'div', CLASS='topic')) + self.topic_classes = node['classes'] + + def depart_topic(self, node): + self.body.append('\n') + self.topic_classes = [] + + def visit_transition(self, node): + self.body.append(self.emptytag(node, 'hr', CLASS='docutils')) + + def depart_transition(self, node): + pass + + def visit_version(self, node): + self.visit_docinfo_item(node, 'version', meta=None) + + def depart_version(self, node): + self.depart_docinfo_item() + + def unimplemented_visit(self, node): + raise NotImplementedError('visiting unimplemented node type: %s' + % node.__class__.__name__) + + +class SimpleListChecker(nodes.GenericNodeVisitor): + + """ + Raise `nodes.NodeFound` if non-simple list item is encountered. + + Here "simple" means a list item containing nothing other than a single + paragraph, a simple list, or a paragraph followed by a simple list. 
+ """ + + def default_visit(self, node): + raise nodes.NodeFound + + def visit_bullet_list(self, node): + pass + + def visit_enumerated_list(self, node): + pass + + def visit_list_item(self, node): + children = [] + for child in node.children: + if not isinstance(child, nodes.Invisible): + children.append(child) + if (children and isinstance(children[0], nodes.paragraph) + and (isinstance(children[-1], nodes.bullet_list) + or isinstance(children[-1], nodes.enumerated_list))): + children.pop() + if len(children) <= 1: + return + else: + raise nodes.NodeFound + + def visit_paragraph(self, node): + raise nodes.SkipNode + + def invisible_visit(self, node): + """Invisible nodes should be ignored.""" + raise nodes.SkipNode + + visit_comment = invisible_visit + visit_substitution_definition = invisible_visit + visit_target = invisible_visit + visit_pending = invisible_visit diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/html4css1/html4css1.css --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/html4css1/html4css1.css Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,273 @@ +/* +:Author: David Goodger (goodger@python.org) +:Id: $Id: html4css1.css 5196 2007-06-03 20:25:28Z wiemann $ +:Copyright: This stylesheet has been placed in the public domain. + +Default cascading style sheet for the HTML output of Docutils. + +See http://docutils.sf.net/docs/howto/html-stylesheets.html for how to +customize this style sheet. +*/ + +/* used to remove borders from tables and images */ +.borderless, table.borderless td, table.borderless th { + border: 0 } + +table.borderless td, table.borderless th { + /* Override padding for "table.docutils td" with "! important". + The right padding separates the table cells. */ + padding: 0 0.5em 0 0 ! important } + +.first { + /* Override more specific margin styles with "! important". */ + margin-top: 0 ! important } + +.last, .with-subtitle { + margin-bottom: 0 ! important } + +.hidden { + display: none } + +a.toc-backref { + text-decoration: none ; + color: black } + +blockquote.epigraph { + margin: 2em 5em ; } + +dl.docutils dd { + margin-bottom: 0.5em } + +/* Uncomment (and remove this text!) to get bold-faced definition list terms +dl.docutils dt { + font-weight: bold } +*/ + +div.abstract { + margin: 2em 5em } + +div.abstract p.topic-title { + font-weight: bold ; + text-align: center } + +div.admonition, div.attention, div.caution, div.danger, div.error, +div.hint, div.important, div.note, div.tip, div.warning { + margin: 2em ; + border: medium outset ; + padding: 1em } + +div.admonition p.admonition-title, div.hint p.admonition-title, +div.important p.admonition-title, div.note p.admonition-title, +div.tip p.admonition-title { + font-weight: bold ; + font-family: sans-serif } + +div.attention p.admonition-title, div.caution p.admonition-title, +div.danger p.admonition-title, div.error p.admonition-title, +div.warning p.admonition-title { + color: red ; + font-weight: bold ; + font-family: sans-serif } + +/* Uncomment (and remove this text!) to get reduced vertical space in + compound paragraphs. 
+div.compound .compound-first, div.compound .compound-middle { + margin-bottom: 0.5em } + +div.compound .compound-last, div.compound .compound-middle { + margin-top: 0.5em } +*/ + +div.dedication { + margin: 2em 5em ; + text-align: center ; + font-style: italic } + +div.dedication p.topic-title { + font-weight: bold ; + font-style: normal } + +div.figure { + margin-left: 2em ; + margin-right: 2em } + +div.footer, div.header { + clear: both; + font-size: smaller } + +div.line-block { + display: block ; + margin-top: 1em ; + margin-bottom: 1em } + +div.line-block div.line-block { + margin-top: 0 ; + margin-bottom: 0 ; + margin-left: 1.5em } + +div.sidebar { + margin: 0 0 0.5em 1em ; + border: medium outset ; + padding: 1em ; + background-color: #ffffee ; + width: 40% ; + float: right ; + clear: right } + +div.sidebar p.rubric { + font-family: sans-serif ; + font-size: medium } + +div.system-messages { + margin: 5em } + +div.system-messages h1 { + color: red } + +div.system-message { + border: medium outset ; + padding: 1em } + +div.system-message p.system-message-title { + color: red ; + font-weight: bold } + +div.topic { + margin: 2em } + +h1.section-subtitle, h2.section-subtitle, h3.section-subtitle, +h4.section-subtitle, h5.section-subtitle, h6.section-subtitle { + margin-top: 0.4em } + +h1.title { + text-align: center } + +h2.subtitle { + text-align: center } + +hr.docutils { + width: 75% } + +img.align-left { + clear: left } + +img.align-right { + clear: right } + +ol.simple, ul.simple { + margin-bottom: 1em } + +ol.arabic { + list-style: decimal } + +ol.loweralpha { + list-style: lower-alpha } + +ol.upperalpha { + list-style: upper-alpha } + +ol.lowerroman { + list-style: lower-roman } + +ol.upperroman { + list-style: upper-roman } + +p.attribution { + text-align: right ; + margin-left: 50% } + +p.caption { + font-style: italic } + +p.credits { + font-style: italic ; + font-size: smaller } + +p.label { + white-space: nowrap } + +p.rubric { + font-weight: bold ; + font-size: larger ; + color: maroon ; + text-align: center } + +p.sidebar-title { + font-family: sans-serif ; + font-weight: bold ; + font-size: larger } + +p.sidebar-subtitle { + font-family: sans-serif ; + font-weight: bold } + +p.topic-title { + font-weight: bold } + +pre.address { + margin-bottom: 0 ; + margin-top: 0 ; + font-family: serif ; + font-size: 100% } + +pre.literal-block, pre.doctest-block { + margin-left: 2em ; + margin-right: 2em } + +span.classifier { + font-family: sans-serif ; + font-style: oblique } + +span.classifier-delimiter { + font-family: sans-serif ; + font-weight: bold } + +span.interpreted { + font-family: sans-serif } + +span.option { + white-space: nowrap } + +span.pre { + white-space: pre } + +span.problematic { + color: red } + +span.section-subtitle { + /* font-size relative to parent (h1..h6 element) */ + font-size: 80% } + +table.citation { + border-left: solid 1px gray; + margin-left: 1px } + +table.docinfo { + margin: 2em 4em } + +table.docutils { + margin-top: 0.5em ; + margin-bottom: 0.5em } + +table.footnote { + border-left: solid 1px black; + margin-left: 1px } + +table.docutils td, table.docutils th, +table.docinfo td, table.docinfo th { + padding-left: 0.5em ; + padding-right: 0.5em ; + vertical-align: top } + +table.docutils th.field-name, table.docinfo th.docinfo-name { + font-weight: bold ; + text-align: left ; + white-space: nowrap ; + padding-left: 0 } + +h1 tt.docutils, h2 tt.docutils, h3 tt.docutils, +h4 tt.docutils, h5 tt.docutils, h6 tt.docutils { + font-size: 100% } + 
+ul.auto-toc { + list-style-type: none } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/html4css1/template.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/html4css1/template.txt Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,8 @@ +%(head_prefix)s +%(head)s +%(stylesheet)s +%(body_prefix)s +%(body_pre_docinfo)s +%(docinfo)s +%(body)s +%(body_suffix)s diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/latex2e/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/latex2e/__init__.py Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,2177 @@ +# $Id: __init__.py 5333 2007-07-10 17:31:28Z grubert $ +# Author: Engelbert Gruber +# Copyright: This module has been placed in the public domain. + +""" +LaTeX2e document tree Writer. +""" + +__docformat__ = 'reStructuredText' + +# code contributions from several people included, thanks to all. +# some named: David Abrahams, Julien Letessier, Lele Gaifax, and others. +# +# convention deactivate code by two # e.g. ##. + +import sys +import time +import re +import string +from types import ListType +from docutils import frontend, nodes, languages, writers, utils +from docutils.writers.newlatex2e import unicode_map + +from docutils.transforms.references import DanglingReferencesVisitor + +class Writer(writers.Writer): + + supported = ('latex','latex2e') + """Formats this writer supports.""" + + settings_spec = ( + 'LaTeX-Specific Options', + 'The LaTeX "--output-encoding" default is "latin-1:strict".', + (('Specify documentclass. Default is "article".', + ['--documentclass'], + {'default': 'article', }), + ('Specify document options. Multiple options can be given, ' + 'separated by commas. Default is "10pt,a4paper".', + ['--documentoptions'], + {'default': '10pt,a4paper', }), + ('Use LaTeX footnotes. LaTeX supports only numbered footnotes (does it?). ' + 'Default: no, uses figures.', + ['--use-latex-footnotes'], + {'default': 0, 'action': 'store_true', + 'validator': frontend.validate_boolean}), + ('Format for footnote references: one of "superscript" or ' + '"brackets". Default is "superscript".', + ['--footnote-references'], + {'choices': ['superscript', 'brackets'], 'default': 'superscript', + 'metavar': '', + 'overrides': 'trim_footnote_reference_space'}), + ('Use LaTeX citations. ' + 'Default: no, uses figures which might get mixed with images.', + ['--use-latex-citations'], + {'default': 0, 'action': 'store_true', + 'validator': frontend.validate_boolean}), + ('Format for block quote attributions: one of "dash" (em-dash ' + 'prefix), "parentheses"/"parens", or "none". Default is "dash".', + ['--attribution'], + {'choices': ['dash', 'parentheses', 'parens', 'none'], + 'default': 'dash', 'metavar': ''}), + ('Specify a stylesheet file. The file will be "input" by latex in ' + 'the document header. Default is no stylesheet (""). ' + 'Overrides --stylesheet-path.', + ['--stylesheet'], + {'default': '', 'metavar': '', + 'overrides': 'stylesheet_path'}), + ('Specify a stylesheet file, relative to the current working ' + 'directory. Overrides --stylesheet.', + ['--stylesheet-path'], + {'metavar': '', 'overrides': 'stylesheet'}), + ('Table of contents by docutils (default) or LaTeX. 
LaTeX (writer) ' + 'supports only one ToC per document, but docutils does not know of ' + 'pagenumbers. LaTeX table of contents also means LaTeX generates ' + 'sectionnumbers.', + ['--use-latex-toc'], + {'default': 0, 'action': 'store_true', + 'validator': frontend.validate_boolean}), + ('Add parts on top of the section hierarchy.', + ['--use-part-section'], + {'default': 0, 'action': 'store_true', + 'validator': frontend.validate_boolean}), + ('Let LaTeX print author and date, do not show it in docutils ' + 'document info.', + ['--use-latex-docinfo'], + {'default': 0, 'action': 'store_true', + 'validator': frontend.validate_boolean}), + ('Use LaTeX abstract environment for the documents abstract.' + 'Per default the abstract is an unnumbered section.', + ['--use-latex-abstract'], + {'default': 0, 'action': 'store_true', + 'validator': frontend.validate_boolean}), + ('Color of any hyperlinks embedded in text ' + '(default: "blue", "0" to disable).', + ['--hyperlink-color'], {'default': 'blue'}), + ('Enable compound enumerators for nested enumerated lists ' + '(e.g. "1.2.a.ii"). Default: disabled.', + ['--compound-enumerators'], + {'default': None, 'action': 'store_true', + 'validator': frontend.validate_boolean}), + ('Disable compound enumerators for nested enumerated lists. This is ' + 'the default.', + ['--no-compound-enumerators'], + {'action': 'store_false', 'dest': 'compound_enumerators'}), + ('Enable section ("." subsection ...) prefixes for compound ' + 'enumerators. This has no effect without --compound-enumerators. ' + 'Default: disabled.', + ['--section-prefix-for-enumerators'], + {'default': None, 'action': 'store_true', + 'validator': frontend.validate_boolean}), + ('Disable section prefixes for compound enumerators. ' + 'This is the default.', + ['--no-section-prefix-for-enumerators'], + {'action': 'store_false', 'dest': 'section_prefix_for_enumerators'}), + ('Set the separator between section number and enumerator ' + 'for compound enumerated lists. Default is "-".', + ['--section-enumerator-separator'], + {'default': '-', 'metavar': ''}), + ('When possibile, use verbatim for literal-blocks. ' + 'Default is to always use the mbox environment.', + ['--use-verbatim-when-possible'], + {'default': 0, 'action': 'store_true', + 'validator': frontend.validate_boolean}), + ('Table style. "standard" with horizontal and vertical lines, ' + '"booktabs" (LaTeX booktabs style) only horizontal lines ' + 'above and below the table and below the header or "nolines". ' + 'Default: "standard"', + ['--table-style'], + {'choices': ['standard', 'booktabs','nolines'], 'default': 'standard', + 'metavar': ''}), + ('LaTeX graphicx package option. ' + 'Possible values are "dvips", "pdftex". "auto" includes LaTeX code ' + 'to use "pdftex" if processing with pdf(la)tex and dvips otherwise. ' + 'Default is no option.', + ['--graphicx-option'], + {'default': ''}), + ('LaTeX font encoding. ' + 'Possible values are "T1", "OT1", "" or some other fontenc option. ' + 'The font encoding influences available symbols, e.g. "<<" as one ' + 'character. Default is "" which leads to package "ae" (a T1 ' + 'emulation using CM fonts).', + ['--font-encoding'], + {'default': ''}), + ('Per default the latex-writer puts the reference title into ' + 'hyperreferences. 
Specify "ref*" or "pageref*" to get the section ' + 'number or the page number.', + ['--reference-label'], + {'default': None, }), + ('Specify style and database for bibtex, for example ' + '"--use-bibtex=mystyle,mydb1,mydb2".', + ['--use-bibtex'], + {'default': None, }), + ),) + + settings_defaults = {'output_encoding': 'latin-1'} + + relative_path_settings = ('stylesheet_path',) + + config_section = 'latex2e writer' + config_section_dependencies = ('writers',) + + visitor_attributes = ("head_prefix", "head", + "body_prefix", "body", "body_suffix") + + output = None + """Final translated form of `document`.""" + + def __init__(self): + writers.Writer.__init__(self) + self.translator_class = LaTeXTranslator + + def translate(self): + visitor = self.translator_class(self.document) + self.document.walkabout(visitor) + self.output = visitor.astext() + # copy parts + for attr in self.visitor_attributes: + setattr(self, attr, getattr(visitor, attr)) + + def assemble_parts(self): + writers.Writer.assemble_parts(self) + for part in self.visitor_attributes: + self.parts[part] = ''.join(getattr(self, part)) + + +""" +Notes on LaTeX +-------------- + +* LaTeX does not support multiple tocs in one document. + (might be no limitation except for docutils documentation) + + The "minitoc" latex package can produce per-chapter tocs in + book and report document classes. + +* width + + * linewidth - width of a line in the local environment + * textwidth - the width of text on the page + + Maybe always use linewidth ? + + *Bug* inside a minipage a (e.g. Sidebar) the linewidth is + not changed, needs fix in docutils so that tables + are not too wide. + + So we add locallinewidth set it initially and + on entering sidebar and reset on exit. +""" + +class Babel: + """Language specifics for LaTeX.""" + # country code by a.schlock. + # partly manually converted from iso and babel stuff, dialects and some + _ISO639_TO_BABEL = { + 'no': 'norsk', #XXX added by hand ( forget about nynorsk?) + 'gd': 'scottish', #XXX added by hand + 'hu': 'magyar', #XXX added by hand + 'pt': 'portuguese',#XXX added by hand + 'sl': 'slovenian', + 'af': 'afrikaans', + 'bg': 'bulgarian', + 'br': 'breton', + 'ca': 'catalan', + 'cs': 'czech', + 'cy': 'welsh', + 'da': 'danish', + 'fr': 'french', + # french, francais, canadien, acadian + 'de': 'ngerman', #XXX rather than german + # ngerman, naustrian, german, germanb, austrian + 'el': 'greek', + 'en': 'english', + # english, USenglish, american, UKenglish, british, canadian + 'eo': 'esperanto', + 'es': 'spanish', + 'et': 'estonian', + 'eu': 'basque', + 'fi': 'finnish', + 'ga': 'irish', + 'gl': 'galician', + 'he': 'hebrew', + 'hr': 'croatian', + 'hu': 'hungarian', + 'is': 'icelandic', + 'it': 'italian', + 'la': 'latin', + 'nl': 'dutch', + 'pl': 'polish', + 'pt': 'portuguese', + 'ro': 'romanian', + 'ru': 'russian', + 'sk': 'slovak', + 'sr': 'serbian', + 'sv': 'swedish', + 'tr': 'turkish', + 'uk': 'ukrainian' + } + + def __init__(self,lang): + self.language = lang + # pdflatex does not produce double quotes for ngerman in tt. 
+ self.double_quote_replacment = None + if re.search('^de',self.language): + #self.quotes = ("\"`", "\"'") + self.quotes = ('{\\glqq}', '{\\grqq}') + self.double_quote_replacment = "{\\dq}" + elif re.search('^it',self.language): + self.quotes = ("``", "''") + self.double_quote_replacment = r'{\char`\"}' + else: + self.quotes = ("``", "''") + self.quote_index = 0 + + def next_quote(self): + q = self.quotes[self.quote_index] + self.quote_index = (self.quote_index+1)%2 + return q + + def quote_quotes(self,text): + t = None + for part in text.split('"'): + if t == None: + t = part + else: + t += self.next_quote() + part + return t + + def double_quotes_in_tt (self,text): + if not self.double_quote_replacment: + return text + return text.replace('"', self.double_quote_replacment) + + def get_language(self): + if self._ISO639_TO_BABEL.has_key(self.language): + return self._ISO639_TO_BABEL[self.language] + else: + # support dialects. + l = self.language.split("_")[0] + if self._ISO639_TO_BABEL.has_key(l): + return self._ISO639_TO_BABEL[l] + return None + + +latex_headings = { + 'optionlist_environment' : [ + '\\newcommand{\\optionlistlabel}[1]{\\bf #1 \\hfill}\n' + '\\newenvironment{optionlist}[1]\n' + '{\\begin{list}{}\n' + ' {\\setlength{\\labelwidth}{#1}\n' + ' \\setlength{\\rightmargin}{1cm}\n' + ' \\setlength{\\leftmargin}{\\rightmargin}\n' + ' \\addtolength{\\leftmargin}{\\labelwidth}\n' + ' \\addtolength{\\leftmargin}{\\labelsep}\n' + ' \\renewcommand{\\makelabel}{\\optionlistlabel}}\n' + '}{\\end{list}}\n', + ], + 'lineblock_environment' : [ + '\\newlength{\\lineblockindentation}\n' + '\\setlength{\\lineblockindentation}{2.5em}\n' + '\\newenvironment{lineblock}[1]\n' + '{\\begin{list}{}\n' + ' {\\setlength{\\partopsep}{\\parskip}\n' + ' \\addtolength{\\partopsep}{\\baselineskip}\n' + ' \\topsep0pt\\itemsep0.15\\baselineskip\\parsep0pt\n' + ' \\leftmargin#1}\n' + ' \\raggedright}\n' + '{\\end{list}}\n' + ], + 'footnote_floats' : [ + '% begin: floats for footnotes tweaking.\n', + '\\setlength{\\floatsep}{0.5em}\n', + '\\setlength{\\textfloatsep}{\\fill}\n', + '\\addtolength{\\textfloatsep}{3em}\n', + '\\renewcommand{\\textfraction}{0.5}\n', + '\\renewcommand{\\topfraction}{0.5}\n', + '\\renewcommand{\\bottomfraction}{0.5}\n', + '\\setcounter{totalnumber}{50}\n', + '\\setcounter{topnumber}{50}\n', + '\\setcounter{bottomnumber}{50}\n', + '% end floats for footnotes\n', + ], + 'some_commands' : [ + '% some commands, that could be overwritten in the style file.\n' + '\\newcommand{\\rubric}[1]' + '{\\subsection*{~\\hfill {\\it #1} \\hfill ~}}\n' + '\\newcommand{\\titlereference}[1]{\\textsl{#1}}\n' + '% end of "some commands"\n', + ] + } + +class DocumentClass: + """Details of a LaTeX document class.""" + + def __init__(self, document_class, with_part=False): + self.document_class = document_class + self._with_part = with_part + + def section(self, level): + """ Return the section name at the given level for the specific + document class. + + Level is 1,2,3..., as level 0 is the title.""" + + sections = [ 'section', 'subsection', 'subsubsection', + 'paragraph', 'subparagraph' ] + if self.document_class in ('book', 'report', 'scrreprt', 'scrbook'): + sections.insert(0, 'chapter') + if self._with_part: + sections.insert(0, 'part') + if level <= len(sections): + return sections[level-1] + else: + return sections[-1] + +class Table: + """ Manage a table while traversing. + Maybe change to a mixin defining the visit/departs, but then + class Table internal variables are in the Translator. 
+ + Table style might be + + * standard: horizontal and vertical lines + * booktabs (requires booktabs latex package): only horizontal lines + * nolines, borderless : no lines + """ + def __init__(self,latex_type,table_style): + self._latex_type = latex_type + self._table_style = table_style + self._open = 0 + # miscellaneous attributes + self._attrs = {} + self._col_width = [] + self._rowspan = [] + self.stubs = [] + + def open(self): + self._open = 1 + self._col_specs = [] + self.caption = None + self._attrs = {} + self._in_head = 0 # maybe context with search + def close(self): + self._open = 0 + self._col_specs = None + self.caption = None + self._attrs = {} + self.stubs = [] + def is_open(self): + return self._open + + def set_table_style(self, table_style): + if not table_style in ('standard','booktabs','borderless','nolines'): + return + self._table_style = table_style + + def used_packages(self): + if self._table_style == 'booktabs': + return '\\usepackage{booktabs}\n' + return '' + def get_latex_type(self): + return self._latex_type + + def set(self,attr,value): + self._attrs[attr] = value + def get(self,attr): + if self._attrs.has_key(attr): + return self._attrs[attr] + return None + def get_vertical_bar(self): + if self._table_style == 'standard': + return '|' + return '' + # horizontal lines are drawn below a row, because we. + def get_opening(self): + if self._latex_type == 'longtable': + # otherwise longtable might move before paragraph and subparagraph + prefix = '\\leavevmode\n' + else: + prefix = '' + return '%s\\begin{%s}[c]' % (prefix, self._latex_type) + def get_closing(self): + line = "" + if self._table_style == 'booktabs': + line = '\\bottomrule\n' + elif self._table_style == 'standard': + lines = '\\hline\n' + return '%s\\end{%s}' % (line,self._latex_type) + + def visit_colspec(self, node): + self._col_specs.append(node) + # "stubs" list is an attribute of the tgroup element: + self.stubs.append(node.attributes.get('stub')) + + def get_colspecs(self): + """ + Return column specification for longtable. + + Assumes reST line length being 80 characters. + Table width is hairy. + + === === + ABC DEF + === === + + usually gets to narrow, therefore we add 1 (fiddlefactor). + """ + width = 80 + + total_width = 0.0 + # first see if we get too wide. 
+ for node in self._col_specs: + colwidth = float(node['colwidth']+1) / width + total_width += colwidth + self._col_width = [] + self._rowspan = [] + # donot make it full linewidth + factor = 0.93 + if total_width > 1.0: + factor /= total_width + bar = self.get_vertical_bar() + latex_table_spec = "" + for node in self._col_specs: + colwidth = factor * float(node['colwidth']+1) / width + self._col_width.append(colwidth+0.005) + self._rowspan.append(0) + latex_table_spec += "%sp{%.3f\\locallinewidth}" % (bar,colwidth+0.005) + return latex_table_spec+bar + + def get_column_width(self): + """ return columnwidth for current cell (not multicell) + """ + return "%.2f\\locallinewidth" % self._col_width[self._cell_in_row-1] + + def visit_thead(self): + self._in_thead = 1 + if self._table_style == 'standard': + return ['\\hline\n'] + elif self._table_style == 'booktabs': + return ['\\toprule\n'] + return [] + def depart_thead(self): + a = [] + #if self._table_style == 'standard': + # a.append('\\hline\n') + if self._table_style == 'booktabs': + a.append('\\midrule\n') + if self._latex_type == 'longtable': + a.append('\\endhead\n') + # for longtable one could add firsthead, foot and lastfoot + self._in_thead = 0 + return a + def visit_row(self): + self._cell_in_row = 0 + def depart_row(self): + res = [' \\\\\n'] + self._cell_in_row = None # remove cell counter + for i in range(len(self._rowspan)): + if (self._rowspan[i]>0): + self._rowspan[i] -= 1 + + if self._table_style == 'standard': + rowspans = [] + for i in range(len(self._rowspan)): + if (self._rowspan[i]<=0): + rowspans.append(i+1) + if len(rowspans)==len(self._rowspan): + res.append('\\hline\n') + else: + cline = '' + rowspans.reverse() + # TODO merge clines + while 1: + try: + c_start = rowspans.pop() + except: + break + cline += '\\cline{%d-%d}\n' % (c_start,c_start) + res.append(cline) + return res + + def set_rowspan(self,cell,value): + try: + self._rowspan[cell] = value + except: + pass + def get_rowspan(self,cell): + try: + return self._rowspan[cell] + except: + return 0 + def get_entry_number(self): + return self._cell_in_row + def visit_entry(self): + self._cell_in_row += 1 + def is_stub_column(self): + if len(self.stubs) >= self._cell_in_row: + return self.stubs[self._cell_in_row-1] + return False + + +class LaTeXTranslator(nodes.NodeVisitor): + + # When options are given to the documentclass, latex will pass them + # to other packages, as done with babel. + # Dummy settings might be taken from document settings + + # Templates + # --------- + + latex_head = '\\documentclass[%s]{%s}\n' + linking = '\\usepackage[colorlinks=%s,linkcolor=%s,urlcolor=%s]{hyperref}\n' + stylesheet = '\\input{%s}\n' + # add a generated on day , machine by user using docutils version. + generator = '% generated by Docutils \n' + # Config setting defaults + # ----------------------- + + # use latex tableofcontents or let docutils do it. + use_latex_toc = 0 + + # TODO: use mixins for different implementations. + # list environment for docinfo. else tabularx + use_optionlist_for_docinfo = 0 # NOT YET IN USE + + # Use compound enumerations (1.A.1.) + compound_enumerators = 0 + + # If using compound enumerations, include section information. + section_prefix_for_enumerators = 0 + + # This is the character that separates the section ("." subsection ...) + # prefix from the regular list enumerator. 
+ section_enumerator_separator = '-' + + # default link color + hyperlink_color = "blue" + + def __init__(self, document): + nodes.NodeVisitor.__init__(self, document) + self.settings = settings = document.settings + self.latex_encoding = self.to_latex_encoding(settings.output_encoding) + self.use_latex_toc = settings.use_latex_toc + self.use_latex_docinfo = settings.use_latex_docinfo + self.use_latex_footnotes = settings.use_latex_footnotes + self._use_latex_citations = settings.use_latex_citations + self._reference_label = settings.reference_label + self.hyperlink_color = settings.hyperlink_color + self.compound_enumerators = settings.compound_enumerators + self.font_encoding = settings.font_encoding + self.section_prefix_for_enumerators = ( + settings.section_prefix_for_enumerators) + self.section_enumerator_separator = ( + settings.section_enumerator_separator.replace('_', '\\_')) + if self.hyperlink_color == '0': + self.hyperlink_color = 'black' + self.colorlinks = 'false' + else: + self.colorlinks = 'true' + + if self.settings.use_bibtex: + self.bibtex = self.settings.use_bibtex.split(",",1) + # TODO avoid errors on not declared citations. + else: + self.bibtex = None + # language: labels, bibliographic_fields, and author_separators. + # to allow writing labes for specific languages. + self.language = languages.get_language(settings.language_code) + self.babel = Babel(settings.language_code) + self.author_separator = self.language.author_separators[0] + self.d_options = self.settings.documentoptions + if self.babel.get_language(): + self.d_options += ',%s' % self.babel.get_language() + + self.d_class = DocumentClass(settings.documentclass, + settings.use_part_section) + # object for a table while proccessing. + self.table_stack = [] + self.active_table = Table('longtable',settings.table_style) + + # HACK. Should have more sophisticated typearea handling. + if settings.documentclass.find('scr') == -1: + self.typearea = '\\usepackage[DIV12]{typearea}\n' + else: + if self.d_options.find('DIV') == -1 and self.d_options.find('BCOR') == -1: + self.typearea = '\\typearea{12}\n' + else: + self.typearea = '' + + if self.font_encoding == 'OT1': + fontenc_header = '' + elif self.font_encoding == '': + fontenc_header = '\\usepackage{ae}\n\\usepackage{aeguill}\n' + else: + fontenc_header = '\\usepackage[%s]{fontenc}\n' % (self.font_encoding,) + if self.latex_encoding.startswith('utf8'): + input_encoding = '\\usepackage{ucs}\n\\usepackage[utf8x]{inputenc}\n' + else: + input_encoding = '\\usepackage[%s]{inputenc}\n' % self.latex_encoding + if self.settings.graphicx_option == '': + self.graphicx_package = '\\usepackage{graphicx}\n' + elif self.settings.graphicx_option.lower() == 'auto': + self.graphicx_package = '\n'.join( + ('%Check if we are compiling under latex or pdflatex', + '\\ifx\\pdftexversion\\undefined', + ' \\usepackage{graphicx}', + '\\else', + ' \\usepackage[pdftex]{graphicx}', + '\\fi\n')) + else: + self.graphicx_package = ( + '\\usepackage[%s]{graphicx}\n' % self.settings.graphicx_option) + + self.head_prefix = [ + self.latex_head % (self.d_options,self.settings.documentclass), + '\\usepackage{babel}\n', # language is in documents settings. + fontenc_header, + '\\usepackage{shortvrb}\n', # allows verb in footnotes. + input_encoding, + # * tabularx: for docinfo, automatic width of columns, always on one page. + '\\usepackage{tabularx}\n', + '\\usepackage{longtable}\n', + self.active_table.used_packages(), + # possible other packages. 
+ # * fancyhdr + # * ltxtable is a combination of tabularx and longtable (pagebreaks). + # but ?? + # + # extra space between text in tables and the line above them + '\\setlength{\\extrarowheight}{2pt}\n', + '\\usepackage{amsmath}\n', # what fore amsmath. + self.graphicx_package, + '\\usepackage{color}\n', + '\\usepackage{multirow}\n', + '\\usepackage{ifthen}\n', # before hyperref! + self.linking % (self.colorlinks, self.hyperlink_color, self.hyperlink_color), + self.typearea, + self.generator, + # latex lengths + '\\newlength{\\admonitionwidth}\n', + '\\setlength{\\admonitionwidth}{0.9\\textwidth}\n' + # width for docinfo tablewidth + '\\newlength{\\docinfowidth}\n', + '\\setlength{\\docinfowidth}{0.9\\textwidth}\n' + # linewidth of current environment, so tables are not wider + # than the sidebar: using locallinewidth seems to defer evaluation + # of linewidth, this is fixing it. + '\\newlength{\\locallinewidth}\n', + # will be set later. + ] + self.head_prefix.extend( latex_headings['optionlist_environment'] ) + self.head_prefix.extend( latex_headings['lineblock_environment'] ) + self.head_prefix.extend( latex_headings['footnote_floats'] ) + self.head_prefix.extend( latex_headings['some_commands'] ) + ## stylesheet is last: so it might be possible to overwrite defaults. + stylesheet = utils.get_stylesheet_reference(settings) + if stylesheet: + settings.record_dependencies.add(stylesheet) + self.head_prefix.append(self.stylesheet % (stylesheet)) + + if self.linking: # and maybe check for pdf + self.pdfinfo = [ ] + self.pdfauthor = None + # pdftitle, pdfsubject, pdfauthor, pdfkeywords, + # pdfcreator, pdfproducer + else: + self.pdfinfo = None + # NOTE: Latex wants a date and an author, rst puts this into + # docinfo, so normally we do not want latex author/date handling. + # latex article has its own handling of date and author, deactivate. + # self.astext() adds \title{...} \author{...} \date{...}, even if the + # "..." are empty strings. + self.head = [ ] + # separate title, so we can appen subtitle. + self.title = '' + # if use_latex_docinfo: collects lists of author/organization/contact/address lines + self.author_stack = [] + self.date = '' + + self.body_prefix = ['\\raggedbottom\n'] + self.body = [] + self.body_suffix = ['\n'] + self.section_level = 0 + self.context = [] + self.topic_classes = [] + # column specification for tables + self.table_caption = None + + # Flags to encode + # --------------- + # verbatim: to tell encode not to encode. + self.verbatim = 0 + # insert_newline: to tell encode to replace blanks by "~". + self.insert_none_breaking_blanks = 0 + # insert_newline: to tell encode to add latex newline. + self.insert_newline = 0 + # mbox_newline: to tell encode to add mbox and newline. + self.mbox_newline = 0 + # inside citation reference labels underscores dont need to be escaped. + self.inside_citation_reference_label = 0 + + # Stack of section counters so that we don't have to use_latex_toc. + # This will grow and shrink as processing occurs. + # Initialized for potential first-level sections. + self._section_number = [0] + + # The current stack of enumerations so that we can expand + # them into a compound enumeration. + self._enumeration_counters = [] + + # The maximum number of enumeration counters we've used. + # If we go beyond this number, we need to create a new + # counter; otherwise, just reuse an old one. + self._max_enumeration_counters = 0 + + self._bibitems = [] + + # docinfo. + self.docinfo = None + # inside literal block: no quote mangling. 
+ self.literal_block = 0 + self.literal_block_stack = [] + self.literal = 0 + # true when encoding in math mode + self.mathmode = 0 + + def to_latex_encoding(self,docutils_encoding): + """ + Translate docutils encoding name into latex's. + + Default fallback method is remove "-" and "_" chars from docutils_encoding. + + """ + tr = { "iso-8859-1": "latin1", # west european + "iso-8859-2": "latin2", # east european + "iso-8859-3": "latin3", # esperanto, maltese + "iso-8859-4": "latin4", # north european,scandinavian, baltic + "iso-8859-5": "iso88595", # cyrillic (ISO) + "iso-8859-9": "latin5", # turkish + "iso-8859-15": "latin9", # latin9, update to latin1. + "mac_cyrillic": "maccyr", # cyrillic (on Mac) + "windows-1251": "cp1251", # cyrillic (on Windows) + "koi8-r": "koi8-r", # cyrillic (Russian) + "koi8-u": "koi8-u", # cyrillic (Ukrainian) + "windows-1250": "cp1250", # + "windows-1252": "cp1252", # + "us-ascii": "ascii", # ASCII (US) + # unmatched encodings + #"": "applemac", + #"": "ansinew", # windows 3.1 ansi + #"": "ascii", # ASCII encoding for the range 32--127. + #"": "cp437", # dos latine us + #"": "cp850", # dos latin 1 + #"": "cp852", # dos latin 2 + #"": "decmulti", + #"": "latin10", + #"iso-8859-6": "" # arabic + #"iso-8859-7": "" # greek + #"iso-8859-8": "" # hebrew + #"iso-8859-10": "" # latin6, more complete iso-8859-4 + } + if tr.has_key(docutils_encoding.lower()): + return tr[docutils_encoding.lower()] + # convert: latin-1 and utf-8 and similar things + return docutils_encoding.replace("_", "").replace("-", "").lower() + + def language_label(self, docutil_label): + return self.language.labels[docutil_label] + + latex_equivalents = { + u'\u00A0' : '~', + u'\u2013' : '{--}', + u'\u2014' : '{---}', + u'\u2018' : '`', + u'\u2019' : '\'', + u'\u201A' : ',', + u'\u201C' : '``', + u'\u201D' : '\'\'', + u'\u201E' : ',,', + u'\u2020' : '{\\dag}', + u'\u2021' : '{\\ddag}', + u'\u2026' : '{\\dots}', + u'\u2122' : '{\\texttrademark}', + u'\u21d4' : '{$\\Leftrightarrow$}', + # greek alphabet ? + } + + def unicode_to_latex(self,text): + # see LaTeX codec + # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/252124 + # Only some special chracters are translated, for documents with many + # utf-8 chars one should use the LaTeX unicode package. + for uchar in self.latex_equivalents.keys(): + text = text.replace(uchar,self.latex_equivalents[uchar]) + return text + + def ensure_math(self, text): + if not self.__dict__.has_key('ensure_math_re'): + chars = { + # lnot,pm,twosuperior,threesuperior,mu,onesuperior,times,div + 'latin1' : '\xac\xb1\xb2\xb3\xb5\xb9\xd7\xf7' , + # also latin5 and latin9 + } + self.ensure_math_re = re.compile('([%s])' % chars['latin1']) + text = self.ensure_math_re.sub(r'\\ensuremath{\1}', text) + return text + + def encode(self, text): + """ + Encode special characters (``# $ % & ~ _ ^ \ { }``) in `text` & return + """ + # Escaping with a backslash does not help with backslashes, ~ and ^. + + # < > are only available in math-mode or tt font. (really ?) + # $ starts math- mode. + # AND quotes + if self.verbatim: + return text + # compile the regexps once. do it here so one can see them. + # + # first the braces. + if not self.__dict__.has_key('encode_re_braces'): + self.encode_re_braces = re.compile(r'([{}])') + text = self.encode_re_braces.sub(r'{\\\1}',text) + if not self.__dict__.has_key('encode_re_bslash'): + # find backslash: except in the form '{\{}' or '{\}}'. 
+ self.encode_re_bslash = re.compile(r'(?", '{\\textgreater}') + # then + text = text.replace("&", '{\\&}') + # the ^: + # * verb|^| does not work in mbox. + # * mathmode has wedge. hat{~} would also work. + # text = text.replace("^", '{\\ensuremath{^\\wedge}}') + text = text.replace("^", '{\\textasciicircum}') + text = text.replace("%", '{\\%}') + text = text.replace("#", '{\\#}') + text = text.replace("~", '{\\textasciitilde}') + # Separate compound characters, e.g. "--" to "-{}-". (The + # actual separation is done later; see below.) + separate_chars = '-' + if self.literal_block or self.literal: + # In monospace-font, we also separate ",,", "``" and "''" + # and some other characters which can't occur in + # non-literal text. + separate_chars += ',`\'"<>' + # pdflatex does not produce doublequotes for ngerman. + text = self.babel.double_quotes_in_tt(text) + if self.font_encoding == 'OT1': + # We're using OT1 font-encoding and have to replace + # underscore by underlined blank, because this has + # correct width. + text = text.replace('_', '{\\underline{ }}') + # And the tt-backslash doesn't work in OT1, so we use + # a mirrored slash. + text = text.replace('\\textbackslash', '\\reflectbox{/}') + else: + text = text.replace('_', '{\\_}') + else: + text = self.babel.quote_quotes(text) + if not self.inside_citation_reference_label: + text = text.replace("_", '{\\_}') + for char in separate_chars * 2: + # Do it twice ("* 2") becaues otherwise we would replace + # "---" by "-{}--". + text = text.replace(char + char, char + '{}' + char) + if self.insert_newline or self.literal_block: + # Insert a blank before the newline, to avoid + # ! LaTeX Error: There's no line here to end. + text = text.replace("\n", '~\\\\\n') + elif self.mbox_newline: + if self.literal_block: + closings = "}" * len(self.literal_block_stack) + openings = "".join(self.literal_block_stack) + else: + closings = "" + openings = "" + text = text.replace("\n", "%s}\\\\\n\\mbox{%s" % (closings,openings)) + text = text.replace('[', '{[}').replace(']', '{]}') + if self.insert_none_breaking_blanks: + text = text.replace(' ', '~') + if self.latex_encoding != 'utf8': + text = self.unicode_to_latex(text) + text = self.ensure_math(text) + return text + + def attval(self, text, + whitespace=re.compile('[\n\r\t\v\f]')): + """Cleanse, encode, and return attribute value text.""" + return self.encode(whitespace.sub(' ', text)) + + def astext(self): + if self.pdfinfo is not None and self.pdfauthor: + self.pdfinfo.append('pdfauthor={%s}' % self.pdfauthor) + if self.pdfinfo: + pdfinfo = '\\hypersetup{\n' + ',\n'.join(self.pdfinfo) + '\n}\n' + else: + pdfinfo = '' + head = '\\title{%s}\n\\author{%s}\n\\date{%s}\n' % \ + (self.title, + ' \\and\n'.join(['~\\\\\n'.join(author_lines) + for author_lines in self.author_stack]), + self.date) + return ''.join(self.head_prefix + [head] + self.head + [pdfinfo] + + self.body_prefix + self.body + self.body_suffix) + + def visit_Text(self, node): + self.body.append(self.encode(node.astext())) + + def depart_Text(self, node): + pass + + def visit_address(self, node): + self.visit_docinfo_item(node, 'address') + + def depart_address(self, node): + self.depart_docinfo_item(node) + + def visit_admonition(self, node, name=''): + self.body.append('\\begin{center}\\begin{sffamily}\n') + self.body.append('\\fbox{\\parbox{\\admonitionwidth}{\n') + if name: + self.body.append('\\textbf{\\large '+ self.language.labels[name] + '}\n'); + self.body.append('\\vspace{2mm}\n') + + + def depart_admonition(self, 
node=None): + self.body.append('}}\n') # end parbox fbox + self.body.append('\\end{sffamily}\n\\end{center}\n'); + + def visit_attention(self, node): + self.visit_admonition(node, 'attention') + + def depart_attention(self, node): + self.depart_admonition() + + def visit_author(self, node): + self.visit_docinfo_item(node, 'author') + + def depart_author(self, node): + self.depart_docinfo_item(node) + + def visit_authors(self, node): + # not used: visit_author is called anyway for each author. + pass + + def depart_authors(self, node): + pass + + def visit_block_quote(self, node): + self.body.append( '\\begin{quote}\n') + + def depart_block_quote(self, node): + self.body.append( '\\end{quote}\n') + + def visit_bullet_list(self, node): + if 'contents' in self.topic_classes: + if self.use_latex_toc: + raise nodes.SkipNode + self.body.append( '\\begin{list}{}{}\n' ) + else: + self.body.append( '\\begin{itemize}\n' ) + + def depart_bullet_list(self, node): + if 'contents' in self.topic_classes: + self.body.append( '\\end{list}\n' ) + else: + self.body.append( '\\end{itemize}\n' ) + + # Imperfect superscript/subscript handling: mathmode italicizes + # all letters by default. + def visit_superscript(self, node): + self.body.append('$^{') + self.mathmode = 1 + + def depart_superscript(self, node): + self.body.append('}$') + self.mathmode = 0 + + def visit_subscript(self, node): + self.body.append('$_{') + self.mathmode = 1 + + def depart_subscript(self, node): + self.body.append('}$') + self.mathmode = 0 + + def visit_caption(self, node): + self.body.append( '\\caption{' ) + + def depart_caption(self, node): + self.body.append('}') + + def visit_caution(self, node): + self.visit_admonition(node, 'caution') + + def depart_caution(self, node): + self.depart_admonition() + + def visit_title_reference(self, node): + self.body.append( '\\titlereference{' ) + + def depart_title_reference(self, node): + self.body.append( '}' ) + + def visit_citation(self, node): + # TODO maybe use cite bibitems + if self._use_latex_citations: + self.context.append(len(self.body)) + else: + self.body.append('\\begin{figure}[b]') + for id in node['ids']: + self.body.append('\\hypertarget{%s}' % id) + + def depart_citation(self, node): + if self._use_latex_citations: + size = self.context.pop() + label = self.body[size] + text = ''.join(self.body[size+1:]) + del self.body[size:] + self._bibitems.append([label, text]) + else: + self.body.append('\\end{figure}\n') + + def visit_citation_reference(self, node): + if self._use_latex_citations: + self.body.append('\\cite{') + self.inside_citation_reference_label = 1 + else: + href = '' + if node.has_key('refid'): + href = node['refid'] + elif node.has_key('refname'): + href = self.document.nameids[node['refname']] + self.body.append('[\\hyperlink{%s}{' % href) + + def depart_citation_reference(self, node): + if self._use_latex_citations: + self.body.append('}') + self.inside_citation_reference_label = 0 + else: + self.body.append('}]') + + def visit_classifier(self, node): + self.body.append( '(\\textbf{' ) + + def depart_classifier(self, node): + self.body.append( '})\n' ) + + def visit_colspec(self, node): + self.active_table.visit_colspec(node) + + def depart_colspec(self, node): + pass + + def visit_comment(self, node): + # Escape end of line by a new comment start in comment text. 
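+ # NOTE (editorial): e.g. a two-line comment body "line one\nline two" is
+ # written out as "% line one" and "% line two", so every line of the
+ # generated LaTeX stays commented out.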
+ self.body.append('%% %s \n' % node.astext().replace('\n', '\n% ')) + raise nodes.SkipNode + + def visit_compound(self, node): + pass + + def depart_compound(self, node): + pass + + def visit_contact(self, node): + self.visit_docinfo_item(node, 'contact') + + def depart_contact(self, node): + self.depart_docinfo_item(node) + + def visit_container(self, node): + pass + + def depart_container(self, node): + pass + + def visit_copyright(self, node): + self.visit_docinfo_item(node, 'copyright') + + def depart_copyright(self, node): + self.depart_docinfo_item(node) + + def visit_danger(self, node): + self.visit_admonition(node, 'danger') + + def depart_danger(self, node): + self.depart_admonition() + + def visit_date(self, node): + self.visit_docinfo_item(node, 'date') + + def depart_date(self, node): + self.depart_docinfo_item(node) + + def visit_decoration(self, node): + pass + + def depart_decoration(self, node): + pass + + def visit_definition(self, node): + pass + + def depart_definition(self, node): + self.body.append('\n') + + def visit_definition_list(self, node): + self.body.append( '\\begin{description}\n' ) + + def depart_definition_list(self, node): + self.body.append( '\\end{description}\n' ) + + def visit_definition_list_item(self, node): + pass + + def depart_definition_list_item(self, node): + pass + + def visit_description(self, node): + self.body.append( ' ' ) + + def depart_description(self, node): + pass + + def visit_docinfo(self, node): + self.docinfo = [] + self.docinfo.append('%' + '_'*75 + '\n') + self.docinfo.append('\\begin{center}\n') + self.docinfo.append('\\begin{tabularx}{\\docinfowidth}{lX}\n') + + def depart_docinfo(self, node): + self.docinfo.append('\\end{tabularx}\n') + self.docinfo.append('\\end{center}\n') + self.body = self.docinfo + self.body + # clear docinfo, so field names are no longer appended. + self.docinfo = None + + def visit_docinfo_item(self, node, name): + if name == 'author': + if not self.pdfinfo == None: + if not self.pdfauthor: + self.pdfauthor = self.attval(node.astext()) + else: + self.pdfauthor += self.author_separator + self.attval(node.astext()) + if self.use_latex_docinfo: + if name in ('author', 'organization', 'contact', 'address'): + # We attach these to the last author. If any of them precedes + # the first author, put them in a separate "author" group (for + # no better semantics). 
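+ # NOTE (editorial): author_stack therefore becomes a list of author
+ # groups, each a list of encoded lines; astext() joins the lines of a
+ # group with "~\\" and the groups with "\and" inside \author{...}.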
+ if name == 'author' or not self.author_stack: + self.author_stack.append([]) + if name == 'address': # newlines are meaningful + self.insert_newline = 1 + text = self.encode(node.astext()) + self.insert_newline = 0 + else: + text = self.attval(node.astext()) + self.author_stack[-1].append(text) + raise nodes.SkipNode + elif name == 'date': + self.date = self.attval(node.astext()) + raise nodes.SkipNode + self.docinfo.append('\\textbf{%s}: &\n\t' % self.language_label(name)) + if name == 'address': + self.insert_newline = 1 + self.docinfo.append('{\\raggedright\n') + self.context.append(' } \\\\\n') + else: + self.context.append(' \\\\\n') + self.context.append(self.docinfo) + self.context.append(len(self.body)) + + def depart_docinfo_item(self, node): + size = self.context.pop() + dest = self.context.pop() + tail = self.context.pop() + tail = self.body[size:] + [tail] + del self.body[size:] + dest.extend(tail) + # for address we did set insert_newline + self.insert_newline = 0 + + def visit_doctest_block(self, node): + self.body.append( '\\begin{verbatim}' ) + self.verbatim = 1 + + def depart_doctest_block(self, node): + self.body.append( '\\end{verbatim}\n' ) + self.verbatim = 0 + + def visit_document(self, node): + self.body_prefix.append('\\begin{document}\n') + # titled document? + if self.use_latex_docinfo or len(node) and isinstance(node[0], nodes.title): + self.body_prefix.append('\\maketitle\n') + # alternative use titlepage environment. + # \begin{titlepage} + # ... + self.body.append('\n\\setlength{\\locallinewidth}{\\linewidth}\n') + + def depart_document(self, node): + # TODO insertion point of bibliography should none automatic. + if self._use_latex_citations and len(self._bibitems)>0: + if not self.bibtex: + widest_label = "" + for bi in self._bibitems: + if len(widest_label) self._max_enumeration_counters: + self._max_enumeration_counters = len(self._enumeration_counters) + self.body.append('\\newcounter{%s}\n' % counter_name) + else: + self.body.append('\\setcounter{%s}{0}\n' % counter_name) + + self.body.append('\\begin{list}{%s\\%s{%s}%s}\n' % \ + (enum_prefix,enum_type,counter_name,enum_suffix)) + self.body.append('{\n') + self.body.append('\\usecounter{%s}\n' % counter_name) + # set start after usecounter, because it initializes to zero. + if node.has_key('start'): + self.body.append('\\addtocounter{%s}{%d}\n' \ + % (counter_name,node['start']-1)) + ## set rightmargin equal to leftmargin + self.body.append('\\setlength{\\rightmargin}{\\leftmargin}\n') + self.body.append('}\n') + + def depart_enumerated_list(self, node): + self.body.append('\\end{list}\n') + self._enumeration_counters.pop() + + def visit_error(self, node): + self.visit_admonition(node, 'error') + + def depart_error(self, node): + self.depart_admonition() + + def visit_field(self, node): + # real output is done in siblings: _argument, _body, _name + pass + + def depart_field(self, node): + self.body.append('\n') + ##self.body.append('%[depart_field]\n') + + def visit_field_argument(self, node): + self.body.append('%[visit_field_argument]\n') + + def depart_field_argument(self, node): + self.body.append('%[depart_field_argument]\n') + + def visit_field_body(self, node): + # BUG by attach as text we loose references. 
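+ # NOTE (editorial): the field body is flattened with astext() and encoded
+ # as plain text here, so inline markup and references inside docinfo
+ # field bodies do not survive into the LaTeX output.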
+ if self.docinfo: + self.docinfo.append('%s \\\\\n' % self.encode(node.astext())) + raise nodes.SkipNode + # BUG: what happens if not docinfo + + def depart_field_body(self, node): + self.body.append( '\n' ) + + def visit_field_list(self, node): + if not self.docinfo: + self.body.append('\\begin{quote}\n') + self.body.append('\\begin{description}\n') + + def depart_field_list(self, node): + if not self.docinfo: + self.body.append('\\end{description}\n') + self.body.append('\\end{quote}\n') + + def visit_field_name(self, node): + # BUG this duplicates docinfo_item + if self.docinfo: + self.docinfo.append('\\textbf{%s}: &\n\t' % self.encode(node.astext())) + raise nodes.SkipNode + else: + self.body.append('\\item [') + + def depart_field_name(self, node): + if not self.docinfo: + self.body.append(':]') + + def visit_figure(self, node): + if (not node.attributes.has_key('align') or + node.attributes['align'] == 'center'): + # centering does not add vertical space like center. + align = '\n\\centering' + align_end = '' + else: + # TODO non vertical space for other alignments. + align = '\\begin{flush%s}' % node.attributes['align'] + align_end = '\\end{flush%s}' % node.attributes['align'] + self.body.append( '\\begin{figure}[htbp]%s\n' % align ) + self.context.append( '%s\\end{figure}\n' % align_end ) + + def depart_figure(self, node): + self.body.append( self.context.pop() ) + + def visit_footer(self, node): + self.context.append(len(self.body)) + + def depart_footer(self, node): + start = self.context.pop() + footer = (['\n\\begin{center}\small\n'] + + self.body[start:] + ['\n\\end{center}\n']) + self.body_suffix[:0] = footer + del self.body[start:] + + def visit_footnote(self, node): + if self.use_latex_footnotes: + num,text = node.astext().split(None,1) + num = self.encode(num.strip()) + self.body.append('\\footnotetext['+num+']') + self.body.append('{') + else: + self.body.append('\\begin{figure}[b]') + for id in node['ids']: + self.body.append('\\hypertarget{%s}' % id) + + def depart_footnote(self, node): + if self.use_latex_footnotes: + self.body.append('}\n') + else: + self.body.append('\\end{figure}\n') + + def visit_footnote_reference(self, node): + if self.use_latex_footnotes: + self.body.append("\\footnotemark["+self.encode(node.astext())+"]") + raise nodes.SkipNode + href = '' + if node.has_key('refid'): + href = node['refid'] + elif node.has_key('refname'): + href = self.document.nameids[node['refname']] + format = self.settings.footnote_references + if format == 'brackets': + suffix = '[' + self.context.append(']') + elif format == 'superscript': + suffix = '\\raisebox{.5em}[0em]{\\scriptsize' + self.context.append('}') + else: # shouldn't happen + raise AssertionError('Illegal footnote reference format.') + self.body.append('%s\\hyperlink{%s}{' % (suffix,href)) + + def depart_footnote_reference(self, node): + if self.use_latex_footnotes: + return + self.body.append('}%s' % self.context.pop()) + + # footnote/citation label + def label_delim(self, node, bracket, superscript): + if isinstance(node.parent, nodes.footnote): + if self.use_latex_footnotes: + raise nodes.SkipNode + if self.settings.footnote_references == 'brackets': + self.body.append(bracket) + else: + self.body.append(superscript) + else: + assert isinstance(node.parent, nodes.citation) + if not self._use_latex_citations: + self.body.append(bracket) + + def visit_label(self, node): + self.label_delim(node, '[', '$^{') + + def depart_label(self, node): + self.label_delim(node, ']', '}$') + + # elements generated by 
the framework e.g. section numbers. + def visit_generated(self, node): + pass + + def depart_generated(self, node): + pass + + def visit_header(self, node): + self.context.append(len(self.body)) + + def depart_header(self, node): + start = self.context.pop() + self.body_prefix.append('\n\\verb|begin_header|\n') + self.body_prefix.extend(self.body[start:]) + self.body_prefix.append('\n\\verb|end_header|\n') + del self.body[start:] + + def visit_hint(self, node): + self.visit_admonition(node, 'hint') + + def depart_hint(self, node): + self.depart_admonition() + + def latex_image_length(self, width_str): + match = re.match('(\d*\.?\d*)\s*(\S*)', width_str) + if not match: + # fallback + return width_str + res = width_str + amount, unit = match.groups()[:2] + if unit == "px": + # LaTeX does not know pixels but points + res = "%spt" % amount + elif unit == "%": + res = "%.3f\\linewidth" % (float(amount)/100.0) + return res + + def visit_image(self, node): + attrs = node.attributes + # Add image URI to dependency list, assuming that it's + # referring to a local file. + self.settings.record_dependencies.add(attrs['uri']) + pre = [] # in reverse order + post = [] + include_graphics_options = [] + inline = isinstance(node.parent, nodes.TextElement) + if attrs.has_key('scale'): + # Could also be done with ``scale`` option to + # ``\includegraphics``; doing it this way for consistency. + pre.append('\\scalebox{%f}{' % (attrs['scale'] / 100.0,)) + post.append('}') + if attrs.has_key('width'): + include_graphics_options.append('width=%s' % ( + self.latex_image_length(attrs['width']), )) + if attrs.has_key('height'): + include_graphics_options.append('height=%s' % ( + self.latex_image_length(attrs['height']), )) + if attrs.has_key('align'): + align_prepost = { + # By default latex aligns the top of an image. + (1, 'top'): ('', ''), + (1, 'middle'): ('\\raisebox{-0.5\\height}{', '}'), + (1, 'bottom'): ('\\raisebox{-\\height}{', '}'), + (0, 'center'): ('{\\hfill', '\\hfill}'), + # These 2 don't exactly do the right thing. The image should + # be floated alongside the paragraph. See + # http://www.w3.org/TR/html4/struct/objects.html#adef-align-IMG + (0, 'left'): ('{', '\\hfill}'), + (0, 'right'): ('{\\hfill', '}'),} + try: + pre.append(align_prepost[inline, attrs['align']][0]) + post.append(align_prepost[inline, attrs['align']][1]) + except KeyError: + pass # XXX complain here? + if not inline: + pre.append('\n') + post.append('\n') + pre.reverse() + self.body.extend( pre ) + options = '' + if len(include_graphics_options)>0: + options = '[%s]' % (','.join(include_graphics_options)) + self.body.append( '\\includegraphics%s{%s}' % ( + options, attrs['uri'] ) ) + self.body.extend( post ) + + def depart_image(self, node): + pass + + def visit_important(self, node): + self.visit_admonition(node, 'important') + + def depart_important(self, node): + self.depart_admonition() + + def visit_interpreted(self, node): + # @@@ Incomplete, pending a proper implementation on the + # Parser/Reader end. 
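+ # NOTE (editorial): until then, interpreted text is rendered exactly like
+ # an inline literal, i.e. wrapped in \texttt{...}.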
+ self.visit_literal(node) + + def depart_interpreted(self, node): + self.depart_literal(node) + + def visit_legend(self, node): + self.body.append('{\\small ') + + def depart_legend(self, node): + self.body.append('}') + + def visit_line(self, node): + self.body.append('\item[] ') + + def depart_line(self, node): + self.body.append('\n') + + def visit_line_block(self, node): + if isinstance(node.parent, nodes.line_block): + self.body.append('\\item[] \n' + '\\begin{lineblock}{\\lineblockindentation}\n') + else: + self.body.append('\n\\begin{lineblock}{0em}\n') + + def depart_line_block(self, node): + self.body.append('\\end{lineblock}\n') + + def visit_list_item(self, node): + # Append "{}" in case the next character is "[", which would break + # LaTeX's list environment (no numbering and the "[" is not printed). + self.body.append('\\item {} ') + + def depart_list_item(self, node): + self.body.append('\n') + + def visit_literal(self, node): + self.literal = 1 + self.body.append('\\texttt{') + + def depart_literal(self, node): + self.body.append('}') + self.literal = 0 + + def visit_literal_block(self, node): + """ + Render a literal-block. + + Literal blocks are used for "::"-prefixed literal-indented + blocks of text, where the inline markup is not recognized, + but are also the product of the parsed-literal directive, + where the markup is respected. + """ + # In both cases, we want to use a typewriter/monospaced typeface. + # For "real" literal-blocks, we can use \verbatim, while for all + # the others we must use \mbox. + # + # We can distinguish between the two kinds by the number of + # siblings that compose this node: if it is composed by a + # single element, it's surely either a real one or a + # parsed-literal that does not contain any markup. + # + if not self.active_table.is_open(): + # no quote inside tables, to avoid vertical space between + # table border and literal block. + # BUG: fails if normal text preceeds the literal block. + self.body.append('\\begin{quote}') + self.context.append('\\end{quote}\n') + else: + self.body.append('\n') + self.context.append('\n') + if (self.settings.use_verbatim_when_possible and (len(node) == 1) + # in case of a parsed-literal containing just a "**bold**" word: + and isinstance(node[0], nodes.Text)): + self.verbatim = 1 + self.body.append('\\begin{verbatim}\n') + else: + self.literal_block = 1 + self.insert_none_breaking_blanks = 1 + self.body.append('{\\ttfamily \\raggedright \\noindent\n') + # * obey..: is from julien and never worked for me (grubert). + # self.body.append('{\\obeylines\\obeyspaces\\ttfamily\n') + + def depart_literal_block(self, node): + if self.verbatim: + self.body.append('\n\\end{verbatim}\n') + self.verbatim = 0 + else: + self.body.append('\n}') + self.insert_none_breaking_blanks = 0 + self.literal_block = 0 + # obey end: self.body.append('}\n') + self.body.append(self.context.pop()) + + def visit_meta(self, node): + self.body.append('[visit_meta]\n') + # BUG maybe set keywords for pdf + ##self.head.append(self.starttag(node, 'meta', **node.attributes)) + + def depart_meta(self, node): + self.body.append('[depart_meta]\n') + + def visit_note(self, node): + self.visit_admonition(node, 'note') + + def depart_note(self, node): + self.depart_admonition() + + def visit_option(self, node): + if self.context[-1]: + # this is not the first option + self.body.append(', ') + + def depart_option(self, node): + # flag tha the first option is done. 
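+ # NOTE (editorial): visit_option_group() pushes a 0 onto self.context as
+ # a per-group counter; once it is non-zero, visit_option() prefixes ", "
+ # so that options after the first are comma-separated.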
+ self.context[-1] += 1 + + def visit_option_argument(self, node): + """The delimiter betweeen an option and its argument.""" + self.body.append(node.get('delimiter', ' ')) + + def depart_option_argument(self, node): + pass + + def visit_option_group(self, node): + self.body.append('\\item [') + # flag for first option + self.context.append(0) + + def depart_option_group(self, node): + self.context.pop() # the flag + self.body.append('] ') + + def visit_option_list(self, node): + self.body.append('\\begin{optionlist}{3cm}\n') + + def depart_option_list(self, node): + self.body.append('\\end{optionlist}\n') + + def visit_option_list_item(self, node): + pass + + def depart_option_list_item(self, node): + pass + + def visit_option_string(self, node): + ##self.body.append(self.starttag(node, 'span', '', CLASS='option')) + pass + + def depart_option_string(self, node): + ##self.body.append('') + pass + + def visit_organization(self, node): + self.visit_docinfo_item(node, 'organization') + + def depart_organization(self, node): + self.depart_docinfo_item(node) + + def visit_paragraph(self, node): + index = node.parent.index(node) + if not ('contents' in self.topic_classes or + (isinstance(node.parent, nodes.compound) and + index > 0 and + not isinstance(node.parent[index - 1], nodes.paragraph) and + not isinstance(node.parent[index - 1], nodes.compound))): + self.body.append('\n') + + def depart_paragraph(self, node): + self.body.append('\n') + + def visit_problematic(self, node): + self.body.append('{\\color{red}\\bfseries{}') + + def depart_problematic(self, node): + self.body.append('}') + + def visit_raw(self, node): + if 'latex' in node.get('format', '').split(): + self.body.append(node.astext()) + raise nodes.SkipNode + + def visit_reference(self, node): + # BUG: hash_char "#" is trouble some in LaTeX. + # mbox and other environment do not like the '#'. 
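+ # NOTE (editorial): e.g. a refuri of "page.html#anchor" is emitted as
+ # \href{page.html\#anchor}{...}, so the "#" reaches LaTeX already escaped.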
+ hash_char = '\\#' + if node.has_key('refuri'): + href = node['refuri'].replace('#',hash_char) + elif node.has_key('refid'): + href = hash_char + node['refid'] + elif node.has_key('refname'): + href = hash_char + self.document.nameids[node['refname']] + else: + raise AssertionError('Unknown reference.') + self.body.append('\\href{%s}{' % href) + if self._reference_label and not node.has_key('refuri'): + self.body.append('\\%s{%s}}' % (self._reference_label, + href.replace(hash_char, ''))) + raise nodes.SkipNode + + def depart_reference(self, node): + self.body.append('}') + + def visit_revision(self, node): + self.visit_docinfo_item(node, 'revision') + + def depart_revision(self, node): + self.depart_docinfo_item(node) + + def visit_section(self, node): + self.section_level += 1 + # Initialize counter for potential subsections: + self._section_number.append(0) + # Counter for this section's level (initialized by parent section): + self._section_number[self.section_level - 1] += 1 + + def depart_section(self, node): + # Remove counter for potential subsections: + self._section_number.pop() + self.section_level -= 1 + + def visit_sidebar(self, node): + # BUG: this is just a hack to make sidebars render something + self.body.append('\n\\setlength{\\locallinewidth}{0.9\\admonitionwidth}\n') + self.body.append('\\begin{center}\\begin{sffamily}\n') + self.body.append('\\fbox{\\colorbox[gray]{0.80}{\\parbox{\\admonitionwidth}{\n') + + def depart_sidebar(self, node): + self.body.append('}}}\n') # end parbox colorbox fbox + self.body.append('\\end{sffamily}\n\\end{center}\n'); + self.body.append('\n\\setlength{\\locallinewidth}{\\linewidth}\n') + + + attribution_formats = {'dash': ('---', ''), + 'parentheses': ('(', ')'), + 'parens': ('(', ')'), + 'none': ('', '')} + + def visit_attribution(self, node): + prefix, suffix = self.attribution_formats[self.settings.attribution] + self.body.append('\n\\begin{flushright}\n') + self.body.append(prefix) + self.context.append(suffix) + + def depart_attribution(self, node): + self.body.append(self.context.pop() + '\n') + self.body.append('\\end{flushright}\n') + + def visit_status(self, node): + self.visit_docinfo_item(node, 'status') + + def depart_status(self, node): + self.depart_docinfo_item(node) + + def visit_strong(self, node): + self.body.append('\\textbf{') + self.literal_block_stack.append('\\textbf{') + + def depart_strong(self, node): + self.body.append('}') + self.literal_block_stack.pop() + + def visit_substitution_definition(self, node): + raise nodes.SkipNode + + def visit_substitution_reference(self, node): + self.unimplemented_visit(node) + + def visit_subtitle(self, node): + if isinstance(node.parent, nodes.sidebar): + self.body.append('~\\\\\n\\textbf{') + self.context.append('}\n\\smallskip\n') + elif isinstance(node.parent, nodes.document): + self.title = self.title + \ + '\\\\\n\\large{%s}\n' % self.encode(node.astext()) + raise nodes.SkipNode + elif isinstance(node.parent, nodes.section): + self.body.append('\\textbf{') + self.context.append('}\\vspace{0.2cm}\n\n\\noindent ') + + def depart_subtitle(self, node): + self.body.append(self.context.pop()) + + def visit_system_message(self, node): + pass + + def depart_system_message(self, node): + self.body.append('\n') + + def visit_table(self, node): + if self.active_table.is_open(): + self.table_stack.append(self.active_table) + # nesting longtable does not work (e.g. 
2007-04-18) + self.active_table = Table('tabular',self.settings.table_style) + self.active_table.open() + for cl in node['classes']: + self.active_table.set_table_style(cl) + self.body.append('\n' + self.active_table.get_opening()) + + def depart_table(self, node): + self.body.append(self.active_table.get_closing() + '\n') + self.active_table.close() + if len(self.table_stack)>0: + self.active_table = self.table_stack.pop() + else: + self.active_table.set_table_style(self.settings.table_style) + + def visit_target(self, node): + # BUG: why not (refuri or refid or refname) means not footnote ? + if not (node.has_key('refuri') or node.has_key('refid') + or node.has_key('refname')): + for id in node['ids']: + self.body.append('\\hypertarget{%s}{' % id) + self.context.append('}' * len(node['ids'])) + elif node.get("refid"): + self.body.append('\\hypertarget{%s}{' % node.get("refid")) + self.context.append('}') + else: + self.context.append('') + + def depart_target(self, node): + self.body.append(self.context.pop()) + + def visit_tbody(self, node): + # BUG write preamble if not yet done (colspecs not []) + # for tables without heads. + if not self.active_table.get('preamble written'): + self.visit_thead(None) + # self.depart_thead(None) + + def depart_tbody(self, node): + pass + + def visit_term(self, node): + self.body.append('\\item[{') + + def depart_term(self, node): + # definition list term. + # \leavevmode results in a line break if the term is followed by a item list. + self.body.append('}] \leavevmode ') + + def visit_tgroup(self, node): + #self.body.append(self.starttag(node, 'colgroup')) + #self.context.append('\n') + pass + + def depart_tgroup(self, node): + pass + + def visit_thead(self, node): + self.body.append('{%s}\n' % self.active_table.get_colspecs()) + if self.active_table.caption: + self.body.append('\\caption{%s}\\\\\n' % self.active_table.caption) + self.active_table.set('preamble written',1) + # TODO longtable supports firsthead and lastfoot too. + self.body.extend(self.active_table.visit_thead()) + + def depart_thead(self, node): + # the table header written should be on every page + # => \endhead + self.body.extend(self.active_table.depart_thead()) + # and the firsthead => \endfirsthead + # BUG i want a "continued from previous page" on every not + # firsthead, but then we need the header twice. + # + # there is a \endfoot and \endlastfoot too. + # but we need the number of columns to + # self.body.append('\\multicolumn{%d}{c}{"..."}\n' % number_of_columns) + # self.body.append('\\hline\n\\endfoot\n') + # self.body.append('\\hline\n') + # self.body.append('\\endlastfoot\n') + + def visit_tip(self, node): + self.visit_admonition(node, 'tip') + + def depart_tip(self, node): + self.depart_admonition() + + def bookmark(self, node): + """Append latex href and pdfbookmarks for titles. + """ + if node.parent['ids']: + for id in node.parent['ids']: + self.body.append('\\hypertarget{%s}{}\n' % id) + if not self.use_latex_toc: + # BUG level depends on style. pdflatex allows level 0 to 3 + # ToC would be the only on level 0 so i choose to decrement the rest. + # "Table of contents" bookmark to see the ToC. To avoid this + # we set all zeroes to one. 
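+ # NOTE (editorial): the level passed to \pdfbookmark below is effectively
+ # max(section_level - 1, 0), so top-level sections get bookmark level 0.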
+ l = self.section_level + if l>0: + l = l-1 + # pdftex does not like "_" subscripts in titles + text = self.encode(node.astext()) + for id in node.parent['ids']: + self.body.append('\\pdfbookmark[%d]{%s}{%s}\n' % \ + (l, text, id)) + + def visit_title(self, node): + """Section and other titles.""" + + if isinstance(node.parent, nodes.topic): + # the table of contents. + self.bookmark(node) + if ('contents' in self.topic_classes + and self.use_latex_toc): + self.body.append('\\renewcommand{\\contentsname}{') + self.context.append('}\n\\tableofcontents\n\n\\bigskip\n') + elif ('abstract' in self.topic_classes + and self.settings.use_latex_abstract): + raise nodes.SkipNode + else: # or section titles before the table of contents. + # BUG: latex chokes on center environment with + # "perhaps a missing item", therefore we use hfill. + self.body.append('\\subsubsection*{~\\hfill ') + # the closing brace for subsection. + self.context.append('\\hfill ~}\n') + # TODO: for admonition titles before the first section + # either specify every possible node or ... ? + elif isinstance(node.parent, nodes.sidebar) \ + or isinstance(node.parent, nodes.admonition): + self.body.append('\\textbf{\\large ') + self.context.append('}\n\\smallskip\n') + elif isinstance(node.parent, nodes.table): + # caption must be written after column spec + self.active_table.caption = self.encode(node.astext()) + raise nodes.SkipNode + elif self.section_level == 0: + # document title + self.title = self.encode(node.astext()) + if not self.pdfinfo == None: + self.pdfinfo.append( 'pdftitle={%s}' % self.encode(node.astext()) ) + raise nodes.SkipNode + else: + self.body.append('\n\n') + self.body.append('%' + '_' * 75) + self.body.append('\n\n') + self.bookmark(node) + + if self.use_latex_toc: + section_star = "" + else: + section_star = "*" + + section_name = self.d_class.section(self.section_level) + self.body.append('\\%s%s{' % (section_name, section_star)) + # MAYBE postfix paragraph and subparagraph with \leavemode to + # ensure floatables stay in the section and text starts on a new line. 
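+ # NOTE (editorial): with use_latex_toc disabled the starred, unnumbered
+ # forms (\section*, \subsection*, ...) are emitted; numbering, if any,
+ # then comes from docutils itself rather than LaTeX.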
+ self.context.append('}\n') + + def depart_title(self, node): + self.body.append(self.context.pop()) + for id in node.parent['ids']: + self.body.append('\\label{%s}\n' % id) + + def visit_topic(self, node): + self.topic_classes = node['classes'] + if ('abstract' in self.topic_classes + and self.settings.use_latex_abstract): + self.body.append('\\begin{abstract}\n') + + def depart_topic(self, node): + if ('abstract' in self.topic_classes + and self.settings.use_latex_abstract): + self.body.append('\\end{abstract}\n') + self.topic_classes = [] + if 'contents' in node['classes'] and self.use_latex_toc: + pass + else: + self.body.append('\n') + + def visit_inline(self, node): # titlereference + classes = node.get('classes', ['Unknown', ]) + for cls in classes: + self.body.append( '\\docutilsrole%s{' % cls) + self.context.append('}'*len(classes)) + + def depart_inline(self, node): + self.body.append(self.context.pop()) + + def visit_rubric(self, node): + self.body.append('\\rubric{') + self.context.append('}\n') + + def depart_rubric(self, node): + self.body.append(self.context.pop()) + + def visit_transition(self, node): + self.body.append('\n\n') + self.body.append('%' + '_' * 75) + self.body.append('\n\\hspace*{\\fill}\\hrulefill\\hspace*{\\fill}') + self.body.append('\n\n') + + def depart_transition(self, node): + pass + + def visit_version(self, node): + self.visit_docinfo_item(node, 'version') + + def depart_version(self, node): + self.depart_docinfo_item(node) + + def visit_warning(self, node): + self.visit_admonition(node, 'warning') + + def depart_warning(self, node): + self.depart_admonition() + + def unimplemented_visit(self, node): + raise NotImplementedError('visiting unimplemented node type: %s' + % node.__class__.__name__) + +# def unknown_visit(self, node): +# def default_visit(self, node): + +# vim: set ts=4 et ai : diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/latex2e/latex2e.tex --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/latex2e/latex2e.tex Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,74 @@ +% latex include file for docutils latex writer +% -------------------------------------------- +% +% CVS: $Id: latex2e.tex 4163 2005-12-09 04:21:34Z goodger $ +% +% This is included at the end of the latex header in the generated file, +% to allow overwriting defaults, although this could get hairy. +% Generated files should process well standalone too, LaTeX might give a +% message about a missing file. + +% donot indent first line of paragraph. +\setlength{\parindent}{0pt} +\setlength{\parskip}{5pt plus 2pt minus 1pt} + +% sloppy +% ------ +% Less strict (opposite to default fussy) space size between words. Therefore +% less hyphenation. +\sloppy + +% fonts +% ----- +% times for pdf generation, gives smaller pdf files. +% +% But in standard postscript fonts: courier and times/helvetica do not fit. +% Maybe use pslatex. +\usepackage{times} + +% pagestyle +% --------- +% headings might put section titles in the page heading, but not if +% the table of contents is done by docutils. +% If pagestyle{headings} is used, \geometry{headheight=10pt,headsep=1pt} +% should be set too. +%\pagestyle{plain} +% +% or use fancyhdr (untested !) 
+%\usepackage{fancyhdr} +%\pagestyle{fancy} +%\addtolength{\headheight}{\\baselineskip} +%\renewcommand{\sectionmark}[1]{\markboth{#1}{}} +%\renewcommand{\subsectionmark}[1]{\markright{#1}} +%\fancyhf{} +%\fancyhead[LE,RO]{\\bfseries\\textsf{\Large\\thepage}} +%\fancyhead[LO]{\\textsf{\\footnotesize\\rightmark}} +%\fancyhead[RE]{\\textsc{\\textsf{\\footnotesize\leftmark}}} +%\\fancyfoot[LE,RO]{\\bfseries\\textsf{\scriptsize Docutils}} +%\fancyfoot[RE,LO]{\\textsf{\scriptsize\\today}} + +% geometry +% -------- +% = papersizes and margins +%\geometry{a4paper,twoside,tmargin=1.5cm, +% headheight=1cm,headsep=0.75cm} + +% Do section number display +% ------------------------- +%\makeatletter +%\def\@seccntformat#1{} +%\makeatother +% no numbers in toc +%\renewcommand{\numberline}[1]{} + + +% change maketitle +% ---------------- +%\renewcommand{\maketitle}{ +% \begin{titlepage} +% \begin{center} +% \textsf{TITLE \@title} \\ +% Date: \today +% \end{center} +% \end{titlepage} +%} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/newlatex2e/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/newlatex2e/__init__.py Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,825 @@ +# $Id: __init__.py 5174 2007-05-31 00:01:52Z wiemann $ +# Author: Lea Wiemann +# Copyright: This module has been placed in the public domain. + +""" +LaTeX2e document tree Writer. +""" + +# Thanks to Engelbert Gruber and various contributors for the original +# LaTeX writer, some code and many ideas of which have been used for +# this writer. + +__docformat__ = 'reStructuredText' + + +import re +import os.path +from types import ListType + +import docutils +from docutils import nodes, writers, utils +from docutils.writers.newlatex2e import unicode_map +from docutils.transforms import writer_aux + + +class Writer(writers.Writer): + + supported = ('newlatex', 'newlatex2e') + """Formats this writer supports.""" + + default_stylesheet = 'base.tex' + + default_stylesheet_path = utils.relative_path( + os.path.join(os.getcwd(), 'dummy'), + os.path.join(os.path.dirname(__file__), default_stylesheet)) + + settings_spec = ( + 'LaTeX-Specific Options', + 'Note that this LaTeX writer is still EXPERIMENTAL and not ' + 'feature-complete. ', + (('Specify a stylesheet file. The path is used verbatim to include ' + 'the file. Overrides --stylesheet-path.', + ['--stylesheet'], + {'default': '', 'metavar': '', + 'overrides': 'stylesheet_path'}), + ('Specify a stylesheet file, relative to the current working ' + 'directory. Overrides --stylesheet. Default: "%s"' + % default_stylesheet_path, + ['--stylesheet-path'], + {'metavar': '', 'overrides': 'stylesheet', + 'default': default_stylesheet_path}), + ('Specify a user stylesheet file. See --stylesheet.', + ['--user-stylesheet'], + {'default': '', 'metavar': '', + 'overrides': 'user_stylesheet_path'}), + ('Specify a user stylesheet file. See --stylesheet-path.', + ['--user-stylesheet-path'], + {'metavar': '', 'overrides': 'user_stylesheet'}) + ),) + + settings_defaults = { + # Many Unicode characters are provided by unicode_map.py, so + # we can default to latin-1. + 'output_encoding': 'latin-1', + 'output_encoding_error_handler': 'strict', + # Since we are using superscript footnotes, it is necessary to + # trim whitespace in front of footnote references. 
+ 'trim_footnote_reference_space': 1, + # Currently unsupported: + 'docinfo_xform': 0, + # During development: + 'traceback': 1 + } + + relative_path_settings = ('stylesheet_path', 'user_stylesheet_path') + + config_section = 'newlatex2e writer' + config_section_dependencies = ('writers',) + + output = None + """Final translated form of `document`.""" + + def get_transforms(self): + return writers.Writer.get_transforms(self) + [ + writer_aux.Compound, writer_aux.Admonitions] + + def __init__(self): + writers.Writer.__init__(self) + self.translator_class = LaTeXTranslator + + def translate(self): + visitor = self.translator_class(self.document) + self.document.walkabout(visitor) + assert not visitor.context, 'context not empty: %s' % visitor.context + self.output = visitor.astext() + self.head = visitor.header + self.body = visitor.body + + +class LaTeXException(Exception): + """ + Exception base class to for exceptions which influence the + automatic generation of LaTeX code. + """ + + +class SkipAttrParentLaTeX(LaTeXException): + """ + Do not generate ``\DECattr`` and ``\renewcommand{\DEVparent}{...}`` for this + node. + + To be raised from ``before_...`` methods. + """ + + +class SkipParentLaTeX(LaTeXException): + """ + Do not generate ``\renewcommand{\DEVparent}{...}`` for this node. + + To be raised from ``before_...`` methods. + """ + + +class LaTeXTranslator(nodes.SparseNodeVisitor): + + # Country code by a.schlock. + # Partly manually converted from iso and babel stuff. + iso639_to_babel = { + 'no': 'norsk', # added by hand + 'gd': 'scottish', # added by hand + 'sl': 'slovenian', + 'af': 'afrikaans', + 'bg': 'bulgarian', + 'br': 'breton', + 'ca': 'catalan', + 'cs': 'czech', + 'cy': 'welsh', + 'da': 'danish', + 'fr': 'french', + # french, francais, canadien, acadian + 'de': 'ngerman', + # ngerman, naustrian, german, germanb, austrian + 'el': 'greek', + 'en': 'english', + # english, USenglish, american, UKenglish, british, canadian + 'eo': 'esperanto', + 'es': 'spanish', + 'et': 'estonian', + 'eu': 'basque', + 'fi': 'finnish', + 'ga': 'irish', + 'gl': 'galician', + 'he': 'hebrew', + 'hr': 'croatian', + 'hu': 'hungarian', + 'is': 'icelandic', + 'it': 'italian', + 'la': 'latin', + 'nl': 'dutch', + 'pl': 'polish', + 'pt': 'portuguese', + 'ro': 'romanian', + 'ru': 'russian', + 'sk': 'slovak', + 'sr': 'serbian', + 'sv': 'swedish', + 'tr': 'turkish', + 'uk': 'ukrainian' + } + + # Start with left double quote. + left_quote = 1 + + def __init__(self, document): + nodes.NodeVisitor.__init__(self, document) + self.settings = document.settings + self.header = [] + self.body = [] + self.context = [] + self.stylesheet_path = utils.get_stylesheet_reference( + self.settings, os.path.join(os.getcwd(), 'dummy')) + if self.stylesheet_path: + self.settings.record_dependencies.add(self.stylesheet_path) + # This ugly hack will be cleaned up when refactoring the + # stylesheet mess. + self.settings.stylesheet = self.settings.user_stylesheet + self.settings.stylesheet_path = self.settings.user_stylesheet_path + self.user_stylesheet_path = utils.get_stylesheet_reference( + self.settings, os.path.join(os.getcwd(), 'dummy')) + if self.user_stylesheet_path: + self.settings.record_dependencies.add(self.user_stylesheet_path) + self.write_header() + + def write_header(self): + a = self.header.append + a('%% Generated by Docutils %s .' 
+ % docutils.__version__) + a('') + a('% Docutils settings:') + lang = self.settings.language_code or '' + a(r'\providecommand{\DEVlanguageiso}{%s}' % lang) + a(r'\providecommand{\DEVlanguagebabel}{%s}' % self.iso639_to_babel.get( + lang, self.iso639_to_babel.get(lang.split('_')[0], ''))) + a('') + if self.user_stylesheet_path: + a('% User stylesheet:') + a(r'\input{%s}' % self.user_stylesheet_path) + a('% Docutils stylesheet:') + a(r'\input{%s}' % self.stylesheet_path) + a('') + a('% Default definitions for Docutils nodes:') + for node_name in nodes.node_class_names: + a(r'\providecommand{\DN%s}[1]{#1}' % node_name.replace('_', '')) + a('') + a('% Auxiliary definitions:') + for attr in (r'\DEVparent \DEVattrlen \DEVtitleastext ' + r'\DEVsinglebackref \DEVmultiplebackrefs' + ).split(): + # Later set using \renewcommand. + a(r'\providecommand{%s}{DOCUTILSUNINITIALIZEDVARIABLE}' % attr) + for attr in (r'\DEVparagraphindented \DEVhassubtitle').split(): + # Initialize as boolean variables. + a(r'\providecommand{%s}{false}' % attr) + a('\n\n') + + unicode_map = unicode_map.unicode_map # comprehensive Unicode map + # Fix problems with unimap.py. + unicode_map.update({ + # We have AE or T1 encoding, so "``" etc. work. The macros + # from unimap.py may *not* work. + u'\u201C': '{``}', + u'\u201D': "{''}", + u'\u201E': '{,,}', + }) + + character_map = { + '\\': r'{\textbackslash}', + '{': r'{\{}', + '}': r'{\}}', + '$': r'{\$}', + '&': r'{\&}', + '%': r'{\%}', + '#': r'{\#}', + '[': r'{[}', + ']': r'{]}', + '-': r'{-}', + '`': r'{`}', + "'": r"{'}", + ',': r'{,}', + '"': r'{"}', + '|': r'{\textbar}', + '<': r'{\textless}', + '>': r'{\textgreater}', + '^': r'{\textasciicircum}', + '~': r'{\textasciitilde}', + '_': r'{\DECtextunderscore}', + } + character_map.update(unicode_map) + #character_map.update(special_map) + + # `att_map` is for encoding attributes. According to + # , + # the following characters are special: # $ % & ~ _ ^ \ { } + # These work without special treatment in macro parameters: + # $, &, ~, _, ^ + att_map = {'#': '\\#', + '%': '\\%', + # We cannot do anything about backslashes. + '\\': '', + '{': '\\{', + '}': '\\}', + # The quotation mark may be redefined by babel. + '"': '"{}', + } + att_map.update(unicode_map) + + def encode(self, text, attval=None): + """ + Encode special characters in ``text`` and return it. + + If attval is true, preserve as much as possible verbatim (used + in attribute value encoding). If attval is 'width' or + 'height', `text` is interpreted as a length value. + """ + if attval in ('width', 'height'): + match = re.match(r'([0-9.]+)(\S*)$', text) + assert match, '%s="%s" must be a length' % (attval, text) + value, unit = match.groups() + if unit == '%': + value = str(float(value) / 100) + unit = r'\DECrelativeunit' + elif unit in ('', 'px'): + # If \DECpixelunit is "pt", this gives the same notion + # of pixels as graphicx. This is a bit of a hack. + value = str(float(value) * 0.75) + unit = '\DECpixelunit' + return '%s%s' % (value, unit) + if attval: + get = self.att_map.get + else: + get = self.character_map.get + text = ''.join([get(c, c) for c in text]) + if (self.literal_block or self.inline_literal) and not attval: + # NB: We can have inline literals within literal blocks. + # Shrink '\r\n'. + text = text.replace('\r\n', '\n') + # Convert space. If "{ }~~~~~" is wrapped (at the + # brace-enclosed space "{ }"), the following non-breaking + # spaces ("~~~~") do *not* wind up at the beginning of the + # next line. 
Also note that no hyphenation is done if the + # breaking space ("{ }") comes *after* the non-breaking + # spaces. + if self.literal_block: + # Replace newlines with real newlines. + text = text.replace('\n', '\mbox{}\\\\{}') + replace_fn = self.encode_replace_for_literal_block_spaces + else: + replace_fn = self.encode_replace_for_inline_literal_spaces + text = re.sub(r'\s+', replace_fn, text) + # Protect hyphens; if we don't, line breaks will be + # possible at the hyphens and even the \textnhtt macro + # from the hyphenat package won't change that. + text = text.replace('-', r'\mbox{-}') + text = text.replace("'", r'{\DECtextliteralsinglequote}') + return text + else: + if not attval: + # Replace space with single protected space. + text = re.sub(r'\s+', '{ }', text) + # Replace double quotes with macro calls. + L = [] + for part in text.split(self.character_map['"']): + if L: + # Insert quote. + L.append(self.left_quote and r'{\DECtextleftdblquote}' + or r'{\DECtextrightdblquote}') + self.left_quote = not self.left_quote + L.append(part) + return ''.join(L) + else: + return text + + def encode_replace_for_literal_block_spaces(self, match): + return '~' * len(match.group()) + + def encode_replace_for_inline_literal_spaces(self, match): + return '{ }' + '~' * (len(match.group()) - 1) + + def astext(self): + return '\n'.join(self.header) + (''.join(self.body)) + + def append(self, text, newline='%\n'): + """ + Append text, stripping newlines, producing nice LaTeX code. + """ + lines = [' ' * self.indentation_level + line + newline + for line in text.splitlines(0)] + self.body.append(''.join(lines)) + + def visit_Text(self, node): + self.append(self.encode(node.astext())) + + def depart_Text(self, node): + pass + + def is_indented(self, paragraph): + """Return true if `paragraph` should be first-line-indented.""" + assert isinstance(paragraph, nodes.paragraph) + siblings = [n for n in paragraph.parent if + self.is_visible(n) and not isinstance(n, nodes.Titular)] + index = siblings.index(paragraph) + if ('continued' in paragraph['classes'] or + index > 0 and isinstance(siblings[index-1], nodes.transition)): + return 0 + # Indent all but the first paragraphs. + return index > 0 + + def before_paragraph(self, node): + self.append(r'\renewcommand{\DEVparagraphindented}{%s}' + % (self.is_indented(node) and 'true' or 'false')) + + def before_title(self, node): + self.append(r'\renewcommand{\DEVtitleastext}{%s}' + % self.encode(node.astext())) + self.append(r'\renewcommand{\DEVhassubtitle}{%s}' + % ((len(node.parent) > 2 and + isinstance(node.parent[1], nodes.subtitle)) + and 'true' or 'false')) + + def before_generated(self, node): + if 'sectnum' in node['classes']: + node[0] = node[0].strip() + + literal_block = 0 + + def visit_literal_block(self, node): + self.literal_block = 1 + + def depart_literal_block(self, node): + self.literal_block = 0 + + visit_doctest_block = visit_literal_block + depart_doctest_block = depart_literal_block + + inline_literal = 0 + + def visit_literal(self, node): + self.inline_literal += 1 + + def depart_literal(self, node): + self.inline_literal -= 1 + + def _make_encodable(self, text): + """ + Return text (a unicode object) with all unencodable characters + replaced with '?'. + + Thus, the returned unicode string is guaranteed to be encodable. 
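+
+ For example, with this writer's default latin-1 output encoding, a
+ character such as u'\u2603' comes back as '?'.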
+ """ + encoding = self.settings.output_encoding + return text.encode(encoding, 'replace').decode(encoding) + + def visit_comment(self, node): + """ + Insert the comment unchanged into the document, replacing + unencodable characters with '?'. + + (This is done in order not to fail if comments contain unencodable + characters, because our default encoding is not UTF-8.) + """ + self.append('\n'.join(['% ' + self._make_encodable(line) for line + in node.astext().splitlines(0)]), newline='\n') + raise nodes.SkipChildren + + def before_topic(self, node): + if 'contents' in node['classes']: + for bullet_list in list(node.traverse(nodes.bullet_list)): + p = bullet_list.parent + if isinstance(p, nodes.list_item): + p.parent.insert(p.parent.index(p) + 1, bullet_list) + del p[1] + for paragraph in node.traverse(nodes.paragraph): + paragraph.attributes.update(paragraph[0].attributes) + paragraph[:] = paragraph[0] + paragraph.parent['tocrefid'] = paragraph['refid'] + node['contents'] = 1 + else: + node['contents'] = 0 + + bullet_list_level = 0 + + def visit_bullet_list(self, node): + self.append(r'\DECsetbullet{\labelitem%s}' % + ['i', 'ii', 'iii', 'iv'][min(self.bullet_list_level, 3)]) + self.bullet_list_level += 1 + + def depart_bullet_list(self, node): + self.bullet_list_level -= 1 + + enum_styles = {'arabic': 'arabic', 'loweralpha': 'alph', 'upperalpha': + 'Alph', 'lowerroman': 'roman', 'upperroman': 'Roman'} + + enum_counter = 0 + + def visit_enumerated_list(self, node): + # We create our own enumeration list environment. This allows + # to set the style and starting value and unlimited nesting. + # Maybe the actual creation (\DEC) can be moved to the + # stylesheet? + self.enum_counter += 1 + enum_prefix = self.encode(node['prefix']) + enum_suffix = self.encode(node['suffix']) + enum_type = '\\' + self.enum_styles.get(node['enumtype'], r'arabic') + start = node.get('start', 1) - 1 + counter = 'Denumcounter%d' % self.enum_counter + self.append(r'\DECmakeenumeratedlist{%s}{%s}{%s}{%s}{%s}{' + % (enum_prefix, enum_type, enum_suffix, counter, start)) + # for Emacs: } + + def depart_enumerated_list(self, node): + self.append('}') # for Emacs: { + + def before_list_item(self, node): + # XXX needs cleanup. + if (len(node) and (isinstance(node[-1], nodes.TextElement) or + isinstance(node[-1], nodes.Text)) and + node.parent.index(node) == len(node.parent) - 1): + node['lastitem'] = 'true' + + before_line = before_list_item + + def before_raw(self, node): + if 'latex' in node.get('format', '').split(): + # We're inserting the text in before_raw and thus outside + # of \DN... and \DECattr in order to make grouping with + # curly brackets work. + self.append(node.astext()) + raise nodes.SkipChildren + + def process_backlinks(self, node, type): + """ + Add LaTeX handling code for backlinks of footnote or citation + node `node`. `type` is either 'footnote' or 'citation'. + """ + self.append(r'\renewcommand{\DEVsinglebackref}{}') + self.append(r'\renewcommand{\DEVmultiplebackrefs}{}') + if len(node['backrefs']) > 1: + refs = [] + for i in range(len(node['backrefs'])): + # \DECmulticitationbacklink or \DECmultifootnotebacklink. 
+ refs.append(r'\DECmulti%sbacklink{%s}{%s}' + % (type, node['backrefs'][i], i + 1)) + self.append(r'\renewcommand{\DEVmultiplebackrefs}{(%s){ }}' + % ', '.join(refs)) + elif len(node['backrefs']) == 1: + self.append(r'\renewcommand{\DEVsinglebackref}{%s}' + % node['backrefs'][0]) + + def visit_footnote(self, node): + self.process_backlinks(node, 'footnote') + + def visit_citation(self, node): + self.process_backlinks(node, 'citation') + + def before_table(self, node): + # A table contains exactly one tgroup. See before_tgroup. + pass + + def before_tgroup(self, node): + widths = [] + total_width = 0 + for i in range(int(node['cols'])): + assert isinstance(node[i], nodes.colspec) + widths.append(int(node[i]['colwidth']) + 1) + total_width += widths[-1] + del node[:len(widths)] + tablespec = '|' + for w in widths: + # 0.93 is probably wrong in many cases. XXX Find a + # solution which works *always*. + tablespec += r'p{%s\textwidth}|' % (0.93 * w / + max(total_width, 60)) + self.append(r'\DECmaketable{%s}{' % tablespec) + self.context.append('}') + raise SkipAttrParentLaTeX + + def depart_tgroup(self, node): + self.append(self.context.pop()) + + def before_row(self, node): + raise SkipAttrParentLaTeX + + def before_thead(self, node): + raise SkipAttrParentLaTeX + + def before_tbody(self, node): + raise SkipAttrParentLaTeX + + def is_simply_entry(self, node): + return (len(node) == 1 and isinstance(node[0], nodes.paragraph) or + len(node) == 0) + + def before_entry(self, node): + is_leftmost = 0 + if node.hasattr('morerows'): + self.document.reporter.severe('Rowspans are not supported.') + # Todo: Add empty cells below rowspanning cell and issue + # warning instead of severe. + if node.hasattr('morecols'): + # The author got a headache trying to implement + # multicolumn support. + if not self.is_simply_entry(node): + self.document.reporter.severe( + 'Colspanning table cells may only contain one paragraph.') + # Todo: Same as above. + # The number of columns this entry spans (as a string). + colspan = int(node['morecols']) + 1 + del node['morecols'] + else: + colspan = 1 + # Macro to call -- \DECcolspan or \DECcolspanleft. + macro_name = r'\DECcolspan' + if node.parent.index(node) == 0: + # Leftmost column. + macro_name += 'left' + is_leftmost = 1 + if colspan > 1: + self.append('%s{%s}{' % (macro_name, colspan)) + self.context.append('}') + else: + # Do not add a multicolumn with colspan 1 beacuse we need + # at least one non-multicolumn cell per column to get the + # desired column widths, and we can only do colspans with + # cells consisting of only one paragraph. + if not is_leftmost: + self.append(r'\DECsubsequententry{') + self.context.append('}') + else: + self.context.append('') + if isinstance(node.parent.parent, nodes.thead): + node['tableheaderentry'] = 'true' + + # Don't add \renewcommand{\DEVparent}{...} because there must + # not be any non-expandable commands in front of \multicolumn. + raise SkipParentLaTeX + + def depart_entry(self, node): + self.append(self.context.pop()) + + def before_substitution_definition(self, node): + raise nodes.SkipNode + + indentation_level = 0 + + def node_name(self, node): + return node.__class__.__name__.replace('_', '') + + # Attribute propagation order. + attribute_order = ['align', 'classes', 'ids'] + + def attribute_cmp(self, a1, a2): + """ + Compare attribute names `a1` and `a2`. Used in + propagate_attributes to determine propagation order. + + See built-in function `cmp` for return value. 
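+
+ For example, 'align' sorts before 'classes' and 'ids', and attributes
+ not listed in attribute_order sort after all listed ones.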
+ """ + if a1 in self.attribute_order and a2 in self.attribute_order: + return cmp(self.attribute_order.index(a1), + self.attribute_order.index(a2)) + if (a1 in self.attribute_order) != (a2 in self.attribute_order): + # Attributes not in self.attribute_order come last. + return a1 in self.attribute_order and -1 or 1 + else: + return cmp(a1, a2) + + def propagate_attributes(self, node): + # Propagate attributes using \DECattr macros. + node_name = self.node_name(node) + attlist = [] + if isinstance(node, nodes.Element): + attlist = node.attlist() + attlist.sort(lambda pair1, pair2: self.attribute_cmp(pair1[0], + pair2[0])) + # `numatts` may be greater than len(attlist) due to list + # attributes. + numatts = 0 + pass_contents = self.pass_contents(node) + for key, value in attlist: + if isinstance(value, ListType): + self.append(r'\renewcommand{\DEVattrlen}{%s}' % len(value)) + for i in range(len(value)): + self.append(r'\DECattr{%s}{%s}{%s}{%s}{' % + (i+1, key, self.encode(value[i], attval=key), + node_name)) + if not pass_contents: + self.append('}') + numatts += len(value) + else: + self.append(r'\DECattr{}{%s}{%s}{%s}{' % + (key, self.encode(unicode(value), attval=key), + node_name)) + if not pass_contents: + self.append('}') + numatts += 1 + if pass_contents: + self.context.append('}' * numatts) # for Emacs: { + else: + self.context.append('') + + def visit_docinfo(self, node): + raise NotImplementedError('Docinfo not yet implemented.') + + def visit_document(self, node): + document = node + # Move IDs into TextElements. This won't work for images. + # Need to review this. + for node in document.traverse(nodes.Element): + if node.has_key('ids') and not isinstance(node, + nodes.TextElement): + next_text_element = node.next_node(nodes.TextElement) + if next_text_element: + next_text_element['ids'].extend(node['ids']) + node['ids'] = [] + + def pass_contents(self, node): + r""" + Return True if the node contents should be passed in + \DN{} and \DECattr{}{}{}{}{}. + Return False if the node contents should be passed in + \DECvisit \DECdepart, and no + attribute handler should be called. + """ + # Passing the whole document or whole sections as parameters + # to \DN... or \DECattr causes LaTeX to run out of memory. + return not isinstance(node, (nodes.document, nodes.section)) + + def dispatch_visit(self, node): + skip_attr = skip_parent = 0 + # TreePruningException to be propagated. + tree_pruning_exception = None + if hasattr(self, 'before_' + node.__class__.__name__): + try: + getattr(self, 'before_' + node.__class__.__name__)(node) + except SkipParentLaTeX: + skip_parent = 1 + except SkipAttrParentLaTeX: + skip_attr = 1 + skip_parent = 1 + except nodes.SkipNode: + raise + except (nodes.SkipChildren, nodes.SkipSiblings), instance: + tree_pruning_exception = instance + except nodes.SkipDeparture: + raise NotImplementedError( + 'SkipDeparture not usable in LaTeX writer') + + if not isinstance(node, nodes.Text): + node_name = self.node_name(node) + # attribute_deleters will be appended to self.context. + attribute_deleters = [] + if not skip_parent and not isinstance(node, nodes.document): + self.append(r'\renewcommand{\DEVparent}{%s}' + % self.node_name(node.parent)) + for name, value in node.attlist(): + if not isinstance(value, ListType) and not ':' in name: + # For non-list and non-special (like + # 'xml:preserve') attributes, set + # \DEVcurrentNA to the + # attribute value, so that the value of the + # attribute is available in the node handler + # and all children. 
+ macro = r'\DEVcurrentN%sA%s' % (node_name, name) + self.append(r'\def%s{%s}' % ( + macro, self.encode(unicode(value), attval=name))) + # Make the attribute undefined afterwards. + attribute_deleters.append(r'\let%s=\relax' % macro) + self.context.append('\n'.join(attribute_deleters)) + if self.pass_contents(node): + # Call \DN{}. + self.append(r'\DN%s{' % node_name) + self.context.append('}') + else: + # Call \DECvisit + # \DECdepart. (Maybe we should use LaTeX + # environments for this?) + self.append(r'\DECvisit%s' % node_name) + self.context.append(r'\DECdepart%s' % node_name) + self.indentation_level += 1 + if not skip_attr: + self.propagate_attributes(node) + else: + self.context.append('') + + if (isinstance(node, nodes.TextElement) and + not isinstance(node.parent, nodes.TextElement)): + # Reset current quote to left. + self.left_quote = 1 + + # Call visit_... method. + try: + nodes.SparseNodeVisitor.dispatch_visit(self, node) + except LaTeXException: + raise NotImplementedError( + 'visit_... methods must not raise LaTeXExceptions') + + if tree_pruning_exception: + # Propagate TreePruningException raised in before_... method. + raise tree_pruning_exception + + def is_invisible(self, node): + # Return true if node is invisible or moved away in the LaTeX + # rendering. + return (not isinstance(node, nodes.Text) and + (isinstance(node, nodes.Invisible) or + isinstance(node, nodes.footnote) or + isinstance(node, nodes.citation) or + # Assume raw nodes to be invisible. + isinstance(node, nodes.raw) or + # Floating image or figure. + node.get('align') in ('left', 'right'))) + + def is_visible(self, node): + return not self.is_invisible(node) + + def needs_space(self, node): + """Two nodes for which `needs_space` is true need auxiliary space.""" + # Return true if node is a visible block-level element. + return ((isinstance(node, nodes.Body) or + isinstance(node, nodes.topic)) and + not (self.is_invisible(node) or + isinstance(node.parent, nodes.TextElement))) + + def always_needs_space(self, node): + """ + Always add space around nodes for which `always_needs_space()` + is true, regardless of whether the other node needs space as + well. (E.g. transition next to section.) + """ + return isinstance(node, nodes.transition) + + def dispatch_departure(self, node): + # Call departure method. + nodes.SparseNodeVisitor.dispatch_departure(self, node) + + if not isinstance(node, nodes.Text): + # Close attribute and node handler call (\DN...{...}). + self.indentation_level -= 1 + self.append(self.context.pop() + self.context.pop()) + # Delete \DECcurrentN... attribute macros. + self.append(self.context.pop()) + # Get next sibling. + next_node = node.next_node( + ascend=0, siblings=1, descend=0, + condition=self.is_visible) + # Insert space if necessary. + if (self.needs_space(node) and self.needs_space(next_node) or + self.always_needs_space(node) or + self.always_needs_space(next_node)): + if isinstance(node, nodes.paragraph) and isinstance(next_node, nodes.paragraph): + # Space between paragraphs. + self.append(r'\DECparagraphspace') + else: + # One of the elements is not a paragraph. 
+ self.append(r'\DECauxiliaryspace') diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/newlatex2e/base.tex --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/newlatex2e/base.tex Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,1180 @@ +% System stylesheet for the new LaTeX writer, newlatex2e. + +% Major parts of the rendering are done in this stylesheet and not in the +% Python module. + +% For development notes, see notes.txt. + +% User documentation (in the stylesheet for now; that may change though): + +% Naming conventions: +% All uppercase letters in macro names have a specific meaning. +% \D...: All macros introduced by the Docutils LaTeX writer start with "D". +% \DS: Setup function (called at the bottom of this stylesheet). +% \DN{}: Handler for Docutils document tree node `node`; called by +% the Python module. +% \DEV: External variable, set by the Python module. +% \DEC: External command. It is called by the Python module and must be +% defined in this stylesheet. +% \DNA{}{}{}{}{}: +% Attribute handler for `attribute` set on nodes of type `nodename`. +% See below for a discussion of attribute handlers. +% \DA{}{}{}{}{}: +% Attribute handler for all `attribute`. Called only when no specific +% \DNA handler is defined. +% \DNC{}: +% Handler for `class`, when set on nodes of type `nodename`. +% \DC{}: +% Handler for `class`. Called only when no specific \DNC +% handler is defined. +% \D: Generic variable or function. + +% Attribute handlers: +% TODO + +% --------------------------------------------------------------------------- + +% Having to intersperse code with \makeatletter-\makeatother pairs is very +% annoying, so we call \makeatletter at the top and \makeatother at the +% bottom. Just be aware that you cannot use "@" as a text character inside +% this stylesheet. +\makeatletter + +% Print-mode (as opposed to online mode e.g. with Adobe Reader). +% This causes for example blue hyperlinks. +\providecommand{\Dprinting}{false} + +% \DSearly is called right after \documentclass. +\providecommand{\DSearly}{} +% \DSlate is called at the end of the stylesheet (right before the document +% tree). +\providecommand{\DSlate}{} + +% Use the KOMA script article class. +\providecommand{\Ddocumentclass}{scrartcl} +\providecommand{\Ddocumentoptions}{a4paper} +\providecommand{\DSdocumentclass}{ + \documentclass[\Ddocumentoptions]{\Ddocumentclass} } + +% Todo: This should be movable to the bottom, but it isn't as long as +% we use \usepackage commands at the top level of this stylesheet +% (which we shouldn't). +\DSdocumentclass + +\providecommand{\DSpackages}{ + % Load miscellaneous packages. + % Note 1: Many of the packages loaded here are used throughout this stylesheet. + % If one of these packages does not work on your system or in your scenario, + % please let us know, so we can consider making the package optional. + % Note 2: It would appear cleaner to load packages where they are used. + % However, since using a wrong package loading order can lead to *very* + % subtle bugs, we centralize the loading of most packages here. + \DSfontencoding % load font encoding packages + \DSlanguage % load babel + % Using \ifthenelse conditionals. + \usepackage{ifthen} % before hyperref (really!) + % There is not support for *not* using hyperref because it's used in many + % places. If this is a problem (e.g. 
because hyperref doesn't work on your + % system), please let us know. + \usepackage[colorlinks=false,pdfborder={0 0 0}]{hyperref} + % Get color, e.g. for links and system messages. + \usepackage{color} + % Get \textnhtt macro (non-hyphenating type writer). + \usepackage{hyphenat} + % For sidebars. + \usepackage{picins} + % We use longtable to create tables. + \usepackage{longtable} + % Images. + \usepackage{graphicx} + % These packages might be useful (some just add magic pixie dust), so + % evaluate them: + %\usepackage{fixmath} + %\usepackage{amsmath} + % Add some missing symbols like \textonehalf. + \usepackage{textcomp} +} + +\providecommand{\DSfontencoding}{ + % Set up font encoding. Called by \DSpackages. + % AE is a T1 emulation. It provides mostly the same characters and + % features as T1-encoded fonts but doesn't use bitmap fonts (which are + % unsuitable for online reading and subtle for printers). + \usepackage{ae} + % Provide the characters not contained in AE from EC bitmap fonts. + \usepackage{aecompl} + % Guillemets ("<<", ">>") in AE. + \usepackage{aeguill} +} + +\providecommand{\DSsymbols}{% + % Fix up symbols. + % The Euro symbol in Computer Modern looks, um, funny. Let's get a + % proper Euro symbol. + \usepackage{eurosym}% + \renewcommand{\texteuro}{\euro}% +} + +% Taken from +% +% and modified. Used with permission. +\providecommand{\Dprovidelength}[2]{% + \begingroup% + \escapechar\m@ne% + \xdef\@gtempa{{\string#1}}% + \endgroup% + \expandafter\@ifundefined\@gtempa% + {\newlength{#1}\setlength{#1}{#2}}% + {}% +} + +\providecommand{\Dprovidecounter}[2]{% + % Like \newcounter except that it doesn't crash if the counter + % already exists. + \@ifundefined{c@#1}{\newcounter{#1}\setcounter{#1}{#2}}{} +} + +\Dprovidelength{\Dboxparindent}{\parindent} + +\providecommand{\Dmakebox}[1]{% + % Make a centered, frameless box. Useful e.g. for block quotes. + % Do not use minipages here, but create pseudo-lists to allow + % page-breaking. (Don't use KOMA-script's addmargin environment + % because it messes up bullet lists.) + \Dmakelistenvironment{}{}{% + \setlength{\parskip}{0pt}% + \setlength{\parindent}{\Dboxparindent}% + \item{#1}% + }% +} + +\providecommand{\Dmakefbox}[1]{% + % Make a centered, framed box. Useful e.g. for admonitions. + \vspace{0.4\baselineskip}% + \begin{center}% + \fbox{% + \begin{minipage}[t]{0.9\linewidth}% + \setlength{\parindent}{\Dboxparindent}% + #1% + \end{minipage}% + }% + \end{center}% + \vspace{0.4\baselineskip}% +} + +% We do not currently recognize the difference between an end-sentence and a +% mid-sentence period (". " vs. ". " in plain text). So \frenchspacing is +% appropriate. +\providecommand{\DSfrenchspacing}{\frenchspacing} + + +\Dprovidelength{\Dblocklevelvspace}{% + % Space between block-level elements other than paragraphs. + 0.7\baselineskip plus 0.3\baselineskip minus 0.2\baselineskip% +} +\providecommand{\DECauxiliaryspace}{% + \ifthenelse{\equal{\Dneedvspace}{true}}{\vspace{\Dblocklevelvspace}}{}% + \par\noindent% +} +\providecommand{\DECparagraphspace}{\par} +\providecommand{\Dneedvspace}{true} + +\providecommand{\DSlanguage}{% + % Set up babel. + \usepackage[\DEVlanguagebabel]{babel} +} + +\providecommand{\Difdefined}[3]{\@ifundefined{#1}{#3}{#2}} + +% Handler for 'classes' attribute (called for each class attribute). +\providecommand{\DAclasses}[5]{% + % Dispatch to \DNC. + \Difdefined{DN#4C#3}{% + % Pass only contents, nothing else! + \csname DN#4C#3\endcsname{#5}% + }{% + % Otherwise, dispatch to \DC. 
+ \Difdefined{DC#3}{% + \csname DC#3\endcsname{#5}% + }{% + #5% + }% + }% +} + +\providecommand{\DECattr}[5]{% + % Global attribute dispatcher, called inside the document tree. + % Parameters: + % 1. Attribute number. + % 2. Attribute name. + % 3. Attribute value. + % 4. Node name. + % 5. Node contents. + \Difdefined{DN#4A#2}{% + % Dispatch to \DNA. + \csname DN#4A#2\endcsname{#1}{#2}{#3}{#4}{#5}% + }{\Difdefined{DA#2}{% + % Otherwise dispatch to \DA. + \csname DA#2\endcsname{#1}{#2}{#3}{#4}{#5}% + }{% + % Otherwise simply run the contents without calling a handler. + #5% + }}% +} + +% ---------- Link handling ---------- +% Targets and references. + +\providecommand{\Draisedlink}[1]{% + % Anchors are placed on the base line by default. This is a bad thing for + % inline context, so we raise the anchor (normally by \baselineskip). + \Hy@raisedlink{#1}% +} + +% References. +% We're assuming here that the "refid" and "refuri" attributes occur +% only in inline context (in TextElements). +\providecommand{\DArefid}[5]{% + \ifthenelse{\equal{#4}{reference}}{% + \Dexplicitreference{\##3}{#5}% + }{% + % If this is not a target node (targets with refids are + % uninteresting and should be silently dropped). + \ifthenelse{\not\equal{#4}{target}}{% + % If this is a footnote reference, call special macro. + \ifthenelse{\equal{#4}{footnotereference}}{% + \Dimplicitfootnotereference{\##3}{#5}% + }{% + \ifthenelse{\equal{#4}{citationreference}}{% + \Dimplicitcitationreference{\##3}{#5}% + }{% + \Dimplicitreference{\##3}{#5}% + }% + }% + }{}% + }% +} +\providecommand{\DArefuri}[5]{% + \ifthenelse{\equal{#4}{target}}{% + % The node name is 'target', so this is a hyperlink target, like this: + % .. _mytarget: URI + % Hyperlink targets are ignored because they are invisible. + }{% + % If a non-target node has a refuri attribute, it must be an explicit URI + % reference (i.e. node name is 'reference'). + \Durireference{#3}{#5}% + }% +} +% Targets. +\providecommand{\DAids}[5]{% + \label{#3}% + \ifthenelse{\equal{#4}{footnotereference}}{% + {% + \renewcommand{\HyperRaiseLinkDefault}{% + % Dirty hack to make backrefs to footnote references work. + % For some reason, \baselineskip is 0pt in fn references. + 0.5\Doriginalbaselineskip% + }% + \Draisedlink{\hypertarget{#3}{}}#5% + }% + }{% + \Draisedlink{\hypertarget{#3}{}}#5% + }% +} +\providecommand{\Dimplicitreference}[2]{% + % Create implicit reference to ID. Implicit references occur + % e.g. in TOC-backlinks of section titles. Parameters: + % 1. Target. + % 2. Link text. + \href{#1}{#2}% +} +\providecommand{\Dimplicitfootnotereference}[2]{% + % Ditto, but for the special case of footnotes. + % We want them to be rendered like explicit references. + \Dexplicitreference{#1}{#2}% +} +\providecommand{\Dimplicitcitationreference}[2]{% + % Ditto for citation references. + \Dimplicitfootnotereference{#1}{#2}% +} +\providecommand{\Dcolorexplicitreference}{% + \ifthenelse{\equal{\Dprinting}{true}}{\color{black}}{\color{blue}}% +} +\providecommand{\Dexplicitreference}[2]{% + % Create explicit reference to ID, e.g. created with "foo_". + % Parameters: + % 1. Target. + % 2. Link text. + \href{#1}{{\Dcolorexplicitreference#2}}% +} +\providecommand{\Dcolorurireference}{\Dcolorexplicitreference} +\providecommand{\Durireference}[2]{% + % Create reference to URI. Parameters: + % 1. Target. + % 2. Link text. + \href{#1}{{\Dcolorurireference#2}}% +} + +\Dprovidecounter{Dpdfbookmarkid}{0}% +\providecommand{\Dpdfbookmark}[1]{% + % Temporarily decrement Desctionlevel counter. 
+ \addtocounter{Dsectionlevel}{-1}% + %\typeout{\arabic{Dsectionlevel}}% + %\typeout{#1}% + %\typeout{docutils\roman{Dpdfbookmarkid}}% + %\typeout{}% + \pdfbookmark[\arabic{Dsectionlevel}]{#1}{docutils\arabic{Dpdfbookmarkid}}% + \addtocounter{Dsectionlevel}{1}% + \addtocounter{Dpdfbookmarkid}{1}% +} +% ---------- End of Link Handling ---------- + +\providecommand{\DNparagraph}[1]{% + \ifthenelse{\equal{\DEVparagraphindented}{true}}{\indent}{\noindent}% + #1% +} +\providecommand{\Dformatboxtitle}[1]{{\Large\textbf{#1}}} +\providecommand{\Dformatboxsubtitle}[1]{{\large\textbf{#1}}} +\providecommand{\Dtopictitle}[1]{% + \Difinsidetoc{\vspace{1em}\par}{}% + \noindent\Dformatboxtitle{#1}% + \ifthenelse{\equal{\DEVhassubtitle}{false}}{\vspace{1em}}{\vspace{0.5em}}% + \par% +} +\providecommand{\Dadmonitiontitle}[1]{% + \Dtopictitle{#1}% +} +\providecommand{\Dtopicsubtitle}[1]{% + \noindent\Dformatboxsubtitle{#1}% + \vspace{1em}% + \par% +} +\providecommand{\Dsidebartitle}[1]{\Dtopictitle{#1}} +\providecommand{\Dsidebarsubtitle}[1]{\Dtopicsubtitle{#1}} +\providecommand{\Ddocumenttitle}[1]{% + \begin{center}{\Huge#1}\end{center}% + \ifthenelse{\equal{\DEVhassubtitle}{true}}{\vspace{0.1cm}}{\vspace{1cm}}% +} +\providecommand{\Ddocumentsubtitle}[1]{% + \begin{center}{\huge#1}\end{center}% + \vspace{1cm}% +} +% Can be overwritten by user stylesheet. +\providecommand{\Dformatsectiontitle}[1]{#1} +\providecommand{\Dformatsectionsubtitle}[1]{\Dformatsectiontitle{#1}} +\providecommand{\Dbookmarksectiontitle}[1]{% + % Return text suitable for use in \section*, \subsection*, etc., + % containing a PDF bookmark. Parameter: The title (as node tree). + \Draisedlink{\Dpdfbookmark{\DEVtitleastext}}% + #1% +} +\providecommand{\Dsectiontitlehook}[1]{#1} +\providecommand{\Dsectiontitle}[1]{% + \Dsectiontitlehook{% + \Ddispatchsectiontitle{\Dbookmarksectiontitle{\Dformatsectiontitle{#1}}}% + }% +} +\providecommand{\Ddispatchsectiontitle}[1]{% + \@ifundefined{Dsectiontitle\roman{Dsectionlevel}}{% + \Ddeepsectiontitle{#1}% + }{% + \csname Dsectiontitle\roman{Dsectionlevel}\endcsname{#1}% + }% +} +\providecommand{\Ddispatchsectionsubtitle}[1]{% + \Ddispatchsectiontitle{#1}% +} +\providecommand{\Dsectiontitlei}[1]{\section*{#1}} +\providecommand{\Dsectiontitleii}[1]{\subsection*{#1}} +\providecommand{\Ddeepsectiontitle}[1]{% + % Anything below \subsubsection (like \paragraph or \subparagraph) + % is useless because it uses the same font. The only way to + % (visually) distinguish such deeply nested sections is to use + % section numbering. + \subsubsection*{#1}% +} +\providecommand{\Dsectionsubtitlehook}[1]{#1} +\Dprovidelength{\Dsectionsubtitleraisedistance}{0.7em} +\providecommand{\Dsectionsubtitlescaling}{0.85} +\providecommand{\Dsectionsubtitle}[1]{% + \Dsectionsubtitlehook{% + % Move the subtitle nearer to the title. + \vspace{-\Dsectionsubtitleraisedistance}% + % Don't create a PDF bookmark. + \Ddispatchsectionsubtitle{% + \Dformatsectionsubtitle{\scalebox{\Dsectionsubtitlescaling}{#1}}% + }% + }% +} +\providecommand{\DNtitle}[1]{% + % Dispatch to \Dtitle. + \csname D\DEVparent title\endcsname{#1}% +} +\providecommand{\DNsubtitle}[1]{% + % Dispatch to \Dsubtitle. 
+ \csname D\DEVparent subtitle\endcsname{#1}% +} + +\providecommand{\DNliteralblock}[1]{% + \Dmakelistenvironment{}{% + \ifthenelse{\equal{\Dinsidetabular}{true}}{% + \setlength{\leftmargin}{0pt}% + }{}% + \setlength{\rightmargin}{0pt}% + }{% + \raggedright\item\noindent\nohyphens{\textnhtt{#1\Dfinalstrut}}% + }% +} +\providecommand{\DNdoctestblock}[1]{\DNliteralblock{#1}} +\providecommand{\DNliteral}[1]{\textnhtt{#1}} +\providecommand{\DNemphasis}[1]{\emph{#1}} +\providecommand{\DNstrong}[1]{\textbf{#1}} +\providecommand{\DECvisitdocument}{\begin{document}\noindent} +\providecommand{\DECdepartdocument}{\end{document}} +\providecommand{\DNtopic}[1]{% + \ifthenelse{\equal{\DEVcurrentNtopicAcontents}{1}}{% + \addtocounter{Dtoclevel}{1}% + \par\noindent% + #1% + \addtocounter{Dtoclevel}{-1}% + }{% + \par\noindent% + \Dmakebox{#1}% + }% +} +\providecommand{\DNadmonition}[1]{% + \DNtopic{#1}% +} +\providecommand{\Dformatrubric}[1]{\textbf{#1}} +\Dprovidelength{\Dprerubricspace}{0.3em} +\providecommand{\DNrubric}[1]{% + \vspace{\Dprerubricspace}\par\noindent\Dformatrubric{#1}\par% +} + +\providecommand{\Dbullet}{} +\providecommand{\DECsetbullet}[1]{\renewcommand{\Dbullet}{#1}} +\providecommand{\DNbulletlist}[1]{% + \Difinsidetoc{% + \Dtocbulletlist{#1}% + }{% + \Dmakelistenvironment{\Dbullet}{}{#1}% + }% +} +% Todo: So what on earth is @pnumwidth? +\renewcommand{\@pnumwidth}{2.2em} +\providecommand{\DNlistitem}[1]{% + \Difinsidetoc{% + \ifthenelse{\equal{\theDtoclevel}{1}\and\equal{\Dlocaltoc}{false}}{% + {% + \par\addvspace{1em}\noindent% + \sectfont% + #1\hfill\pageref{\DEVcurrentNlistitemAtocrefid}% + }% + }{% + \@dottedtocline{0}{\Dtocindent}{0em}{#1}{% + \pageref{\DEVcurrentNlistitemAtocrefid}% + }% + }% + }{% + \item{#1}% + }% +} +\providecommand{\DNenumeratedlist}[1]{#1} +\Dprovidecounter{Dsectionlevel}{0} +\providecommand{\Dvisitsectionhook}{} +\providecommand{\Ddepartsectionhook}{} +\providecommand{\DECvisitsection}{% + \addtocounter{Dsectionlevel}{1}% + \Dvisitsectionhook% +} +\providecommand{\DECdepartsection}{% + \Ddepartsectionhook% + \addtocounter{Dsectionlevel}{-1}% +} + +% Using \_ will cause hyphenation after _ even in \textnhtt-typewriter +% because the hyphenat package redefines \_. So we use +% \textunderscore here. +\providecommand{\DECtextunderscore}{\textunderscore} + +\providecommand{\Dtextinlineliteralfirstspace}{{ }} +\providecommand{\Dtextinlineliteralsecondspace}{{~}} + +\Dprovidelength{\Dlistspacing}{0.8\baselineskip} + +\providecommand{\Dsetlistrightmargin}{% + \ifthenelse{\lengthtest{\linewidth>12em}}{% + % Equal margins. + \setlength{\rightmargin}{\leftmargin}% + }{% + % If the line is narrower than 10em, we don't remove any further + % space from the right. + \setlength{\rightmargin}{0pt}% + }% +} +\providecommand{\Dresetlistdepth}{false} +\Dprovidelength{\Doriginallabelsep}{\labelsep} +\providecommand{\Dmakelistenvironment}[3]{% + % Make list environment with support for unlimited nesting and with + % reasonable default lengths. Parameters: + % 1. Label (same as in list environment). + % 2. Spacing (same as in list environment). + % 3. List contents (contents of list environment). + \ifthenelse{\equal{\Dinsidetabular}{true}}{% + % Unfortunately, vertical spacing doesn't work correctly when + % using lists inside tabular environments, so we use a minipage. 
+ \begin{minipage}[t]{\linewidth}% + }{}% + {% + \renewcommand{\Dneedvspace}{false}% + % \parsep0.5\baselineskip + \renewcommand{\Dresetlistdepth}{false}% + \ifnum \@listdepth>5% + \protect\renewcommand{\Dresetlistdepth}{true}% + \@listdepth=5% + \fi% + \begin{list}{% + #1% + }{% + \setlength{\itemsep}{0pt}% + \setlength{\partopsep}{0pt}% + \setlength{\topsep}{0pt}% + % List should take 90% of total width. + \setlength{\leftmargin}{0.05\linewidth}% + \ifthenelse{\lengthtest{\leftmargin<1.8em}}{% + \setlength{\leftmargin}{1.8em}% + }{}% + \setlength{\labelsep}{\Doriginallabelsep}% + \Dsetlistrightmargin% + #2% + }{% + #3% + }% + \end{list}% + \ifthenelse{\equal{\Dresetlistdepth}{true}}{\@listdepth=5}{}% + }% + \ifthenelse{\equal{\Dinsidetabular}{true}}{\end{minipage}}{}% +} +\providecommand{\Dfinalstrut}{\@finalstrut\@arstrutbox} +\providecommand{\DAlastitem}[5]{#5\Dfinalstrut} + +\Dprovidelength{\Ditemsep}{0pt} +\providecommand{\DECmakeenumeratedlist}[6]{% + % Make enumerated list. + % Parameters: + % - prefix + % - type (\arabic, \roman, ...) + % - suffix + % - suggested counter name + % - start number - 1 + % - list contents + \newcounter{#4}% + \Dmakelistenvironment{#1#2{#4}#3}{% + % Use as much space as needed for the label. + \setlength{\labelwidth}{10em}% + % Reserve enough space so that the label doesn't go beyond the + % left margin of preceding paragraphs. Like that: + % + % A paragraph. + % + % 1. First item. + \setlength{\leftmargin}{2.5em}% + \Dsetlistrightmargin% + \setlength{\itemsep}{\Ditemsep}% + % Use counter recommended by Python module. + \usecounter{#4}% + % Set start value. + \addtocounter{#4}{#5}% + }{% + % The list contents. + #6% + }% +} + + +% Single quote in literal mode. \textquotesingle from package +% textcomp has wrong width when using package ae, so we use a normal +% single curly quote here. +\providecommand{\DECtextliteralsinglequote}{'} + + +% "Tabular lists" are field lists and options lists (not definition +% lists because there the term always appears on its own line). We'll +% use the terminology of field lists now ("field", "field name", +% "field body"), but the same is also analogously applicable to option +% lists. +% +% We want these lists to be breakable across pages. We cannot +% automatically get the narrowest possible size for the left column +% (i.e. the field names or option groups) because tabularx does not +% support multi-page tables, ltxtable needs to have the table in an +% external file and we don't want to clutter the user's directories +% with auxiliary files created by the filecontents environment, and +% ltablex is not included in teTeX. +% +% Thus we set a fixed length for the left column and use list +% environments. This also has the nice side effect that breaking is +% now possible anywhere, not just between fields. +% +% Note that we are creating a distinct list environment for each +% field. There is no macro for a whole tabular list! 
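+% For example, the fixed widths declared just below can be adjusted from a
+% stylesheet loaded after this one; the 8em/1em values here are only
+% illustrative, not shipped defaults:
+%   \setlength{\Dtabularlistfieldnamewidth}{8em}%
+%   \setlength{\Dtabularlistfieldnamesep}{1em}%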
+\Dprovidelength{\Dtabularlistfieldnamewidth}{6em} +\Dprovidelength{\Dtabularlistfieldnamesep}{0.5em} +\providecommand{\Dinsidetabular}{false} +\providecommand{\Dsavefieldname}{} +\providecommand{\Dsavefieldbody}{} +\Dprovidelength{\Dusedfieldnamewidth}{0pt} +\Dprovidelength{\Drealfieldnamewidth}{0pt} +\providecommand{\Dtabularlistfieldname}[1]{\renewcommand{\Dsavefieldname}{#1}} +\providecommand{\Dtabularlistfieldbody}[1]{\renewcommand{\Dsavefieldbody}{#1}} +\Dprovidelength{\Dparskiptemp}{0pt} +\providecommand{\Dtabularlistfield}[1]{% + {% + % This only saves field name and field body in \Dsavefieldname and + % \Dsavefieldbody, resp. It does not insert any text into the + % document. + #1% + % Recalculate the real field name width everytime we encounter a + % tabular list field because it may have been changed using a + % "raw" node. + \setlength{\Drealfieldnamewidth}{\Dtabularlistfieldnamewidth}% + \addtolength{\Drealfieldnamewidth}{\Dtabularlistfieldnamesep}% + \Dmakelistenvironment{% + \makebox[\Drealfieldnamewidth][l]{\Dsavefieldname}% + }{% + \setlength{\labelwidth}{\Drealfieldnamewidth}% + \setlength{\leftmargin}{\Drealfieldnamewidth}% + \setlength{\rightmargin}{0pt}% + \setlength{\labelsep}{0pt}% + }{% + \item% + \settowidth{\Dusedfieldnamewidth}{\Dsavefieldname}% + \setlength{\Dparskiptemp}{\parskip}% + \ifthenelse{% + \lengthtest{\Dusedfieldnamewidth>\Dtabularlistfieldnamewidth}% + }{% + \mbox{}\par% + \setlength{\parskip}{0pt}% + }{}% + \Dsavefieldbody% + \setlength{\parskip}{\Dparskiptemp}% + %XXX Why did we need this? + %\@finalstrut\@arstrutbox% + }% + \par% + }% +} + +\providecommand{\Dformatfieldname}[1]{\textbf{#1:}} +\providecommand{\DNfieldlist}[1]{#1} +\providecommand{\DNfield}[1]{\Dtabularlistfield{#1}} +\providecommand{\DNfieldname}[1]{% + \Dtabularlistfieldname{% + \Dformatfieldname{#1}% + }% +} +\providecommand{\DNfieldbody}[1]{\Dtabularlistfieldbody{#1}} + +\providecommand{\Dformatoptiongroup}[1]{% + % Format option group, e.g. "-f file, --input file". + \texttt{#1}% +} +\providecommand{\Dformatoption}[1]{% + % Format option, e.g. "-f file". + % Put into mbox to avoid line-breaking at spaces. + \mbox{#1}% +} +\providecommand{\Dformatoptionstring}[1]{% + % Format option string, e.g. "-f". + #1% +} +\providecommand{\Dformatoptionargument}[1]{% + % Format option argument, e.g. "file". + \textsl{#1}% +} +\providecommand{\Dformatoptiondescription}[1]{% + % Format option description, e.g. + % "\DNparagraph{Read input data from file.}" + #1% +} +\providecommand{\DNoptionlist}[1]{#1} +\providecommand{\Doptiongroupjoiner}{,{ }} +\providecommand{\Disfirstoption}{% + % Auxiliary macro indicating if a given option is the first child + % of its option group (if it's not, it has to preceded by + % \Doptiongroupjoiner). + false% +} +\providecommand{\DNoptionlistitem}[1]{% + \Dtabularlistfield{#1}% +} +\providecommand{\DNoptiongroup}[1]{% + \renewcommand{\Disfirstoption}{true}% + \Dtabularlistfieldname{\Dformatoptiongroup{#1}}% +} +\providecommand{\DNoption}[1]{% + % If this is not the first option in this option group, add a + % joiner. 
+ \ifthenelse{\equal{\Disfirstoption}{true}}{% + \renewcommand{\Disfirstoption}{false}% + }{% + \Doptiongroupjoiner% + }% + \Dformatoption{#1}% +} +\providecommand{\DNoptionstring}[1]{\Dformatoptionstring{#1}} +\providecommand{\DNoptionargument}[1]{{ }\Dformatoptionargument{#1}} +\providecommand{\DNdescription}[1]{% + \Dtabularlistfieldbody{\Dformatoptiondescription{#1}}% +} + +\providecommand{\DNdefinitionlist}[1]{% + \begin{description}% + \parskip0pt% + #1% + \end{description}% +} +\providecommand{\DNdefinitionlistitem}[1]{% + % LaTeX expects the label in square brackets; we provide an empty + % label. + \item[]#1% +} +\providecommand{\Dformatterm}[1]{#1} +\providecommand{\DNterm}[1]{\hspace{-5pt}\Dformatterm{#1}} +% I'm still not sure what's the best rendering for classifiers. The +% colon syntax is used by reStructuredText, so it's at least WYSIWYG. +% Use slanted text because italic would cause too much emphasis. +\providecommand{\Dformatclassifier}[1]{\textsl{#1}} +\providecommand{\DNclassifier}[1]{~:~\Dformatclassifier{#1}} +\providecommand{\Dformatdefinition}[1]{#1} +\providecommand{\DNdefinition}[1]{\par\Dformatdefinition{#1}} + +\providecommand{\Dlineblockindentation}{2.5em} +\providecommand{\DNlineblock}[1]{% + \Dmakelistenvironment{}{% + \ifthenelse{\equal{\DEVparent}{lineblock}}{% + % Parent is a line block, so indent. + \setlength{\leftmargin}{\Dlineblockindentation}% + }{% + % At top level; don't indent. + \setlength{\leftmargin}{0pt}% + }% + \setlength{\rightmargin}{0pt}% + \setlength{\parsep}{0pt}% + }{% + #1% + }% +} +\providecommand{\DNline}[1]{\item#1} + +\providecommand{\DNtransition}{% + \raisebox{0.25em}{\parbox{\linewidth}{\hspace*{\fill}\hrulefill\hrulefill\hspace*{\fill}}}% +} + +\providecommand{\Dformatblockquote}[1]{% + % Format contents of block quote. + % This occurs in block-level context, so we cannot use \textsl. + {\slshape#1}% +} +\providecommand{\Dformatattribution}[1]{---\textup{#1}} +\providecommand{\DNblockquote}[1]{% + \Dmakebox{% + \Dformatblockquote{#1} + }% +} +\providecommand{\DNattribution}[1]{% + \par% + \begin{flushright}\Dformatattribution{#1}\end{flushright}% +} + + +% Sidebars: +% Vertical and horizontal margins. +\Dprovidelength{\Dsidebarvmargin}{0.5em} +\Dprovidelength{\Dsidebarhmargin}{1em} +% Padding (space between contents and frame). +\Dprovidelength{\Dsidebarpadding}{1em} +% Frame width. +\Dprovidelength{\Dsidebarframewidth}{2\fboxrule} +% Position ("l" or "r"). +\providecommand{\Dsidebarposition}{r} +% Width. +\Dprovidelength{\Dsidebarwidth}{0.45\linewidth} +\providecommand{\DNsidebar}[1]{ + \parpic[\Dsidebarposition]{% + \begin{minipage}[t]{\Dsidebarwidth}% + % Doing this with nested minipages is ugly, but I haven't found + % another way to place vertical space before and after the fbox. + \vspace{\Dsidebarvmargin}% + {% + \setlength{\fboxrule}{\Dsidebarframewidth}% + \setlength{\fboxsep}{\Dsidebarpadding}% + \fbox{% + \begin{minipage}[t]{\linewidth}% + \setlength{\parindent}{\Dboxparindent}% + #1% + \end{minipage}% + }% + }% + \vspace{\Dsidebarvmargin}% + \end{minipage}% + }% +} + + +% Citations and footnotes. +\providecommand{\Dformatfootnote}[1]{% + % Format footnote. + {% + \footnotesize#1% + % \par is necessary for LaTeX to adjust baselineskip to the + % changed font size. + \par% + }% +} +\providecommand{\Dformatcitation}[1]{\Dformatfootnote{#1}} +\Dprovidelength{\Doriginalbaselineskip}{0pt} +\providecommand{\DNfootnotereference}[1]{% + {% + % \baselineskip is 0pt in \textsuperscript, so we save it here. 
+ \setlength{\Doriginalbaselineskip}{\baselineskip}% + \textsuperscript{#1}% + }% +} +\providecommand{\DNcitationreference}[1]{{[}#1{]}} +\Dprovidelength{\Dfootnotesep}{3.5pt} +\providecommand{\Dsetfootnotespacing}{% + % Spacing commands executed at the beginning of footnotes. + \setlength{\parindent}{0pt}% + \hspace{1em}% +} +\providecommand{\DNfootnote}[1]{% + % See ltfloat.dtx for details. + {% + \insert\footins{% + % BUG: This is too small if the user adds + % \onehalfspacing or \doublespace. + \vspace{\Dfootnotesep}% + \Dsetfootnotespacing% + \Dformatfootnote{#1}% + }% + }% +} +\providecommand{\DNcitation}[1]{\DNfootnote{#1}} +\providecommand{\Dformatfootnotelabel}[1]{% + % Keep \footnotesize in footnote labels (\textsuperscript would + % reduce the font size even more). + \textsuperscript{\footnotesize#1{ }}% +} +\providecommand{\Dformatcitationlabel}[1]{{[}#1{]}{ }} +\providecommand{\Dformatmultiplebackrefs}[1]{% + % If in printing mode, do not write out multiple backrefs. + \ifthenelse{\equal{\Dprinting}{true}}{}{\textsl{#1}}% +} +\providecommand{\Dthislabel}{} +\providecommand{\DNlabel}[1]{% + % Footnote or citatation label. + \renewcommand{\Dthislabel}{#1}% + \ifthenelse{\not\equal{\DEVsinglebackref}{}}{% + \let\Doriginallabel=\Dthislabel% + \def\Dthislabel{% + \Dsinglefootnotebacklink{\DEVsinglebackref}{\Doriginallabel}% + }% + }{}% + \ifthenelse{\equal{\DEVparent}{footnote}}{% + % Footnote label. + \Dformatfootnotelabel{\Dthislabel}% + }{% + \ifthenelse{\equal{\DEVparent}{citation}}{% + % Citation label. + \Dformatcitationlabel{\Dthislabel}% + }{}% + }% + % If there are multiple backrefs, add them now. + \Dformatmultiplebackrefs{\DEVmultiplebackrefs}% +} +\providecommand{\Dsinglefootnotebacklink}[2]{% + % Create normal backlink of a footnote label. Parameters: + % 1. ID. + % 2. Link text. + % Treat like a footnote reference. + \Dimplicitfootnotereference{\##1}{#2}% +} +\providecommand{\DECmultifootnotebacklink}[2]{% + % Create generated backlink, as in (1, 2). Parameters: + % 1. ID. + % 2. Link text. + % Treat like a footnote reference. + \Dimplicitfootnotereference{\##1}{#2}% +} +\providecommand{\Dsinglecitationbacklink}[2]{\Dsinglefootnotebacklink{#1}{#2}} +\providecommand{\DECmulticitationbacklink}[2]{\DECmultifootnotebacklink{#1}{#2}} + + +\providecommand{\DECmaketable}[2]{% + % Make table. Parameters: + % 1. Table spec (like "|p|p|"). + % 2. Table contents. + {% + \ifthenelse{\equal{\Dinsidetabular}{true}}{% + % Inside longtable; we cannot have nested longtables. + \begin{tabular}{#1}% + \hline% + #2% + \end{tabular}% + }{% + \renewcommand{\Dinsidetabular}{true}% + \begin{longtable}{#1}% + \hline% + #2% + \end{longtable}% + }% + }% +} +\providecommand{\DNthead}[1]{% + #1% + \endhead% +} +\providecommand{\DNrow}[1]{% + #1\tabularnewline% + \hline% +} +\providecommand{\Dinsidemulticolumn}{false} +\providecommand{\Dcompensatingmulticol}[3]{% + \multicolumn{#1}{#2}{% + {% + \renewcommand{\Dinsidemulticolumn}{true}% + % Compensate for weird missing vertical space at top of paragraph. + \raisebox{-2.5pt}{#3}% + }% + }% +} +\providecommand{\DECcolspan}[2]{% + % Take care of the morecols attribute (but incremented by 1). + &% + \Dcompensatingmulticol{#1}{l|}{#2}% +} +\providecommand{\DECcolspanleft}[2]{% + % Like \Dmorecols, but called for the leftmost entries in a table + % row. 
+ \Dcompensatingmulticol{#1}{|l|}{#2}% +} +\providecommand{\DECsubsequententry}[1]{% + % +} +\providecommand{\DNentry}[1]{% + % The following sequence adds minimal vertical space above the top + % lines of the first cell paragraph, so that vertical space is + % balanced at the top and bottom of table cells. + \ifthenelse{\equal{\Dinsidemulticolumn}{false}}{% + \vspace{-1em}\vspace{-\parskip}\par% + }{}% + #1% + % No need to add an ampersand ("&"); that's done by \DECsubsequententry. +} +\providecommand{\DAtableheaderentry}[5]{\Dformattableheaderentry{#5}} +\providecommand{\Dformattableheaderentry}[1]{{\bfseries#1}} + + +\providecommand{\DNsystemmessage}[1]{% + {% + \ifthenelse{\equal{\Dprinting}{false}}{\color{red}}{}% + \bfseries% + #1% + }% +} + + +\providecommand{\Dinsidehalign}{false} +\newsavebox{\Dalignedimagebox} +\Dprovidelength{\Dalignedimagewidth}{0pt} +\providecommand{\Dhalign}[2]{% + % Horizontally align the contents to the left or right so that the + % text flows around it. + % Parameters: + % 1. l or r + % 2. Contents. + \renewcommand{\Dinsidehalign}{true}% + % For some obscure reason \parpic consumes some vertical space. + \vspace{-3pt}% + % Now we do something *really* ugly, but this enables us to wrap the + % image in a minipage while still allowing tight frames when + % class=border (see \DNimageCborder). + \sbox{\Dalignedimagebox}{#2}% + \settowidth{\Dalignedimagewidth}{\usebox{\Dalignedimagebox}}% + \parpic[#1]{% + \begin{minipage}[b]{\Dalignedimagewidth}% + % Compensate for previously added space, but not entirely. + \vspace*{2.0pt}% + \vspace*{\Dfloatimagetopmargin}% + \usebox{\Dalignedimagebox}% + \vspace*{1.5pt}% + \vspace*{\Dfloatimagebottommargin}% + \end{minipage}% + }% + \renewcommand{\Dinsidehalign}{false}% +} + + +% Maximum width of an image. +\providecommand{\Dimagemaxwidth}{\linewidth} +\providecommand{\Dfloatimagemaxwidth}{0.5\linewidth} +% Auxiliary variable. +\Dprovidelength{\Dcurrentimagewidth}{0pt} +\providecommand{\DNimageAalign}[5]{% + \ifthenelse{\equal{#3}{left}}{% + \Dhalign{l}{#5}% + }{% + \ifthenelse{\equal{#3}{right}}{% + \Dhalign{r}{#5}% + }{% + \ifthenelse{\equal{#3}{center}}{% + % Text floating around centered figures is a bad idea. Thus + % we use a center environment. Note that no extra space is + % added by the writer, so the space added by the center + % environment is fine. + \begin{center}#5\end{center}% + }{% + #5% + }% + }% + }% +} +% Base path for images. +\providecommand{\Dimagebase}{} +% Auxiliary command. Current image path. +\providecommand{\Dimagepath}{} +\providecommand{\DNimageAuri}[5]{% + % Insert image. We treat the URI like a path here. + \renewcommand{\Dimagepath}{\Dimagebase#3}% + \Difdefined{DcurrentNimageAwidth}{% + \Dwidthimage{\DEVcurrentNimageAwidth}{\Dimagepath}% + }{% + \Dsimpleimage{\Dimagepath}% + }% +} +\Dprovidelength{\Dfloatimagevmargin}{0pt} +\providecommand{\Dfloatimagetopmargin}{\Dfloatimagevmargin} +\providecommand{\Dfloatimagebottommargin}{\Dfloatimagevmargin} +\providecommand{\Dwidthimage}[2]{% + % Image with specified width. + % Parameters: + % 1. Image width. + % 2. Image path. + % Need to make bottom-alignment dependent on align attribute (add + % functional test first). Need to observe height attribute. + %\begin{minipage}[b]{#1}% + \includegraphics[width=#1,height=\textheight,keepaspectratio]{#2}% + %\end{minipage}% +} +\providecommand{\Dcurrentimagemaxwidth}{} +\providecommand{\Dsimpleimage}[1]{% + % Insert image, without much parametrization. 
+ \settowidth{\Dcurrentimagewidth}{\includegraphics{#1}}% + \ifthenelse{\equal{\Dinsidehalign}{true}}{% + \renewcommand{\Dcurrentimagemaxwidth}{\Dfloatimagemaxwidth}% + }{% + \renewcommand{\Dcurrentimagemaxwidth}{\Dimagemaxwidth}% + }% + \ifthenelse{\lengthtest{\Dcurrentimagewidth>\Dcurrentimagemaxwidth}}{% + \Dwidthimage{\Dcurrentimagemaxwidth}{#1}% + }{% + \Dwidthimage{\Dcurrentimagewidth}{#1}% + }% +} +\providecommand{\Dwidthimage}[2]{% + % Image with specified width. + % Parameters: + % 1. Image width. + % 2. Image path. + \Dwidthimage{#1}{#2}% +} + +% Figures. +\providecommand{\DNfigureAalign}[5]{% + % Hack to make it work Right Now. + %\def\DEVcurrentNimageAwidth{\DEVcurrentNfigureAwidth}% + % + %\def\DEVcurrentNimageAwidth{\linewidth}% + \DNimageAalign{#1}{#2}{#3}{#4}{% + \begin{minipage}[b]{0.4\linewidth}#5\end{minipage}}% + %\let\DEVcurrentNimageAwidth=\relax% + % + %\let\DEVcurrentNimageAwidth=\relax% +} +\providecommand{\DNcaption}[1]{\par\noindent{\slshape#1}} +\providecommand{\DNlegend}[1]{\DECauxiliaryspace#1} + +\providecommand{\DCborder}[1]{\fbox{#1}} +% No padding between image and border. +\providecommand{\DNimageCborder}[1]{\frame{#1}} + + +% Need to replace with language-specific stuff. Maybe look at +% csquotes.sty and ask the author for permission to use parts of it. +\providecommand{\DECtextleftdblquote}{``} +\providecommand{\DECtextrightdblquote}{''} + +% Table of contents: +\Dprovidelength{\Dtocininitialsectnumwidth}{2.4em} +\Dprovidelength{\Dtocadditionalsectnumwidth}{0.7em} +% Level inside a table of contents. While this is at -1, we are not +% inside a TOC. +\Dprovidecounter{Dtoclevel}{-1}% +\providecommand{\Dlocaltoc}{false}% +\providecommand{\DNtopicClocal}[1]{% + \renewcommand{\Dlocaltoc}{true}% + \addtolength{\Dtocsectnumwidth}{2\Dtocadditionalsectnumwidth}% + \addtolength{\Dtocindent}{-2\Dtocadditionalsectnumwidth}% + #1% + \addtolength{\Dtocindent}{2\Dtocadditionalsectnumwidth}% + \addtolength{\Dtocsectnumwidth}{-2\Dtocadditionalsectnumwidth}% + \renewcommand{\Dlocaltoc}{false}% +} +\Dprovidelength{\Dtocindent}{0pt}% +\Dprovidelength{\Dtocsectnumwidth}{\Dtocininitialsectnumwidth} +% Compensate for one additional TOC indentation space so that the +% top-level is unindented. +\addtolength{\Dtocsectnumwidth}{-\Dtocadditionalsectnumwidth} +\addtolength{\Dtocindent}{-\Dtocsectnumwidth} +\providecommand{\Difinsidetoc}[2]{% + \ifthenelse{\not\equal{\theDtoclevel}{-1}}{#1}{#2}% +} +\providecommand{\DNgeneratedCsectnum}[1]{% + \Difinsidetoc{% + % Section number inside TOC. + \makebox[\Dtocsectnumwidth][l]{#1}% + }{% + % Section number inside section title. + #1\quad% + }% +} +\providecommand{\Dtocbulletlist}[1]{% + \addtocounter{Dtoclevel}{1}% + \addtolength{\Dtocindent}{\Dtocsectnumwidth}% + \addtolength{\Dtocsectnumwidth}{\Dtocadditionalsectnumwidth}% + #1% + \addtolength{\Dtocsectnumwidth}{-\Dtocadditionalsectnumwidth}% + \addtolength{\Dtocindent}{-\Dtocsectnumwidth}% + \addtocounter{Dtoclevel}{-1}% +} + + +% For \DECpixelunit, the length value is pre-multiplied with 0.75, so by +% specifying "pt" we get the same notion of "pixel" as graphicx. +\providecommand{\DECpixelunit}{pt} +% Normally lengths are relative to the current linewidth. +\providecommand{\DECrelativeunit}{\linewidth} + + +% ACTION: These commands actually *do* something. +% Ultimately, everything should be done here, and no active content should be +% above (not even \usepackage). 
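+% For example (illustrative only), a user stylesheet may override the hooks
+% documented above as overridable before the document tree is processed, e.g.:
+%   \renewcommand{\Dformatsectiontitle}[1]{\textsc{#1}}%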
+ +\DSearly +\DSpackages +\DSfrenchspacing +\DSsymbols +\DSlate + +\makeatother diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/newlatex2e/unicode_map.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/newlatex2e/unicode_map.py Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,2369 @@ +# $Id$ +# Author: Lea Wiemann +# Copyright: This file has been placed in the public domain. + +# This is a mapping of Unicode characters to LaTeX equivalents. +# The information has been extracted from +# , written by +# David Carlisle and Sebastian Rahtz. +# +# The extraction has been done by the "create_unimap.py" script +# located at . + +unicode_map = {u'\xa0': '$~$', +u'\xa1': '{\\textexclamdown}', +u'\xa2': '{\\textcent}', +u'\xa3': '{\\textsterling}', +u'\xa4': '{\\textcurrency}', +u'\xa5': '{\\textyen}', +u'\xa6': '{\\textbrokenbar}', +u'\xa7': '{\\textsection}', +u'\xa8': '{\\textasciidieresis}', +u'\xa9': '{\\textcopyright}', +u'\xaa': '{\\textordfeminine}', +u'\xab': '{\\guillemotleft}', +u'\xac': '$\\lnot$', +u'\xad': '$\\-$', +u'\xae': '{\\textregistered}', +u'\xaf': '{\\textasciimacron}', +u'\xb0': '{\\textdegree}', +u'\xb1': '$\\pm$', +u'\xb2': '${^2}$', +u'\xb3': '${^3}$', +u'\xb4': '{\\textasciiacute}', +u'\xb5': '$\\mathrm{\\mu}$', +u'\xb6': '{\\textparagraph}', +u'\xb7': '$\\cdot$', +u'\xb8': '{\\c{}}', +u'\xb9': '${^1}$', +u'\xba': '{\\textordmasculine}', +u'\xbb': '{\\guillemotright}', +u'\xbc': '{\\textonequarter}', +u'\xbd': '{\\textonehalf}', +u'\xbe': '{\\textthreequarters}', +u'\xbf': '{\\textquestiondown}', +u'\xc0': '{\\`{A}}', +u'\xc1': "{\\'{A}}", +u'\xc2': '{\\^{A}}', +u'\xc3': '{\\~{A}}', +u'\xc4': '{\\"{A}}', +u'\xc5': '{\\AA}', +u'\xc6': '{\\AE}', +u'\xc7': '{\\c{C}}', +u'\xc8': '{\\`{E}}', +u'\xc9': "{\\'{E}}", +u'\xca': '{\\^{E}}', +u'\xcb': '{\\"{E}}', +u'\xcc': '{\\`{I}}', +u'\xcd': "{\\'{I}}", +u'\xce': '{\\^{I}}', +u'\xcf': '{\\"{I}}', +u'\xd0': '{\\DH}', +u'\xd1': '{\\~{N}}', +u'\xd2': '{\\`{O}}', +u'\xd3': "{\\'{O}}", +u'\xd4': '{\\^{O}}', +u'\xd5': '{\\~{O}}', +u'\xd6': '{\\"{O}}', +u'\xd7': '{\\texttimes}', +u'\xd8': '{\\O}', +u'\xd9': '{\\`{U}}', +u'\xda': "{\\'{U}}", +u'\xdb': '{\\^{U}}', +u'\xdc': '{\\"{U}}', +u'\xdd': "{\\'{Y}}", +u'\xde': '{\\TH}', +u'\xdf': '{\\ss}', +u'\xe0': '{\\`{a}}', +u'\xe1': "{\\'{a}}", +u'\xe2': '{\\^{a}}', +u'\xe3': '{\\~{a}}', +u'\xe4': '{\\"{a}}', +u'\xe5': '{\\aa}', +u'\xe6': '{\\ae}', +u'\xe7': '{\\c{c}}', +u'\xe8': '{\\`{e}}', +u'\xe9': "{\\'{e}}", +u'\xea': '{\\^{e}}', +u'\xeb': '{\\"{e}}', +u'\xec': '{\\`{\\i}}', +u'\xed': "{\\'{\\i}}", +u'\xee': '{\\^{\\i}}', +u'\xef': '{\\"{\\i}}', +u'\xf0': '{\\dh}', +u'\xf1': '{\\~{n}}', +u'\xf2': '{\\`{o}}', +u'\xf3': "{\\'{o}}", +u'\xf4': '{\\^{o}}', +u'\xf5': '{\\~{o}}', +u'\xf6': '{\\"{o}}', +u'\xf7': '$\\div$', +u'\xf8': '{\\o}', +u'\xf9': '{\\`{u}}', +u'\xfa': "{\\'{u}}", +u'\xfb': '{\\^{u}}', +u'\xfc': '{\\"{u}}', +u'\xfd': "{\\'{y}}", +u'\xfe': '{\\th}', +u'\xff': '{\\"{y}}', +u'\u0100': '{\\={A}}', +u'\u0101': '{\\={a}}', +u'\u0102': '{\\u{A}}', +u'\u0103': '{\\u{a}}', +u'\u0104': '{\\k{A}}', +u'\u0105': '{\\k{a}}', +u'\u0106': "{\\'{C}}", +u'\u0107': "{\\'{c}}", +u'\u0108': '{\\^{C}}', +u'\u0109': '{\\^{c}}', +u'\u010a': '{\\.{C}}', +u'\u010b': '{\\.{c}}', +u'\u010c': '{\\v{C}}', +u'\u010d': '{\\v{c}}', +u'\u010e': '{\\v{D}}', +u'\u010f': '{\\v{d}}', +u'\u0110': '{\\DJ}', +u'\u0111': '{\\dj}', +u'\u0112': '{\\={E}}', 
+u'\u0113': '{\\={e}}', +u'\u0114': '{\\u{E}}', +u'\u0115': '{\\u{e}}', +u'\u0116': '{\\.{E}}', +u'\u0117': '{\\.{e}}', +u'\u0118': '{\\k{E}}', +u'\u0119': '{\\k{e}}', +u'\u011a': '{\\v{E}}', +u'\u011b': '{\\v{e}}', +u'\u011c': '{\\^{G}}', +u'\u011d': '{\\^{g}}', +u'\u011e': '{\\u{G}}', +u'\u011f': '{\\u{g}}', +u'\u0120': '{\\.{G}}', +u'\u0121': '{\\.{g}}', +u'\u0122': '{\\c{G}}', +u'\u0123': '{\\c{g}}', +u'\u0124': '{\\^{H}}', +u'\u0125': '{\\^{h}}', +u'\u0126': '{{\\fontencoding{LELA}\\selectfont\\char40}}', +u'\u0127': '$\\Elzxh$', +u'\u0128': '{\\~{I}}', +u'\u0129': '{\\~{\\i}}', +u'\u012a': '{\\={I}}', +u'\u012b': '{\\={\\i}}', +u'\u012c': '{\\u{I}}', +u'\u012d': '{\\u{\\i}}', +u'\u012e': '{\\k{I}}', +u'\u012f': '{\\k{i}}', +u'\u0130': '{\\.{I}}', +u'\u0131': '{\\i}', +u'\u0132': '{IJ}', +u'\u0133': '{ij}', +u'\u0134': '{\\^{J}}', +u'\u0135': '{\\^{\\j}}', +u'\u0136': '{\\c{K}}', +u'\u0137': '{\\c{k}}', +u'\u0138': '{{\\fontencoding{LELA}\\selectfont\\char91}}', +u'\u0139': "{\\'{L}}", +u'\u013a': "{\\'{l}}", +u'\u013b': '{\\c{L}}', +u'\u013c': '{\\c{l}}', +u'\u013d': '{\\v{L}}', +u'\u013e': '{\\v{l}}', +u'\u013f': '{{\\fontencoding{LELA}\\selectfont\\char201}}', +u'\u0140': '{{\\fontencoding{LELA}\\selectfont\\char202}}', +u'\u0141': '{\\L}', +u'\u0142': '{\\l}', +u'\u0143': "{\\'{N}}", +u'\u0144': "{\\'{n}}", +u'\u0145': '{\\c{N}}', +u'\u0146': '{\\c{n}}', +u'\u0147': '{\\v{N}}', +u'\u0148': '{\\v{n}}', +u'\u0149': "{'n}", +u'\u014a': '{\\NG}', +u'\u014b': '{\\ng}', +u'\u014c': '{\\={O}}', +u'\u014d': '{\\={o}}', +u'\u014e': '{\\u{O}}', +u'\u014f': '{\\u{o}}', +u'\u0150': '{\\H{O}}', +u'\u0151': '{\\H{o}}', +u'\u0152': '{\\OE}', +u'\u0153': '{\\oe}', +u'\u0154': "{\\'{R}}", +u'\u0155': "{\\'{r}}", +u'\u0156': '{\\c{R}}', +u'\u0157': '{\\c{r}}', +u'\u0158': '{\\v{R}}', +u'\u0159': '{\\v{r}}', +u'\u015a': "{\\'{S}}", +u'\u015b': "{\\'{s}}", +u'\u015c': '{\\^{S}}', +u'\u015d': '{\\^{s}}', +u'\u015e': '{\\c{S}}', +u'\u015f': '{\\c{s}}', +u'\u0160': '{\\v{S}}', +u'\u0161': '{\\v{s}}', +u'\u0162': '{\\c{T}}', +u'\u0163': '{\\c{t}}', +u'\u0164': '{\\v{T}}', +u'\u0165': '{\\v{t}}', +u'\u0166': '{{\\fontencoding{LELA}\\selectfont\\char47}}', +u'\u0167': '{{\\fontencoding{LELA}\\selectfont\\char63}}', +u'\u0168': '{\\~{U}}', +u'\u0169': '{\\~{u}}', +u'\u016a': '{\\={U}}', +u'\u016b': '{\\={u}}', +u'\u016c': '{\\u{U}}', +u'\u016d': '{\\u{u}}', +u'\u016e': '{\\r{U}}', +u'\u016f': '{\\r{u}}', +u'\u0170': '{\\H{U}}', +u'\u0171': '{\\H{u}}', +u'\u0172': '{\\k{U}}', +u'\u0173': '{\\k{u}}', +u'\u0174': '{\\^{W}}', +u'\u0175': '{\\^{w}}', +u'\u0176': '{\\^{Y}}', +u'\u0177': '{\\^{y}}', +u'\u0178': '{\\"{Y}}', +u'\u0179': "{\\'{Z}}", +u'\u017a': "{\\'{z}}", +u'\u017b': '{\\.{Z}}', +u'\u017c': '{\\.{z}}', +u'\u017d': '{\\v{Z}}', +u'\u017e': '{\\v{z}}', +u'\u0192': '$f$', +u'\u0195': '{\\texthvlig}', +u'\u019e': '{\\textnrleg}', +u'\u01aa': '$\\eth$', +u'\u01ba': '{{\\fontencoding{LELA}\\selectfont\\char195}}', +u'\u01c2': '{\\textdoublepipe}', +u'\u01f5': "{\\'{g}}", +u'\u0250': '$\\Elztrna$', +u'\u0252': '$\\Elztrnsa$', +u'\u0254': '$\\Elzopeno$', +u'\u0256': '$\\Elzrtld$', +u'\u0258': '{{\\fontencoding{LEIP}\\selectfont\\char61}}', +u'\u0259': '$\\Elzschwa$', +u'\u025b': '$\\varepsilon$', +u'\u0261': '{g}', +u'\u0263': '$\\Elzpgamma$', +u'\u0264': '$\\Elzpbgam$', +u'\u0265': '$\\Elztrnh$', +u'\u026c': '$\\Elzbtdl$', +u'\u026d': '$\\Elzrtll$', +u'\u026f': '$\\Elztrnm$', +u'\u0270': '$\\Elztrnmlr$', +u'\u0271': '$\\Elzltlmr$', +u'\u0272': '{\\Elzltln}', +u'\u0273': '$\\Elzrtln$', +u'\u0277': 
'$\\Elzclomeg$', +u'\u0278': '{\\textphi}', +u'\u0279': '$\\Elztrnr$', +u'\u027a': '$\\Elztrnrl$', +u'\u027b': '$\\Elzrttrnr$', +u'\u027c': '$\\Elzrl$', +u'\u027d': '$\\Elzrtlr$', +u'\u027e': '$\\Elzfhr$', +u'\u027f': '{{\\fontencoding{LEIP}\\selectfont\\char202}}', +u'\u0282': '$\\Elzrtls$', +u'\u0283': '$\\Elzesh$', +u'\u0287': '$\\Elztrnt$', +u'\u0288': '$\\Elzrtlt$', +u'\u028a': '$\\Elzpupsil$', +u'\u028b': '$\\Elzpscrv$', +u'\u028c': '$\\Elzinvv$', +u'\u028d': '$\\Elzinvw$', +u'\u028e': '$\\Elztrny$', +u'\u0290': '$\\Elzrtlz$', +u'\u0292': '$\\Elzyogh$', +u'\u0294': '$\\Elzglst$', +u'\u0295': '$\\Elzreglst$', +u'\u0296': '$\\Elzinglst$', +u'\u029e': '{\\textturnk}', +u'\u02a4': '$\\Elzdyogh$', +u'\u02a7': '$\\Elztesh$', +u'\u02bc': "{'}", +u'\u02c7': '{\\textasciicaron}', +u'\u02c8': '$\\Elzverts$', +u'\u02cc': '$\\Elzverti$', +u'\u02d0': '$\\Elzlmrk$', +u'\u02d1': '$\\Elzhlmrk$', +u'\u02d2': '$\\Elzsbrhr$', +u'\u02d3': '$\\Elzsblhr$', +u'\u02d4': '$\\Elzrais$', +u'\u02d5': '$\\Elzlow$', +u'\u02d8': '{\\textasciibreve}', +u'\u02d9': '{\\textperiodcentered}', +u'\u02da': '{\\r{}}', +u'\u02db': '{\\k{}}', +u'\u02dc': '{\\texttildelow}', +u'\u02dd': '{\\H{}}', +u'\u02e5': '{\\tone{55}}', +u'\u02e6': '{\\tone{44}}', +u'\u02e7': '{\\tone{33}}', +u'\u02e8': '{\\tone{22}}', +u'\u02e9': '{\\tone{11}}', +u'\u0300': '{\\`}', +u'\u0301': "{\\'}", +u'\u0302': '{\\^}', +u'\u0303': '{\\~}', +u'\u0304': '{\\=}', +u'\u0306': '{\\u}', +u'\u0307': '{\\.}', +u'\u0308': '{\\"}', +u'\u030a': '{\\r}', +u'\u030b': '{\\H}', +u'\u030c': '{\\v}', +u'\u030f': '{\\cyrchar\\C}', +u'\u0311': '{{\\fontencoding{LECO}\\selectfont\\char177}}', +u'\u0318': '{{\\fontencoding{LECO}\\selectfont\\char184}}', +u'\u0319': '{{\\fontencoding{LECO}\\selectfont\\char185}}', +u'\u0321': '$\\Elzpalh$', +u'\u0322': '{\\Elzrh}', +u'\u0327': '{\\c}', +u'\u0328': '{\\k}', +u'\u032a': '$\\Elzsbbrg$', +u'\u032b': '{{\\fontencoding{LECO}\\selectfont\\char203}}', +u'\u032f': '{{\\fontencoding{LECO}\\selectfont\\char207}}', +u'\u0335': '{\\Elzxl}', +u'\u0336': '{\\Elzbar}', +u'\u0337': '{{\\fontencoding{LECO}\\selectfont\\char215}}', +u'\u0338': '{{\\fontencoding{LECO}\\selectfont\\char216}}', +u'\u033a': '{{\\fontencoding{LECO}\\selectfont\\char218}}', +u'\u033b': '{{\\fontencoding{LECO}\\selectfont\\char219}}', +u'\u033c': '{{\\fontencoding{LECO}\\selectfont\\char220}}', +u'\u033d': '{{\\fontencoding{LECO}\\selectfont\\char221}}', +u'\u0361': '{{\\fontencoding{LECO}\\selectfont\\char225}}', +u'\u0386': "{\\'{A}}", +u'\u0388': "{\\'{E}}", +u'\u0389': "{\\'{H}}", +u'\u038a': "{\\'{}{I}}", +u'\u038c': "{\\'{}O}", +u'\u038e': "$\\mathrm{'Y}$", +u'\u038f': "$\\mathrm{'\\Omega}$", +u'\u0390': '$\\acute{\\ddot{\\iota}}$', +u'\u0391': '$\\Alpha$', +u'\u0392': '$\\Beta$', +u'\u0393': '$\\Gamma$', +u'\u0394': '$\\Delta$', +u'\u0395': '$\\Epsilon$', +u'\u0396': '$\\Zeta$', +u'\u0397': '$\\Eta$', +u'\u0398': '$\\Theta$', +u'\u0399': '$\\Iota$', +u'\u039a': '$\\Kappa$', +u'\u039b': '$\\Lambda$', +u'\u039c': '$M$', +u'\u039d': '$N$', +u'\u039e': '$\\Xi$', +u'\u039f': '$O$', +u'\u03a0': '$\\Pi$', +u'\u03a1': '$\\Rho$', +u'\u03a3': '$\\Sigma$', +u'\u03a4': '$\\Tau$', +u'\u03a5': '$\\Upsilon$', +u'\u03a6': '$\\Phi$', +u'\u03a7': '$\\Chi$', +u'\u03a8': '$\\Psi$', +u'\u03a9': '$\\Omega$', +u'\u03aa': '$\\mathrm{\\ddot{I}}$', +u'\u03ab': '$\\mathrm{\\ddot{Y}}$', +u'\u03ac': "{\\'{$\\alpha$}}", +u'\u03ad': '$\\acute{\\epsilon}$', +u'\u03ae': '$\\acute{\\eta}$', +u'\u03af': '$\\acute{\\iota}$', +u'\u03b0': '$\\acute{\\ddot{\\upsilon}}$', +u'\u03b1': 
'$\\alpha$', +u'\u03b2': '$\\beta$', +u'\u03b3': '$\\gamma$', +u'\u03b4': '$\\delta$', +u'\u03b5': '$\\epsilon$', +u'\u03b6': '$\\zeta$', +u'\u03b7': '$\\eta$', +u'\u03b8': '{\\texttheta}', +u'\u03b9': '$\\iota$', +u'\u03ba': '$\\kappa$', +u'\u03bb': '$\\lambda$', +u'\u03bc': '$\\mu$', +u'\u03bd': '$\\nu$', +u'\u03be': '$\\xi$', +u'\u03bf': '$o$', +u'\u03c0': '$\\pi$', +u'\u03c1': '$\\rho$', +u'\u03c2': '$\\varsigma$', +u'\u03c3': '$\\sigma$', +u'\u03c4': '$\\tau$', +u'\u03c5': '$\\upsilon$', +u'\u03c6': '$\\varphi$', +u'\u03c7': '$\\chi$', +u'\u03c8': '$\\psi$', +u'\u03c9': '$\\omega$', +u'\u03ca': '$\\ddot{\\iota}$', +u'\u03cb': '$\\ddot{\\upsilon}$', +u'\u03cc': "{\\'{o}}", +u'\u03cd': '$\\acute{\\upsilon}$', +u'\u03ce': '$\\acute{\\omega}$', +u'\u03d0': '{\\Pisymbol{ppi022}{87}}', +u'\u03d1': '{\\textvartheta}', +u'\u03d2': '$\\Upsilon$', +u'\u03d5': '$\\phi$', +u'\u03d6': '$\\varpi$', +u'\u03da': '$\\Stigma$', +u'\u03dc': '$\\Digamma$', +u'\u03dd': '$\\digamma$', +u'\u03de': '$\\Koppa$', +u'\u03e0': '$\\Sampi$', +u'\u03f0': '$\\varkappa$', +u'\u03f1': '$\\varrho$', +u'\u03f4': '{\\textTheta}', +u'\u03f6': '$\\backepsilon$', +u'\u0401': '{\\cyrchar\\CYRYO}', +u'\u0402': '{\\cyrchar\\CYRDJE}', +u'\u0403': "{\\cyrchar{\\'\\CYRG}}", +u'\u0404': '{\\cyrchar\\CYRIE}', +u'\u0405': '{\\cyrchar\\CYRDZE}', +u'\u0406': '{\\cyrchar\\CYRII}', +u'\u0407': '{\\cyrchar\\CYRYI}', +u'\u0408': '{\\cyrchar\\CYRJE}', +u'\u0409': '{\\cyrchar\\CYRLJE}', +u'\u040a': '{\\cyrchar\\CYRNJE}', +u'\u040b': '{\\cyrchar\\CYRTSHE}', +u'\u040c': "{\\cyrchar{\\'\\CYRK}}", +u'\u040e': '{\\cyrchar\\CYRUSHRT}', +u'\u040f': '{\\cyrchar\\CYRDZHE}', +u'\u0410': '{\\cyrchar\\CYRA}', +u'\u0411': '{\\cyrchar\\CYRB}', +u'\u0412': '{\\cyrchar\\CYRV}', +u'\u0413': '{\\cyrchar\\CYRG}', +u'\u0414': '{\\cyrchar\\CYRD}', +u'\u0415': '{\\cyrchar\\CYRE}', +u'\u0416': '{\\cyrchar\\CYRZH}', +u'\u0417': '{\\cyrchar\\CYRZ}', +u'\u0418': '{\\cyrchar\\CYRI}', +u'\u0419': '{\\cyrchar\\CYRISHRT}', +u'\u041a': '{\\cyrchar\\CYRK}', +u'\u041b': '{\\cyrchar\\CYRL}', +u'\u041c': '{\\cyrchar\\CYRM}', +u'\u041d': '{\\cyrchar\\CYRN}', +u'\u041e': '{\\cyrchar\\CYRO}', +u'\u041f': '{\\cyrchar\\CYRP}', +u'\u0420': '{\\cyrchar\\CYRR}', +u'\u0421': '{\\cyrchar\\CYRS}', +u'\u0422': '{\\cyrchar\\CYRT}', +u'\u0423': '{\\cyrchar\\CYRU}', +u'\u0424': '{\\cyrchar\\CYRF}', +u'\u0425': '{\\cyrchar\\CYRH}', +u'\u0426': '{\\cyrchar\\CYRC}', +u'\u0427': '{\\cyrchar\\CYRCH}', +u'\u0428': '{\\cyrchar\\CYRSH}', +u'\u0429': '{\\cyrchar\\CYRSHCH}', +u'\u042a': '{\\cyrchar\\CYRHRDSN}', +u'\u042b': '{\\cyrchar\\CYRERY}', +u'\u042c': '{\\cyrchar\\CYRSFTSN}', +u'\u042d': '{\\cyrchar\\CYREREV}', +u'\u042e': '{\\cyrchar\\CYRYU}', +u'\u042f': '{\\cyrchar\\CYRYA}', +u'\u0430': '{\\cyrchar\\cyra}', +u'\u0431': '{\\cyrchar\\cyrb}', +u'\u0432': '{\\cyrchar\\cyrv}', +u'\u0433': '{\\cyrchar\\cyrg}', +u'\u0434': '{\\cyrchar\\cyrd}', +u'\u0435': '{\\cyrchar\\cyre}', +u'\u0436': '{\\cyrchar\\cyrzh}', +u'\u0437': '{\\cyrchar\\cyrz}', +u'\u0438': '{\\cyrchar\\cyri}', +u'\u0439': '{\\cyrchar\\cyrishrt}', +u'\u043a': '{\\cyrchar\\cyrk}', +u'\u043b': '{\\cyrchar\\cyrl}', +u'\u043c': '{\\cyrchar\\cyrm}', +u'\u043d': '{\\cyrchar\\cyrn}', +u'\u043e': '{\\cyrchar\\cyro}', +u'\u043f': '{\\cyrchar\\cyrp}', +u'\u0440': '{\\cyrchar\\cyrr}', +u'\u0441': '{\\cyrchar\\cyrs}', +u'\u0442': '{\\cyrchar\\cyrt}', +u'\u0443': '{\\cyrchar\\cyru}', +u'\u0444': '{\\cyrchar\\cyrf}', +u'\u0445': '{\\cyrchar\\cyrh}', +u'\u0446': '{\\cyrchar\\cyrc}', +u'\u0447': '{\\cyrchar\\cyrch}', +u'\u0448': '{\\cyrchar\\cyrsh}', 
+u'\u0449': '{\\cyrchar\\cyrshch}', +u'\u044a': '{\\cyrchar\\cyrhrdsn}', +u'\u044b': '{\\cyrchar\\cyrery}', +u'\u044c': '{\\cyrchar\\cyrsftsn}', +u'\u044d': '{\\cyrchar\\cyrerev}', +u'\u044e': '{\\cyrchar\\cyryu}', +u'\u044f': '{\\cyrchar\\cyrya}', +u'\u0451': '{\\cyrchar\\cyryo}', +u'\u0452': '{\\cyrchar\\cyrdje}', +u'\u0453': "{\\cyrchar{\\'\\cyrg}}", +u'\u0454': '{\\cyrchar\\cyrie}', +u'\u0455': '{\\cyrchar\\cyrdze}', +u'\u0456': '{\\cyrchar\\cyrii}', +u'\u0457': '{\\cyrchar\\cyryi}', +u'\u0458': '{\\cyrchar\\cyrje}', +u'\u0459': '{\\cyrchar\\cyrlje}', +u'\u045a': '{\\cyrchar\\cyrnje}', +u'\u045b': '{\\cyrchar\\cyrtshe}', +u'\u045c': "{\\cyrchar{\\'\\cyrk}}", +u'\u045e': '{\\cyrchar\\cyrushrt}', +u'\u045f': '{\\cyrchar\\cyrdzhe}', +u'\u0460': '{\\cyrchar\\CYROMEGA}', +u'\u0461': '{\\cyrchar\\cyromega}', +u'\u0462': '{\\cyrchar\\CYRYAT}', +u'\u0464': '{\\cyrchar\\CYRIOTE}', +u'\u0465': '{\\cyrchar\\cyriote}', +u'\u0466': '{\\cyrchar\\CYRLYUS}', +u'\u0467': '{\\cyrchar\\cyrlyus}', +u'\u0468': '{\\cyrchar\\CYRIOTLYUS}', +u'\u0469': '{\\cyrchar\\cyriotlyus}', +u'\u046a': '{\\cyrchar\\CYRBYUS}', +u'\u046c': '{\\cyrchar\\CYRIOTBYUS}', +u'\u046d': '{\\cyrchar\\cyriotbyus}', +u'\u046e': '{\\cyrchar\\CYRKSI}', +u'\u046f': '{\\cyrchar\\cyrksi}', +u'\u0470': '{\\cyrchar\\CYRPSI}', +u'\u0471': '{\\cyrchar\\cyrpsi}', +u'\u0472': '{\\cyrchar\\CYRFITA}', +u'\u0474': '{\\cyrchar\\CYRIZH}', +u'\u0478': '{\\cyrchar\\CYRUK}', +u'\u0479': '{\\cyrchar\\cyruk}', +u'\u047a': '{\\cyrchar\\CYROMEGARND}', +u'\u047b': '{\\cyrchar\\cyromegarnd}', +u'\u047c': '{\\cyrchar\\CYROMEGATITLO}', +u'\u047d': '{\\cyrchar\\cyromegatitlo}', +u'\u047e': '{\\cyrchar\\CYROT}', +u'\u047f': '{\\cyrchar\\cyrot}', +u'\u0480': '{\\cyrchar\\CYRKOPPA}', +u'\u0481': '{\\cyrchar\\cyrkoppa}', +u'\u0482': '{\\cyrchar\\cyrthousands}', +u'\u0488': '{\\cyrchar\\cyrhundredthousands}', +u'\u0489': '{\\cyrchar\\cyrmillions}', +u'\u048c': '{\\cyrchar\\CYRSEMISFTSN}', +u'\u048d': '{\\cyrchar\\cyrsemisftsn}', +u'\u048e': '{\\cyrchar\\CYRRTICK}', +u'\u048f': '{\\cyrchar\\cyrrtick}', +u'\u0490': '{\\cyrchar\\CYRGUP}', +u'\u0491': '{\\cyrchar\\cyrgup}', +u'\u0492': '{\\cyrchar\\CYRGHCRS}', +u'\u0493': '{\\cyrchar\\cyrghcrs}', +u'\u0494': '{\\cyrchar\\CYRGHK}', +u'\u0495': '{\\cyrchar\\cyrghk}', +u'\u0496': '{\\cyrchar\\CYRZHDSC}', +u'\u0497': '{\\cyrchar\\cyrzhdsc}', +u'\u0498': '{\\cyrchar\\CYRZDSC}', +u'\u0499': '{\\cyrchar\\cyrzdsc}', +u'\u049a': '{\\cyrchar\\CYRKDSC}', +u'\u049b': '{\\cyrchar\\cyrkdsc}', +u'\u049c': '{\\cyrchar\\CYRKVCRS}', +u'\u049d': '{\\cyrchar\\cyrkvcrs}', +u'\u049e': '{\\cyrchar\\CYRKHCRS}', +u'\u049f': '{\\cyrchar\\cyrkhcrs}', +u'\u04a0': '{\\cyrchar\\CYRKBEAK}', +u'\u04a1': '{\\cyrchar\\cyrkbeak}', +u'\u04a2': '{\\cyrchar\\CYRNDSC}', +u'\u04a3': '{\\cyrchar\\cyrndsc}', +u'\u04a4': '{\\cyrchar\\CYRNG}', +u'\u04a5': '{\\cyrchar\\cyrng}', +u'\u04a6': '{\\cyrchar\\CYRPHK}', +u'\u04a7': '{\\cyrchar\\cyrphk}', +u'\u04a8': '{\\cyrchar\\CYRABHHA}', +u'\u04a9': '{\\cyrchar\\cyrabhha}', +u'\u04aa': '{\\cyrchar\\CYRSDSC}', +u'\u04ab': '{\\cyrchar\\cyrsdsc}', +u'\u04ac': '{\\cyrchar\\CYRTDSC}', +u'\u04ad': '{\\cyrchar\\cyrtdsc}', +u'\u04ae': '{\\cyrchar\\CYRY}', +u'\u04af': '{\\cyrchar\\cyry}', +u'\u04b0': '{\\cyrchar\\CYRYHCRS}', +u'\u04b1': '{\\cyrchar\\cyryhcrs}', +u'\u04b2': '{\\cyrchar\\CYRHDSC}', +u'\u04b3': '{\\cyrchar\\cyrhdsc}', +u'\u04b4': '{\\cyrchar\\CYRTETSE}', +u'\u04b5': '{\\cyrchar\\cyrtetse}', +u'\u04b6': '{\\cyrchar\\CYRCHRDSC}', +u'\u04b7': '{\\cyrchar\\cyrchrdsc}', +u'\u04b8': '{\\cyrchar\\CYRCHVCRS}', +u'\u04b9': 
'{\\cyrchar\\cyrchvcrs}', +u'\u04ba': '{\\cyrchar\\CYRSHHA}', +u'\u04bb': '{\\cyrchar\\cyrshha}', +u'\u04bc': '{\\cyrchar\\CYRABHCH}', +u'\u04bd': '{\\cyrchar\\cyrabhch}', +u'\u04be': '{\\cyrchar\\CYRABHCHDSC}', +u'\u04bf': '{\\cyrchar\\cyrabhchdsc}', +u'\u04c0': '{\\cyrchar\\CYRpalochka}', +u'\u04c3': '{\\cyrchar\\CYRKHK}', +u'\u04c4': '{\\cyrchar\\cyrkhk}', +u'\u04c7': '{\\cyrchar\\CYRNHK}', +u'\u04c8': '{\\cyrchar\\cyrnhk}', +u'\u04cb': '{\\cyrchar\\CYRCHLDSC}', +u'\u04cc': '{\\cyrchar\\cyrchldsc}', +u'\u04d4': '{\\cyrchar\\CYRAE}', +u'\u04d5': '{\\cyrchar\\cyrae}', +u'\u04d8': '{\\cyrchar\\CYRSCHWA}', +u'\u04d9': '{\\cyrchar\\cyrschwa}', +u'\u04e0': '{\\cyrchar\\CYRABHDZE}', +u'\u04e1': '{\\cyrchar\\cyrabhdze}', +u'\u04e8': '{\\cyrchar\\CYROTLD}', +u'\u04e9': '{\\cyrchar\\cyrotld}', +u'\u2002': '{\\hspace{0.6em}}', +u'\u2003': '{\\hspace{1em}}', +u'\u2004': '{\\hspace{0.33em}}', +u'\u2005': '{\\hspace{0.25em}}', +u'\u2006': '{\\hspace{0.166em}}', +u'\u2007': '{\\hphantom{0}}', +u'\u2008': '{\\hphantom{,}}', +u'\u2009': '{\\hspace{0.167em}}', +u'\u200a': '$\\mkern1mu$', +u'\u2010': '{-}', +u'\u2013': '{\\textendash}', +u'\u2014': '{\\textemdash}', +u'\u2015': '{\\rule{1em}{1pt}}', +u'\u2016': '$\\Vert$', +u'\u2018': '{`}', +u'\u2019': "{'}", +u'\u201a': '{,}', +u'\u201b': '$\\Elzreapos$', +u'\u201c': '{\\textquotedblleft}', +u'\u201d': '{\\textquotedblright}', +u'\u201e': '{,,}', +u'\u2020': '{\\textdagger}', +u'\u2021': '{\\textdaggerdbl}', +u'\u2022': '{\\textbullet}', +u'\u2024': '{.}', +u'\u2025': '{..}', +u'\u2026': '{\\ldots}', +u'\u2030': '{\\textperthousand}', +u'\u2031': '{\\textpertenthousand}', +u'\u2032': "${'}$", +u'\u2033': "${''}$", +u'\u2034': "${'''}$", +u'\u2035': '$\\backprime$', +u'\u2039': '{\\guilsinglleft}', +u'\u203a': '{\\guilsinglright}', +u'\u2057': "$''''$", +u'\u205f': '{\\mkern4mu}', +u'\u2060': '{\\nolinebreak}', +u'\u20a7': '{\\ensuremath{\\Elzpes}}', +u'\u20ac': '{\\mbox{\\texteuro}}', +u'\u20db': '$\\dddot$', +u'\u20dc': '$\\ddddot$', +u'\u2102': '$\\mathbb{C}$', +u'\u210a': '{\\mathscr{g}}', +u'\u210b': '$\\mathscr{H}$', +u'\u210c': '$\\mathfrak{H}$', +u'\u210d': '$\\mathbb{H}$', +u'\u210f': '$\\hslash$', +u'\u2110': '$\\mathscr{I}$', +u'\u2111': '$\\mathfrak{I}$', +u'\u2112': '$\\mathscr{L}$', +u'\u2113': '$\\mathscr{l}$', +u'\u2115': '$\\mathbb{N}$', +u'\u2116': '{\\cyrchar\\textnumero}', +u'\u2118': '$\\wp$', +u'\u2119': '$\\mathbb{P}$', +u'\u211a': '$\\mathbb{Q}$', +u'\u211b': '$\\mathscr{R}$', +u'\u211c': '$\\mathfrak{R}$', +u'\u211d': '$\\mathbb{R}$', +u'\u211e': '$\\Elzxrat$', +u'\u2122': '{\\texttrademark}', +u'\u2124': '$\\mathbb{Z}$', +u'\u2126': '$\\Omega$', +u'\u2127': '$\\mho$', +u'\u2128': '$\\mathfrak{Z}$', +u'\u2129': '$\\ElsevierGlyph{2129}$', +u'\u212b': '{\\AA}', +u'\u212c': '$\\mathscr{B}$', +u'\u212d': '$\\mathfrak{C}$', +u'\u212f': '$\\mathscr{e}$', +u'\u2130': '$\\mathscr{E}$', +u'\u2131': '$\\mathscr{F}$', +u'\u2133': '$\\mathscr{M}$', +u'\u2134': '$\\mathscr{o}$', +u'\u2135': '$\\aleph$', +u'\u2136': '$\\beth$', +u'\u2137': '$\\gimel$', +u'\u2138': '$\\daleth$', +u'\u2153': '$\\textfrac{1}{3}$', +u'\u2154': '$\\textfrac{2}{3}$', +u'\u2155': '$\\textfrac{1}{5}$', +u'\u2156': '$\\textfrac{2}{5}$', +u'\u2157': '$\\textfrac{3}{5}$', +u'\u2158': '$\\textfrac{4}{5}$', +u'\u2159': '$\\textfrac{1}{6}$', +u'\u215a': '$\\textfrac{5}{6}$', +u'\u215b': '$\\textfrac{1}{8}$', +u'\u215c': '$\\textfrac{3}{8}$', +u'\u215d': '$\\textfrac{5}{8}$', +u'\u215e': '$\\textfrac{7}{8}$', +u'\u2190': '$\\leftarrow$', +u'\u2191': '$\\uparrow$', 
+u'\u2192': '$\\rightarrow$', +u'\u2193': '$\\downarrow$', +u'\u2194': '$\\leftrightarrow$', +u'\u2195': '$\\updownarrow$', +u'\u2196': '$\\nwarrow$', +u'\u2197': '$\\nearrow$', +u'\u2198': '$\\searrow$', +u'\u2199': '$\\swarrow$', +u'\u219a': '$\\nleftarrow$', +u'\u219b': '$\\nrightarrow$', +u'\u219c': '$\\arrowwaveright$', +u'\u219d': '$\\arrowwaveright$', +u'\u219e': '$\\twoheadleftarrow$', +u'\u21a0': '$\\twoheadrightarrow$', +u'\u21a2': '$\\leftarrowtail$', +u'\u21a3': '$\\rightarrowtail$', +u'\u21a6': '$\\mapsto$', +u'\u21a9': '$\\hookleftarrow$', +u'\u21aa': '$\\hookrightarrow$', +u'\u21ab': '$\\looparrowleft$', +u'\u21ac': '$\\looparrowright$', +u'\u21ad': '$\\leftrightsquigarrow$', +u'\u21ae': '$\\nleftrightarrow$', +u'\u21b0': '$\\Lsh$', +u'\u21b1': '$\\Rsh$', +u'\u21b3': '$\\ElsevierGlyph{21B3}$', +u'\u21b6': '$\\curvearrowleft$', +u'\u21b7': '$\\curvearrowright$', +u'\u21ba': '$\\circlearrowleft$', +u'\u21bb': '$\\circlearrowright$', +u'\u21bc': '$\\leftharpoonup$', +u'\u21bd': '$\\leftharpoondown$', +u'\u21be': '$\\upharpoonright$', +u'\u21bf': '$\\upharpoonleft$', +u'\u21c0': '$\\rightharpoonup$', +u'\u21c1': '$\\rightharpoondown$', +u'\u21c2': '$\\downharpoonright$', +u'\u21c3': '$\\downharpoonleft$', +u'\u21c4': '$\\rightleftarrows$', +u'\u21c5': '$\\dblarrowupdown$', +u'\u21c6': '$\\leftrightarrows$', +u'\u21c7': '$\\leftleftarrows$', +u'\u21c8': '$\\upuparrows$', +u'\u21c9': '$\\rightrightarrows$', +u'\u21ca': '$\\downdownarrows$', +u'\u21cb': '$\\leftrightharpoons$', +u'\u21cc': '$\\rightleftharpoons$', +u'\u21cd': '$\\nLeftarrow$', +u'\u21ce': '$\\nLeftrightarrow$', +u'\u21cf': '$\\nRightarrow$', +u'\u21d0': '$\\Leftarrow$', +u'\u21d1': '$\\Uparrow$', +u'\u21d2': '$\\Rightarrow$', +u'\u21d3': '$\\Downarrow$', +u'\u21d4': '$\\Leftrightarrow$', +u'\u21d5': '$\\Updownarrow$', +u'\u21da': '$\\Lleftarrow$', +u'\u21db': '$\\Rrightarrow$', +u'\u21dd': '$\\rightsquigarrow$', +u'\u21f5': '$\\DownArrowUpArrow$', +u'\u2200': '$\\forall$', +u'\u2201': '$\\complement$', +u'\u2202': '$\\partial$', +u'\u2203': '$\\exists$', +u'\u2204': '$\\nexists$', +u'\u2205': '$\\varnothing$', +u'\u2207': '$\\nabla$', +u'\u2208': '$\\in$', +u'\u2209': '$\\not\\in$', +u'\u220b': '$\\ni$', +u'\u220c': '$\\not\\ni$', +u'\u220f': '$\\prod$', +u'\u2210': '$\\coprod$', +u'\u2211': '$\\sum$', +u'\u2212': '{-}', +u'\u2213': '$\\mp$', +u'\u2214': '$\\dotplus$', +u'\u2216': '$\\setminus$', +u'\u2217': '${_\\ast}$', +u'\u2218': '$\\circ$', +u'\u2219': '$\\bullet$', +u'\u221a': '$\\surd$', +u'\u221d': '$\\propto$', +u'\u221e': '$\\infty$', +u'\u221f': '$\\rightangle$', +u'\u2220': '$\\angle$', +u'\u2221': '$\\measuredangle$', +u'\u2222': '$\\sphericalangle$', +u'\u2223': '$\\mid$', +u'\u2224': '$\\nmid$', +u'\u2225': '$\\parallel$', +u'\u2226': '$\\nparallel$', +u'\u2227': '$\\wedge$', +u'\u2228': '$\\vee$', +u'\u2229': '$\\cap$', +u'\u222a': '$\\cup$', +u'\u222b': '$\\int$', +u'\u222c': '$\\int\\!\\int$', +u'\u222d': '$\\int\\!\\int\\!\\int$', +u'\u222e': '$\\oint$', +u'\u222f': '$\\surfintegral$', +u'\u2230': '$\\volintegral$', +u'\u2231': '$\\clwintegral$', +u'\u2232': '$\\ElsevierGlyph{2232}$', +u'\u2233': '$\\ElsevierGlyph{2233}$', +u'\u2234': '$\\therefore$', +u'\u2235': '$\\because$', +u'\u2237': '$\\Colon$', +u'\u2238': '$\\ElsevierGlyph{2238}$', +u'\u223a': '$\\mathbin{{:}\\!\\!{-}\\!\\!{:}}$', +u'\u223b': '$\\homothetic$', +u'\u223c': '$\\sim$', +u'\u223d': '$\\backsim$', +u'\u223e': '$\\lazysinv$', +u'\u2240': '$\\wr$', +u'\u2241': '$\\not\\sim$', +u'\u2242': '$\\ElsevierGlyph{2242}$', +u'\u2243': 
'$\\simeq$', +u'\u2244': '$\\not\\simeq$', +u'\u2245': '$\\cong$', +u'\u2246': '$\\approxnotequal$', +u'\u2247': '$\\not\\cong$', +u'\u2248': '$\\approx$', +u'\u2249': '$\\not\\approx$', +u'\u224a': '$\\approxeq$', +u'\u224b': '$\\tildetrpl$', +u'\u224c': '$\\allequal$', +u'\u224d': '$\\asymp$', +u'\u224e': '$\\Bumpeq$', +u'\u224f': '$\\bumpeq$', +u'\u2250': '$\\doteq$', +u'\u2251': '$\\doteqdot$', +u'\u2252': '$\\fallingdotseq$', +u'\u2253': '$\\risingdotseq$', +u'\u2254': '{:=}', +u'\u2255': '$=:$', +u'\u2256': '$\\eqcirc$', +u'\u2257': '$\\circeq$', +u'\u2259': '$\\estimates$', +u'\u225a': '$\\ElsevierGlyph{225A}$', +u'\u225b': '$\\starequal$', +u'\u225c': '$\\triangleq$', +u'\u225f': '$\\ElsevierGlyph{225F}$', +u'\u2260': '$\\not =$', +u'\u2261': '$\\equiv$', +u'\u2262': '$\\not\\equiv$', +u'\u2264': '$\\leq$', +u'\u2265': '$\\geq$', +u'\u2266': '$\\leqq$', +u'\u2267': '$\\geqq$', +u'\u2268': '$\\lneqq$', +u'\u2269': '$\\gneqq$', +u'\u226a': '$\\ll$', +u'\u226b': '$\\gg$', +u'\u226c': '$\\between$', +u'\u226d': '$\\not\\kern-0.3em\\times$', +u'\u226e': '$\\not<$', +u'\u226f': '$\\not>$', +u'\u2270': '$\\not\\leq$', +u'\u2271': '$\\not\\geq$', +u'\u2272': '$\\lessequivlnt$', +u'\u2273': '$\\greaterequivlnt$', +u'\u2274': '$\\ElsevierGlyph{2274}$', +u'\u2275': '$\\ElsevierGlyph{2275}$', +u'\u2276': '$\\lessgtr$', +u'\u2277': '$\\gtrless$', +u'\u2278': '$\\notlessgreater$', +u'\u2279': '$\\notgreaterless$', +u'\u227a': '$\\prec$', +u'\u227b': '$\\succ$', +u'\u227c': '$\\preccurlyeq$', +u'\u227d': '$\\succcurlyeq$', +u'\u227e': '$\\precapprox$', +u'\u227f': '$\\succapprox$', +u'\u2280': '$\\not\\prec$', +u'\u2281': '$\\not\\succ$', +u'\u2282': '$\\subset$', +u'\u2283': '$\\supset$', +u'\u2284': '$\\not\\subset$', +u'\u2285': '$\\not\\supset$', +u'\u2286': '$\\subseteq$', +u'\u2287': '$\\supseteq$', +u'\u2288': '$\\not\\subseteq$', +u'\u2289': '$\\not\\supseteq$', +u'\u228a': '$\\subsetneq$', +u'\u228b': '$\\supsetneq$', +u'\u228e': '$\\uplus$', +u'\u228f': '$\\sqsubset$', +u'\u2290': '$\\sqsupset$', +u'\u2291': '$\\sqsubseteq$', +u'\u2292': '$\\sqsupseteq$', +u'\u2293': '$\\sqcap$', +u'\u2294': '$\\sqcup$', +u'\u2295': '$\\oplus$', +u'\u2296': '$\\ominus$', +u'\u2297': '$\\otimes$', +u'\u2298': '$\\oslash$', +u'\u2299': '$\\odot$', +u'\u229a': '$\\circledcirc$', +u'\u229b': '$\\circledast$', +u'\u229d': '$\\circleddash$', +u'\u229e': '$\\boxplus$', +u'\u229f': '$\\boxminus$', +u'\u22a0': '$\\boxtimes$', +u'\u22a1': '$\\boxdot$', +u'\u22a2': '$\\vdash$', +u'\u22a3': '$\\dashv$', +u'\u22a4': '$\\top$', +u'\u22a5': '$\\perp$', +u'\u22a7': '$\\truestate$', +u'\u22a8': '$\\forcesextra$', +u'\u22a9': '$\\Vdash$', +u'\u22aa': '$\\Vvdash$', +u'\u22ab': '$\\VDash$', +u'\u22ac': '$\\nvdash$', +u'\u22ad': '$\\nvDash$', +u'\u22ae': '$\\nVdash$', +u'\u22af': '$\\nVDash$', +u'\u22b2': '$\\vartriangleleft$', +u'\u22b3': '$\\vartriangleright$', +u'\u22b4': '$\\trianglelefteq$', +u'\u22b5': '$\\trianglerighteq$', +u'\u22b6': '$\\original$', +u'\u22b7': '$\\image$', +u'\u22b8': '$\\multimap$', +u'\u22b9': '$\\hermitconjmatrix$', +u'\u22ba': '$\\intercal$', +u'\u22bb': '$\\veebar$', +u'\u22be': '$\\rightanglearc$', +u'\u22c0': '$\\ElsevierGlyph{22C0}$', +u'\u22c1': '$\\ElsevierGlyph{22C1}$', +u'\u22c2': '$\\bigcap$', +u'\u22c3': '$\\bigcup$', +u'\u22c4': '$\\diamond$', +u'\u22c5': '$\\cdot$', +u'\u22c6': '$\\star$', +u'\u22c7': '$\\divideontimes$', +u'\u22c8': '$\\bowtie$', +u'\u22c9': '$\\ltimes$', +u'\u22ca': '$\\rtimes$', +u'\u22cb': '$\\leftthreetimes$', +u'\u22cc': '$\\rightthreetimes$', +u'\u22cd': 
'$\\backsimeq$', +u'\u22ce': '$\\curlyvee$', +u'\u22cf': '$\\curlywedge$', +u'\u22d0': '$\\Subset$', +u'\u22d1': '$\\Supset$', +u'\u22d2': '$\\Cap$', +u'\u22d3': '$\\Cup$', +u'\u22d4': '$\\pitchfork$', +u'\u22d6': '$\\lessdot$', +u'\u22d7': '$\\gtrdot$', +u'\u22d8': '$\\verymuchless$', +u'\u22d9': '$\\verymuchgreater$', +u'\u22da': '$\\lesseqgtr$', +u'\u22db': '$\\gtreqless$', +u'\u22de': '$\\curlyeqprec$', +u'\u22df': '$\\curlyeqsucc$', +u'\u22e2': '$\\not\\sqsubseteq$', +u'\u22e3': '$\\not\\sqsupseteq$', +u'\u22e5': '$\\Elzsqspne$', +u'\u22e6': '$\\lnsim$', +u'\u22e7': '$\\gnsim$', +u'\u22e8': '$\\precedesnotsimilar$', +u'\u22e9': '$\\succnsim$', +u'\u22ea': '$\\ntriangleleft$', +u'\u22eb': '$\\ntriangleright$', +u'\u22ec': '$\\ntrianglelefteq$', +u'\u22ed': '$\\ntrianglerighteq$', +u'\u22ee': '$\\vdots$', +u'\u22ef': '$\\cdots$', +u'\u22f0': '$\\upslopeellipsis$', +u'\u22f1': '$\\downslopeellipsis$', +u'\u2305': '{\\barwedge}', +u'\u2306': '$\\perspcorrespond$', +u'\u2308': '$\\lceil$', +u'\u2309': '$\\rceil$', +u'\u230a': '$\\lfloor$', +u'\u230b': '$\\rfloor$', +u'\u2315': '$\\recorder$', +u'\u2316': '$\\mathchar"2208$', +u'\u231c': '$\\ulcorner$', +u'\u231d': '$\\urcorner$', +u'\u231e': '$\\llcorner$', +u'\u231f': '$\\lrcorner$', +u'\u2322': '$\\frown$', +u'\u2323': '$\\smile$', +u'\u2329': '$\\langle$', +u'\u232a': '$\\rangle$', +u'\u233d': '$\\ElsevierGlyph{E838}$', +u'\u23a3': '$\\Elzdlcorn$', +u'\u23b0': '$\\lmoustache$', +u'\u23b1': '$\\rmoustache$', +u'\u2423': '{\\textvisiblespace}', +u'\u2460': '{\\ding{172}}', +u'\u2461': '{\\ding{173}}', +u'\u2462': '{\\ding{174}}', +u'\u2463': '{\\ding{175}}', +u'\u2464': '{\\ding{176}}', +u'\u2465': '{\\ding{177}}', +u'\u2466': '{\\ding{178}}', +u'\u2467': '{\\ding{179}}', +u'\u2468': '{\\ding{180}}', +u'\u2469': '{\\ding{181}}', +u'\u24c8': '$\\circledS$', +u'\u2506': '$\\Elzdshfnc$', +u'\u2519': '$\\Elzsqfnw$', +u'\u2571': '$\\diagup$', +u'\u25a0': '{\\ding{110}}', +u'\u25a1': '$\\square$', +u'\u25aa': '$\\blacksquare$', +u'\u25ad': '$\\fbox{~~}$', +u'\u25af': '$\\Elzvrecto$', +u'\u25b1': '$\\ElsevierGlyph{E381}$', +u'\u25b2': '{\\ding{115}}', +u'\u25b3': '$\\bigtriangleup$', +u'\u25b4': '$\\blacktriangle$', +u'\u25b5': '$\\vartriangle$', +u'\u25b8': '$\\blacktriangleright$', +u'\u25b9': '$\\triangleright$', +u'\u25bc': '{\\ding{116}}', +u'\u25bd': '$\\bigtriangledown$', +u'\u25be': '$\\blacktriangledown$', +u'\u25bf': '$\\triangledown$', +u'\u25c2': '$\\blacktriangleleft$', +u'\u25c3': '$\\triangleleft$', +u'\u25c6': '{\\ding{117}}', +u'\u25ca': '$\\lozenge$', +u'\u25cb': '$\\bigcirc$', +u'\u25cf': '{\\ding{108}}', +u'\u25d0': '$\\Elzcirfl$', +u'\u25d1': '$\\Elzcirfr$', +u'\u25d2': '$\\Elzcirfb$', +u'\u25d7': '{\\ding{119}}', +u'\u25d8': '$\\Elzrvbull$', +u'\u25e7': '$\\Elzsqfl$', +u'\u25e8': '$\\Elzsqfr$', +u'\u25ea': '$\\Elzsqfse$', +u'\u25ef': '$\\bigcirc$', +u'\u2605': '{\\ding{72}}', +u'\u2606': '{\\ding{73}}', +u'\u260e': '{\\ding{37}}', +u'\u261b': '{\\ding{42}}', +u'\u261e': '{\\ding{43}}', +u'\u263e': '{\\rightmoon}', +u'\u263f': '{\\mercury}', +u'\u2640': '{\\venus}', +u'\u2642': '{\\male}', +u'\u2643': '{\\jupiter}', +u'\u2644': '{\\saturn}', +u'\u2645': '{\\uranus}', +u'\u2646': '{\\neptune}', +u'\u2647': '{\\pluto}', +u'\u2648': '{\\aries}', +u'\u2649': '{\\taurus}', +u'\u264a': '{\\gemini}', +u'\u264b': '{\\cancer}', +u'\u264c': '{\\leo}', +u'\u264d': '{\\virgo}', +u'\u264e': '{\\libra}', +u'\u264f': '{\\scorpio}', +u'\u2650': '{\\sagittarius}', +u'\u2651': '{\\capricornus}', +u'\u2652': '{\\aquarius}', +u'\u2653': 
'{\\pisces}', +u'\u2660': '{\\ding{171}}', +u'\u2662': '$\\diamond$', +u'\u2663': '{\\ding{168}}', +u'\u2665': '{\\ding{170}}', +u'\u2666': '{\\ding{169}}', +u'\u2669': '{\\quarternote}', +u'\u266a': '{\\eighthnote}', +u'\u266d': '$\\flat$', +u'\u266e': '$\\natural$', +u'\u266f': '$\\sharp$', +u'\u2701': '{\\ding{33}}', +u'\u2702': '{\\ding{34}}', +u'\u2703': '{\\ding{35}}', +u'\u2704': '{\\ding{36}}', +u'\u2706': '{\\ding{38}}', +u'\u2707': '{\\ding{39}}', +u'\u2708': '{\\ding{40}}', +u'\u2709': '{\\ding{41}}', +u'\u270c': '{\\ding{44}}', +u'\u270d': '{\\ding{45}}', +u'\u270e': '{\\ding{46}}', +u'\u270f': '{\\ding{47}}', +u'\u2710': '{\\ding{48}}', +u'\u2711': '{\\ding{49}}', +u'\u2712': '{\\ding{50}}', +u'\u2713': '{\\ding{51}}', +u'\u2714': '{\\ding{52}}', +u'\u2715': '{\\ding{53}}', +u'\u2716': '{\\ding{54}}', +u'\u2717': '{\\ding{55}}', +u'\u2718': '{\\ding{56}}', +u'\u2719': '{\\ding{57}}', +u'\u271a': '{\\ding{58}}', +u'\u271b': '{\\ding{59}}', +u'\u271c': '{\\ding{60}}', +u'\u271d': '{\\ding{61}}', +u'\u271e': '{\\ding{62}}', +u'\u271f': '{\\ding{63}}', +u'\u2720': '{\\ding{64}}', +u'\u2721': '{\\ding{65}}', +u'\u2722': '{\\ding{66}}', +u'\u2723': '{\\ding{67}}', +u'\u2724': '{\\ding{68}}', +u'\u2725': '{\\ding{69}}', +u'\u2726': '{\\ding{70}}', +u'\u2727': '{\\ding{71}}', +u'\u2729': '{\\ding{73}}', +u'\u272a': '{\\ding{74}}', +u'\u272b': '{\\ding{75}}', +u'\u272c': '{\\ding{76}}', +u'\u272d': '{\\ding{77}}', +u'\u272e': '{\\ding{78}}', +u'\u272f': '{\\ding{79}}', +u'\u2730': '{\\ding{80}}', +u'\u2731': '{\\ding{81}}', +u'\u2732': '{\\ding{82}}', +u'\u2733': '{\\ding{83}}', +u'\u2734': '{\\ding{84}}', +u'\u2735': '{\\ding{85}}', +u'\u2736': '{\\ding{86}}', +u'\u2737': '{\\ding{87}}', +u'\u2738': '{\\ding{88}}', +u'\u2739': '{\\ding{89}}', +u'\u273a': '{\\ding{90}}', +u'\u273b': '{\\ding{91}}', +u'\u273c': '{\\ding{92}}', +u'\u273d': '{\\ding{93}}', +u'\u273e': '{\\ding{94}}', +u'\u273f': '{\\ding{95}}', +u'\u2740': '{\\ding{96}}', +u'\u2741': '{\\ding{97}}', +u'\u2742': '{\\ding{98}}', +u'\u2743': '{\\ding{99}}', +u'\u2744': '{\\ding{100}}', +u'\u2745': '{\\ding{101}}', +u'\u2746': '{\\ding{102}}', +u'\u2747': '{\\ding{103}}', +u'\u2748': '{\\ding{104}}', +u'\u2749': '{\\ding{105}}', +u'\u274a': '{\\ding{106}}', +u'\u274b': '{\\ding{107}}', +u'\u274d': '{\\ding{109}}', +u'\u274f': '{\\ding{111}}', +u'\u2750': '{\\ding{112}}', +u'\u2751': '{\\ding{113}}', +u'\u2752': '{\\ding{114}}', +u'\u2756': '{\\ding{118}}', +u'\u2758': '{\\ding{120}}', +u'\u2759': '{\\ding{121}}', +u'\u275a': '{\\ding{122}}', +u'\u275b': '{\\ding{123}}', +u'\u275c': '{\\ding{124}}', +u'\u275d': '{\\ding{125}}', +u'\u275e': '{\\ding{126}}', +u'\u2761': '{\\ding{161}}', +u'\u2762': '{\\ding{162}}', +u'\u2763': '{\\ding{163}}', +u'\u2764': '{\\ding{164}}', +u'\u2765': '{\\ding{165}}', +u'\u2766': '{\\ding{166}}', +u'\u2767': '{\\ding{167}}', +u'\u2776': '{\\ding{182}}', +u'\u2777': '{\\ding{183}}', +u'\u2778': '{\\ding{184}}', +u'\u2779': '{\\ding{185}}', +u'\u277a': '{\\ding{186}}', +u'\u277b': '{\\ding{187}}', +u'\u277c': '{\\ding{188}}', +u'\u277d': '{\\ding{189}}', +u'\u277e': '{\\ding{190}}', +u'\u277f': '{\\ding{191}}', +u'\u2780': '{\\ding{192}}', +u'\u2781': '{\\ding{193}}', +u'\u2782': '{\\ding{194}}', +u'\u2783': '{\\ding{195}}', +u'\u2784': '{\\ding{196}}', +u'\u2785': '{\\ding{197}}', +u'\u2786': '{\\ding{198}}', +u'\u2787': '{\\ding{199}}', +u'\u2788': '{\\ding{200}}', +u'\u2789': '{\\ding{201}}', +u'\u278a': '{\\ding{202}}', +u'\u278b': '{\\ding{203}}', +u'\u278c': '{\\ding{204}}', +u'\u278d': 
'{\\ding{205}}', +u'\u278e': '{\\ding{206}}', +u'\u278f': '{\\ding{207}}', +u'\u2790': '{\\ding{208}}', +u'\u2791': '{\\ding{209}}', +u'\u2792': '{\\ding{210}}', +u'\u2793': '{\\ding{211}}', +u'\u2794': '{\\ding{212}}', +u'\u2798': '{\\ding{216}}', +u'\u2799': '{\\ding{217}}', +u'\u279a': '{\\ding{218}}', +u'\u279b': '{\\ding{219}}', +u'\u279c': '{\\ding{220}}', +u'\u279d': '{\\ding{221}}', +u'\u279e': '{\\ding{222}}', +u'\u279f': '{\\ding{223}}', +u'\u27a0': '{\\ding{224}}', +u'\u27a1': '{\\ding{225}}', +u'\u27a2': '{\\ding{226}}', +u'\u27a3': '{\\ding{227}}', +u'\u27a4': '{\\ding{228}}', +u'\u27a5': '{\\ding{229}}', +u'\u27a6': '{\\ding{230}}', +u'\u27a7': '{\\ding{231}}', +u'\u27a8': '{\\ding{232}}', +u'\u27a9': '{\\ding{233}}', +u'\u27aa': '{\\ding{234}}', +u'\u27ab': '{\\ding{235}}', +u'\u27ac': '{\\ding{236}}', +u'\u27ad': '{\\ding{237}}', +u'\u27ae': '{\\ding{238}}', +u'\u27af': '{\\ding{239}}', +u'\u27b1': '{\\ding{241}}', +u'\u27b2': '{\\ding{242}}', +u'\u27b3': '{\\ding{243}}', +u'\u27b4': '{\\ding{244}}', +u'\u27b5': '{\\ding{245}}', +u'\u27b6': '{\\ding{246}}', +u'\u27b7': '{\\ding{247}}', +u'\u27b8': '{\\ding{248}}', +u'\u27b9': '{\\ding{249}}', +u'\u27ba': '{\\ding{250}}', +u'\u27bb': '{\\ding{251}}', +u'\u27bc': '{\\ding{252}}', +u'\u27bd': '{\\ding{253}}', +u'\u27be': '{\\ding{254}}', +u'\u27f5': '$\\longleftarrow$', +u'\u27f6': '$\\longrightarrow$', +u'\u27f7': '$\\longleftrightarrow$', +u'\u27f8': '$\\Longleftarrow$', +u'\u27f9': '$\\Longrightarrow$', +u'\u27fa': '$\\Longleftrightarrow$', +u'\u27fc': '$\\longmapsto$', +u'\u27ff': '$\\sim\\joinrel\\leadsto$', +u'\u2905': '$\\ElsevierGlyph{E212}$', +u'\u2912': '$\\UpArrowBar$', +u'\u2913': '$\\DownArrowBar$', +u'\u2923': '$\\ElsevierGlyph{E20C}$', +u'\u2924': '$\\ElsevierGlyph{E20D}$', +u'\u2925': '$\\ElsevierGlyph{E20B}$', +u'\u2926': '$\\ElsevierGlyph{E20A}$', +u'\u2927': '$\\ElsevierGlyph{E211}$', +u'\u2928': '$\\ElsevierGlyph{E20E}$', +u'\u2929': '$\\ElsevierGlyph{E20F}$', +u'\u292a': '$\\ElsevierGlyph{E210}$', +u'\u2933': '$\\ElsevierGlyph{E21C}$', +u'\u2936': '$\\ElsevierGlyph{E21A}$', +u'\u2937': '$\\ElsevierGlyph{E219}$', +u'\u2940': '$\\Elolarr$', +u'\u2941': '$\\Elorarr$', +u'\u2942': '$\\ElzRlarr$', +u'\u2944': '$\\ElzrLarr$', +u'\u2947': '$\\Elzrarrx$', +u'\u294e': '$\\LeftRightVector$', +u'\u294f': '$\\RightUpDownVector$', +u'\u2950': '$\\DownLeftRightVector$', +u'\u2951': '$\\LeftUpDownVector$', +u'\u2952': '$\\LeftVectorBar$', +u'\u2953': '$\\RightVectorBar$', +u'\u2954': '$\\RightUpVectorBar$', +u'\u2955': '$\\RightDownVectorBar$', +u'\u2956': '$\\DownLeftVectorBar$', +u'\u2957': '$\\DownRightVectorBar$', +u'\u2958': '$\\LeftUpVectorBar$', +u'\u2959': '$\\LeftDownVectorBar$', +u'\u295a': '$\\LeftTeeVector$', +u'\u295b': '$\\RightTeeVector$', +u'\u295c': '$\\RightUpTeeVector$', +u'\u295d': '$\\RightDownTeeVector$', +u'\u295e': '$\\DownLeftTeeVector$', +u'\u295f': '$\\DownRightTeeVector$', +u'\u2960': '$\\LeftUpTeeVector$', +u'\u2961': '$\\LeftDownTeeVector$', +u'\u296e': '$\\UpEquilibrium$', +u'\u296f': '$\\ReverseUpEquilibrium$', +u'\u2970': '$\\RoundImplies$', +u'\u297c': '$\\ElsevierGlyph{E214}$', +u'\u297d': '$\\ElsevierGlyph{E215}$', +u'\u2980': '$\\Elztfnc$', +u'\u2985': '$\\ElsevierGlyph{3018}$', +u'\u2986': '$\\Elroang$', +u'\u2993': '$<\\kern-0.58em($', +u'\u2994': '$\\ElsevierGlyph{E291}$', +u'\u2999': '$\\Elzddfnc$', +u'\u299c': '$\\Angle$', +u'\u29a0': '$\\Elzlpargt$', +u'\u29b5': '$\\ElsevierGlyph{E260}$', +u'\u29b6': '$\\ElsevierGlyph{E61B}$', +u'\u29ca': '$\\ElzLap$', +u'\u29cb': 
'$\\Elzdefas$', +u'\u29cf': '$\\LeftTriangleBar$', +u'\u29d0': '$\\RightTriangleBar$', +u'\u29dc': '$\\ElsevierGlyph{E372}$', +u'\u29eb': '$\\blacklozenge$', +u'\u29f4': '$\\RuleDelayed$', +u'\u2a04': '$\\Elxuplus$', +u'\u2a05': '$\\ElzThr$', +u'\u2a06': '$\\Elxsqcup$', +u'\u2a07': '$\\ElzInf$', +u'\u2a08': '$\\ElzSup$', +u'\u2a0d': '$\\ElzCint$', +u'\u2a0f': '$\\clockoint$', +u'\u2a10': '$\\ElsevierGlyph{E395}$', +u'\u2a16': '$\\sqrint$', +u'\u2a25': '$\\ElsevierGlyph{E25A}$', +u'\u2a2a': '$\\ElsevierGlyph{E25B}$', +u'\u2a2d': '$\\ElsevierGlyph{E25C}$', +u'\u2a2e': '$\\ElsevierGlyph{E25D}$', +u'\u2a2f': '$\\ElzTimes$', +u'\u2a34': '$\\ElsevierGlyph{E25E}$', +u'\u2a35': '$\\ElsevierGlyph{E25E}$', +u'\u2a3c': '$\\ElsevierGlyph{E259}$', +u'\u2a3f': '$\\amalg$', +u'\u2a53': '$\\ElzAnd$', +u'\u2a54': '$\\ElzOr$', +u'\u2a55': '$\\ElsevierGlyph{E36E}$', +u'\u2a56': '$\\ElOr$', +u'\u2a5e': '$\\perspcorrespond$', +u'\u2a5f': '$\\Elzminhat$', +u'\u2a63': '$\\ElsevierGlyph{225A}$', +u'\u2a6e': '$\\stackrel{*}{=}$', +u'\u2a75': '$\\Equal$', +u'\u2a7d': '$\\leqslant$', +u'\u2a7e': '$\\geqslant$', +u'\u2a85': '$\\lessapprox$', +u'\u2a86': '$\\gtrapprox$', +u'\u2a87': '$\\lneq$', +u'\u2a88': '$\\gneq$', +u'\u2a89': '$\\lnapprox$', +u'\u2a8a': '$\\gnapprox$', +u'\u2a8b': '$\\lesseqqgtr$', +u'\u2a8c': '$\\gtreqqless$', +u'\u2a95': '$\\eqslantless$', +u'\u2a96': '$\\eqslantgtr$', +u'\u2a9d': '$\\Pisymbol{ppi020}{117}$', +u'\u2a9e': '$\\Pisymbol{ppi020}{105}$', +u'\u2aa1': '$\\NestedLessLess$', +u'\u2aa2': '$\\NestedGreaterGreater$', +u'\u2aaf': '$\\preceq$', +u'\u2ab0': '$\\succeq$', +u'\u2ab5': '$\\precneqq$', +u'\u2ab6': '$\\succneqq$', +u'\u2ab7': '$\\precapprox$', +u'\u2ab8': '$\\succapprox$', +u'\u2ab9': '$\\precnapprox$', +u'\u2aba': '$\\succnapprox$', +u'\u2ac5': '$\\subseteqq$', +u'\u2ac6': '$\\supseteqq$', +u'\u2acb': '$\\subsetneqq$', +u'\u2acc': '$\\supsetneqq$', +u'\u2aeb': '$\\ElsevierGlyph{E30D}$', +u'\u2af6': '$\\Elztdcol$', +u'\u2afd': '${{/}\\!\\!{/}}$', +u'\u300a': '$\\ElsevierGlyph{300A}$', +u'\u300b': '$\\ElsevierGlyph{300B}$', +u'\u3018': '$\\ElsevierGlyph{3018}$', +u'\u3019': '$\\ElsevierGlyph{3019}$', +u'\u301a': '$\\openbracketleft$', +u'\u301b': '$\\openbracketright$', +u'\ufb00': '{ff}', +u'\ufb01': '{fi}', +u'\ufb02': '{fl}', +u'\ufb03': '{ffi}', +u'\ufb04': '{ffl}', +u'\U0001d400': '$\\mathbf{A}$', +u'\U0001d401': '$\\mathbf{B}$', +u'\U0001d402': '$\\mathbf{C}$', +u'\U0001d403': '$\\mathbf{D}$', +u'\U0001d404': '$\\mathbf{E}$', +u'\U0001d405': '$\\mathbf{F}$', +u'\U0001d406': '$\\mathbf{G}$', +u'\U0001d407': '$\\mathbf{H}$', +u'\U0001d408': '$\\mathbf{I}$', +u'\U0001d409': '$\\mathbf{J}$', +u'\U0001d40a': '$\\mathbf{K}$', +u'\U0001d40b': '$\\mathbf{L}$', +u'\U0001d40c': '$\\mathbf{M}$', +u'\U0001d40d': '$\\mathbf{N}$', +u'\U0001d40e': '$\\mathbf{O}$', +u'\U0001d40f': '$\\mathbf{P}$', +u'\U0001d410': '$\\mathbf{Q}$', +u'\U0001d411': '$\\mathbf{R}$', +u'\U0001d412': '$\\mathbf{S}$', +u'\U0001d413': '$\\mathbf{T}$', +u'\U0001d414': '$\\mathbf{U}$', +u'\U0001d415': '$\\mathbf{V}$', +u'\U0001d416': '$\\mathbf{W}$', +u'\U0001d417': '$\\mathbf{X}$', +u'\U0001d418': '$\\mathbf{Y}$', +u'\U0001d419': '$\\mathbf{Z}$', +u'\U0001d41a': '$\\mathbf{a}$', +u'\U0001d41b': '$\\mathbf{b}$', +u'\U0001d41c': '$\\mathbf{c}$', +u'\U0001d41d': '$\\mathbf{d}$', +u'\U0001d41e': '$\\mathbf{e}$', +u'\U0001d41f': '$\\mathbf{f}$', +u'\U0001d420': '$\\mathbf{g}$', +u'\U0001d421': '$\\mathbf{h}$', +u'\U0001d422': '$\\mathbf{i}$', +u'\U0001d423': '$\\mathbf{j}$', +u'\U0001d424': '$\\mathbf{k}$', +u'\U0001d425': 
'$\\mathbf{l}$', +u'\U0001d426': '$\\mathbf{m}$', +u'\U0001d427': '$\\mathbf{n}$', +u'\U0001d428': '$\\mathbf{o}$', +u'\U0001d429': '$\\mathbf{p}$', +u'\U0001d42a': '$\\mathbf{q}$', +u'\U0001d42b': '$\\mathbf{r}$', +u'\U0001d42c': '$\\mathbf{s}$', +u'\U0001d42d': '$\\mathbf{t}$', +u'\U0001d42e': '$\\mathbf{u}$', +u'\U0001d42f': '$\\mathbf{v}$', +u'\U0001d430': '$\\mathbf{w}$', +u'\U0001d431': '$\\mathbf{x}$', +u'\U0001d432': '$\\mathbf{y}$', +u'\U0001d433': '$\\mathbf{z}$', +u'\U0001d434': '$\\mathsl{A}$', +u'\U0001d435': '$\\mathsl{B}$', +u'\U0001d436': '$\\mathsl{C}$', +u'\U0001d437': '$\\mathsl{D}$', +u'\U0001d438': '$\\mathsl{E}$', +u'\U0001d439': '$\\mathsl{F}$', +u'\U0001d43a': '$\\mathsl{G}$', +u'\U0001d43b': '$\\mathsl{H}$', +u'\U0001d43c': '$\\mathsl{I}$', +u'\U0001d43d': '$\\mathsl{J}$', +u'\U0001d43e': '$\\mathsl{K}$', +u'\U0001d43f': '$\\mathsl{L}$', +u'\U0001d440': '$\\mathsl{M}$', +u'\U0001d441': '$\\mathsl{N}$', +u'\U0001d442': '$\\mathsl{O}$', +u'\U0001d443': '$\\mathsl{P}$', +u'\U0001d444': '$\\mathsl{Q}$', +u'\U0001d445': '$\\mathsl{R}$', +u'\U0001d446': '$\\mathsl{S}$', +u'\U0001d447': '$\\mathsl{T}$', +u'\U0001d448': '$\\mathsl{U}$', +u'\U0001d449': '$\\mathsl{V}$', +u'\U0001d44a': '$\\mathsl{W}$', +u'\U0001d44b': '$\\mathsl{X}$', +u'\U0001d44c': '$\\mathsl{Y}$', +u'\U0001d44d': '$\\mathsl{Z}$', +u'\U0001d44e': '$\\mathsl{a}$', +u'\U0001d44f': '$\\mathsl{b}$', +u'\U0001d450': '$\\mathsl{c}$', +u'\U0001d451': '$\\mathsl{d}$', +u'\U0001d452': '$\\mathsl{e}$', +u'\U0001d453': '$\\mathsl{f}$', +u'\U0001d454': '$\\mathsl{g}$', +u'\U0001d456': '$\\mathsl{i}$', +u'\U0001d457': '$\\mathsl{j}$', +u'\U0001d458': '$\\mathsl{k}$', +u'\U0001d459': '$\\mathsl{l}$', +u'\U0001d45a': '$\\mathsl{m}$', +u'\U0001d45b': '$\\mathsl{n}$', +u'\U0001d45c': '$\\mathsl{o}$', +u'\U0001d45d': '$\\mathsl{p}$', +u'\U0001d45e': '$\\mathsl{q}$', +u'\U0001d45f': '$\\mathsl{r}$', +u'\U0001d460': '$\\mathsl{s}$', +u'\U0001d461': '$\\mathsl{t}$', +u'\U0001d462': '$\\mathsl{u}$', +u'\U0001d463': '$\\mathsl{v}$', +u'\U0001d464': '$\\mathsl{w}$', +u'\U0001d465': '$\\mathsl{x}$', +u'\U0001d466': '$\\mathsl{y}$', +u'\U0001d467': '$\\mathsl{z}$', +u'\U0001d468': '$\\mathbit{A}$', +u'\U0001d469': '$\\mathbit{B}$', +u'\U0001d46a': '$\\mathbit{C}$', +u'\U0001d46b': '$\\mathbit{D}$', +u'\U0001d46c': '$\\mathbit{E}$', +u'\U0001d46d': '$\\mathbit{F}$', +u'\U0001d46e': '$\\mathbit{G}$', +u'\U0001d46f': '$\\mathbit{H}$', +u'\U0001d470': '$\\mathbit{I}$', +u'\U0001d471': '$\\mathbit{J}$', +u'\U0001d472': '$\\mathbit{K}$', +u'\U0001d473': '$\\mathbit{L}$', +u'\U0001d474': '$\\mathbit{M}$', +u'\U0001d475': '$\\mathbit{N}$', +u'\U0001d476': '$\\mathbit{O}$', +u'\U0001d477': '$\\mathbit{P}$', +u'\U0001d478': '$\\mathbit{Q}$', +u'\U0001d479': '$\\mathbit{R}$', +u'\U0001d47a': '$\\mathbit{S}$', +u'\U0001d47b': '$\\mathbit{T}$', +u'\U0001d47c': '$\\mathbit{U}$', +u'\U0001d47d': '$\\mathbit{V}$', +u'\U0001d47e': '$\\mathbit{W}$', +u'\U0001d47f': '$\\mathbit{X}$', +u'\U0001d480': '$\\mathbit{Y}$', +u'\U0001d481': '$\\mathbit{Z}$', +u'\U0001d482': '$\\mathbit{a}$', +u'\U0001d483': '$\\mathbit{b}$', +u'\U0001d484': '$\\mathbit{c}$', +u'\U0001d485': '$\\mathbit{d}$', +u'\U0001d486': '$\\mathbit{e}$', +u'\U0001d487': '$\\mathbit{f}$', +u'\U0001d488': '$\\mathbit{g}$', +u'\U0001d489': '$\\mathbit{h}$', +u'\U0001d48a': '$\\mathbit{i}$', +u'\U0001d48b': '$\\mathbit{j}$', +u'\U0001d48c': '$\\mathbit{k}$', +u'\U0001d48d': '$\\mathbit{l}$', +u'\U0001d48e': '$\\mathbit{m}$', +u'\U0001d48f': '$\\mathbit{n}$', +u'\U0001d490': 
'$\\mathbit{o}$', +u'\U0001d491': '$\\mathbit{p}$', +u'\U0001d492': '$\\mathbit{q}$', +u'\U0001d493': '$\\mathbit{r}$', +u'\U0001d494': '$\\mathbit{s}$', +u'\U0001d495': '$\\mathbit{t}$', +u'\U0001d496': '$\\mathbit{u}$', +u'\U0001d497': '$\\mathbit{v}$', +u'\U0001d498': '$\\mathbit{w}$', +u'\U0001d499': '$\\mathbit{x}$', +u'\U0001d49a': '$\\mathbit{y}$', +u'\U0001d49b': '$\\mathbit{z}$', +u'\U0001d49c': '$\\mathscr{A}$', +u'\U0001d49e': '$\\mathscr{C}$', +u'\U0001d49f': '$\\mathscr{D}$', +u'\U0001d4a2': '$\\mathscr{G}$', +u'\U0001d4a5': '$\\mathscr{J}$', +u'\U0001d4a6': '$\\mathscr{K}$', +u'\U0001d4a9': '$\\mathscr{N}$', +u'\U0001d4aa': '$\\mathscr{O}$', +u'\U0001d4ab': '$\\mathscr{P}$', +u'\U0001d4ac': '$\\mathscr{Q}$', +u'\U0001d4ae': '$\\mathscr{S}$', +u'\U0001d4af': '$\\mathscr{T}$', +u'\U0001d4b0': '$\\mathscr{U}$', +u'\U0001d4b1': '$\\mathscr{V}$', +u'\U0001d4b2': '$\\mathscr{W}$', +u'\U0001d4b3': '$\\mathscr{X}$', +u'\U0001d4b4': '$\\mathscr{Y}$', +u'\U0001d4b5': '$\\mathscr{Z}$', +u'\U0001d4b6': '$\\mathscr{a}$', +u'\U0001d4b7': '$\\mathscr{b}$', +u'\U0001d4b8': '$\\mathscr{c}$', +u'\U0001d4b9': '$\\mathscr{d}$', +u'\U0001d4bb': '$\\mathscr{f}$', +u'\U0001d4bd': '$\\mathscr{h}$', +u'\U0001d4be': '$\\mathscr{i}$', +u'\U0001d4bf': '$\\mathscr{j}$', +u'\U0001d4c0': '$\\mathscr{k}$', +u'\U0001d4c1': '$\\mathscr{l}$', +u'\U0001d4c2': '$\\mathscr{m}$', +u'\U0001d4c3': '$\\mathscr{n}$', +u'\U0001d4c5': '$\\mathscr{p}$', +u'\U0001d4c6': '$\\mathscr{q}$', +u'\U0001d4c7': '$\\mathscr{r}$', +u'\U0001d4c8': '$\\mathscr{s}$', +u'\U0001d4c9': '$\\mathscr{t}$', +u'\U0001d4ca': '$\\mathscr{u}$', +u'\U0001d4cb': '$\\mathscr{v}$', +u'\U0001d4cc': '$\\mathscr{w}$', +u'\U0001d4cd': '$\\mathscr{x}$', +u'\U0001d4ce': '$\\mathscr{y}$', +u'\U0001d4cf': '$\\mathscr{z}$', +u'\U0001d4d0': '$\\mathmit{A}$', +u'\U0001d4d1': '$\\mathmit{B}$', +u'\U0001d4d2': '$\\mathmit{C}$', +u'\U0001d4d3': '$\\mathmit{D}$', +u'\U0001d4d4': '$\\mathmit{E}$', +u'\U0001d4d5': '$\\mathmit{F}$', +u'\U0001d4d6': '$\\mathmit{G}$', +u'\U0001d4d7': '$\\mathmit{H}$', +u'\U0001d4d8': '$\\mathmit{I}$', +u'\U0001d4d9': '$\\mathmit{J}$', +u'\U0001d4da': '$\\mathmit{K}$', +u'\U0001d4db': '$\\mathmit{L}$', +u'\U0001d4dc': '$\\mathmit{M}$', +u'\U0001d4dd': '$\\mathmit{N}$', +u'\U0001d4de': '$\\mathmit{O}$', +u'\U0001d4df': '$\\mathmit{P}$', +u'\U0001d4e0': '$\\mathmit{Q}$', +u'\U0001d4e1': '$\\mathmit{R}$', +u'\U0001d4e2': '$\\mathmit{S}$', +u'\U0001d4e3': '$\\mathmit{T}$', +u'\U0001d4e4': '$\\mathmit{U}$', +u'\U0001d4e5': '$\\mathmit{V}$', +u'\U0001d4e6': '$\\mathmit{W}$', +u'\U0001d4e7': '$\\mathmit{X}$', +u'\U0001d4e8': '$\\mathmit{Y}$', +u'\U0001d4e9': '$\\mathmit{Z}$', +u'\U0001d4ea': '$\\mathmit{a}$', +u'\U0001d4eb': '$\\mathmit{b}$', +u'\U0001d4ec': '$\\mathmit{c}$', +u'\U0001d4ed': '$\\mathmit{d}$', +u'\U0001d4ee': '$\\mathmit{e}$', +u'\U0001d4ef': '$\\mathmit{f}$', +u'\U0001d4f0': '$\\mathmit{g}$', +u'\U0001d4f1': '$\\mathmit{h}$', +u'\U0001d4f2': '$\\mathmit{i}$', +u'\U0001d4f3': '$\\mathmit{j}$', +u'\U0001d4f4': '$\\mathmit{k}$', +u'\U0001d4f5': '$\\mathmit{l}$', +u'\U0001d4f6': '$\\mathmit{m}$', +u'\U0001d4f7': '$\\mathmit{n}$', +u'\U0001d4f8': '$\\mathmit{o}$', +u'\U0001d4f9': '$\\mathmit{p}$', +u'\U0001d4fa': '$\\mathmit{q}$', +u'\U0001d4fb': '$\\mathmit{r}$', +u'\U0001d4fc': '$\\mathmit{s}$', +u'\U0001d4fd': '$\\mathmit{t}$', +u'\U0001d4fe': '$\\mathmit{u}$', +u'\U0001d4ff': '$\\mathmit{v}$', +u'\U0001d500': '$\\mathmit{w}$', +u'\U0001d501': '$\\mathmit{x}$', +u'\U0001d502': '$\\mathmit{y}$', +u'\U0001d503': '$\\mathmit{z}$', 
+u'\U0001d504': '$\\mathfrak{A}$', +u'\U0001d505': '$\\mathfrak{B}$', +u'\U0001d507': '$\\mathfrak{D}$', +u'\U0001d508': '$\\mathfrak{E}$', +u'\U0001d509': '$\\mathfrak{F}$', +u'\U0001d50a': '$\\mathfrak{G}$', +u'\U0001d50d': '$\\mathfrak{J}$', +u'\U0001d50e': '$\\mathfrak{K}$', +u'\U0001d50f': '$\\mathfrak{L}$', +u'\U0001d510': '$\\mathfrak{M}$', +u'\U0001d511': '$\\mathfrak{N}$', +u'\U0001d512': '$\\mathfrak{O}$', +u'\U0001d513': '$\\mathfrak{P}$', +u'\U0001d514': '$\\mathfrak{Q}$', +u'\U0001d516': '$\\mathfrak{S}$', +u'\U0001d517': '$\\mathfrak{T}$', +u'\U0001d518': '$\\mathfrak{U}$', +u'\U0001d519': '$\\mathfrak{V}$', +u'\U0001d51a': '$\\mathfrak{W}$', +u'\U0001d51b': '$\\mathfrak{X}$', +u'\U0001d51c': '$\\mathfrak{Y}$', +u'\U0001d51e': '$\\mathfrak{a}$', +u'\U0001d51f': '$\\mathfrak{b}$', +u'\U0001d520': '$\\mathfrak{c}$', +u'\U0001d521': '$\\mathfrak{d}$', +u'\U0001d522': '$\\mathfrak{e}$', +u'\U0001d523': '$\\mathfrak{f}$', +u'\U0001d524': '$\\mathfrak{g}$', +u'\U0001d525': '$\\mathfrak{h}$', +u'\U0001d526': '$\\mathfrak{i}$', +u'\U0001d527': '$\\mathfrak{j}$', +u'\U0001d528': '$\\mathfrak{k}$', +u'\U0001d529': '$\\mathfrak{l}$', +u'\U0001d52a': '$\\mathfrak{m}$', +u'\U0001d52b': '$\\mathfrak{n}$', +u'\U0001d52c': '$\\mathfrak{o}$', +u'\U0001d52d': '$\\mathfrak{p}$', +u'\U0001d52e': '$\\mathfrak{q}$', +u'\U0001d52f': '$\\mathfrak{r}$', +u'\U0001d530': '$\\mathfrak{s}$', +u'\U0001d531': '$\\mathfrak{t}$', +u'\U0001d532': '$\\mathfrak{u}$', +u'\U0001d533': '$\\mathfrak{v}$', +u'\U0001d534': '$\\mathfrak{w}$', +u'\U0001d535': '$\\mathfrak{x}$', +u'\U0001d536': '$\\mathfrak{y}$', +u'\U0001d537': '$\\mathfrak{z}$', +u'\U0001d538': '$\\mathbb{A}$', +u'\U0001d539': '$\\mathbb{B}$', +u'\U0001d53b': '$\\mathbb{D}$', +u'\U0001d53c': '$\\mathbb{E}$', +u'\U0001d53d': '$\\mathbb{F}$', +u'\U0001d53e': '$\\mathbb{G}$', +u'\U0001d540': '$\\mathbb{I}$', +u'\U0001d541': '$\\mathbb{J}$', +u'\U0001d542': '$\\mathbb{K}$', +u'\U0001d543': '$\\mathbb{L}$', +u'\U0001d544': '$\\mathbb{M}$', +u'\U0001d546': '$\\mathbb{O}$', +u'\U0001d54a': '$\\mathbb{S}$', +u'\U0001d54b': '$\\mathbb{T}$', +u'\U0001d54c': '$\\mathbb{U}$', +u'\U0001d54d': '$\\mathbb{V}$', +u'\U0001d54e': '$\\mathbb{W}$', +u'\U0001d54f': '$\\mathbb{X}$', +u'\U0001d550': '$\\mathbb{Y}$', +u'\U0001d552': '$\\mathbb{a}$', +u'\U0001d553': '$\\mathbb{b}$', +u'\U0001d554': '$\\mathbb{c}$', +u'\U0001d555': '$\\mathbb{d}$', +u'\U0001d556': '$\\mathbb{e}$', +u'\U0001d557': '$\\mathbb{f}$', +u'\U0001d558': '$\\mathbb{g}$', +u'\U0001d559': '$\\mathbb{h}$', +u'\U0001d55a': '$\\mathbb{i}$', +u'\U0001d55b': '$\\mathbb{j}$', +u'\U0001d55c': '$\\mathbb{k}$', +u'\U0001d55d': '$\\mathbb{l}$', +u'\U0001d55e': '$\\mathbb{m}$', +u'\U0001d55f': '$\\mathbb{n}$', +u'\U0001d560': '$\\mathbb{o}$', +u'\U0001d561': '$\\mathbb{p}$', +u'\U0001d562': '$\\mathbb{q}$', +u'\U0001d563': '$\\mathbb{r}$', +u'\U0001d564': '$\\mathbb{s}$', +u'\U0001d565': '$\\mathbb{t}$', +u'\U0001d566': '$\\mathbb{u}$', +u'\U0001d567': '$\\mathbb{v}$', +u'\U0001d568': '$\\mathbb{w}$', +u'\U0001d569': '$\\mathbb{x}$', +u'\U0001d56a': '$\\mathbb{y}$', +u'\U0001d56b': '$\\mathbb{z}$', +u'\U0001d56c': '$\\mathslbb{A}$', +u'\U0001d56d': '$\\mathslbb{B}$', +u'\U0001d56e': '$\\mathslbb{C}$', +u'\U0001d56f': '$\\mathslbb{D}$', +u'\U0001d570': '$\\mathslbb{E}$', +u'\U0001d571': '$\\mathslbb{F}$', +u'\U0001d572': '$\\mathslbb{G}$', +u'\U0001d573': '$\\mathslbb{H}$', +u'\U0001d574': '$\\mathslbb{I}$', +u'\U0001d575': '$\\mathslbb{J}$', +u'\U0001d576': '$\\mathslbb{K}$', +u'\U0001d577': '$\\mathslbb{L}$', 
+u'\U0001d578': '$\\mathslbb{M}$', +u'\U0001d579': '$\\mathslbb{N}$', +u'\U0001d57a': '$\\mathslbb{O}$', +u'\U0001d57b': '$\\mathslbb{P}$', +u'\U0001d57c': '$\\mathslbb{Q}$', +u'\U0001d57d': '$\\mathslbb{R}$', +u'\U0001d57e': '$\\mathslbb{S}$', +u'\U0001d57f': '$\\mathslbb{T}$', +u'\U0001d580': '$\\mathslbb{U}$', +u'\U0001d581': '$\\mathslbb{V}$', +u'\U0001d582': '$\\mathslbb{W}$', +u'\U0001d583': '$\\mathslbb{X}$', +u'\U0001d584': '$\\mathslbb{Y}$', +u'\U0001d585': '$\\mathslbb{Z}$', +u'\U0001d586': '$\\mathslbb{a}$', +u'\U0001d587': '$\\mathslbb{b}$', +u'\U0001d588': '$\\mathslbb{c}$', +u'\U0001d589': '$\\mathslbb{d}$', +u'\U0001d58a': '$\\mathslbb{e}$', +u'\U0001d58b': '$\\mathslbb{f}$', +u'\U0001d58c': '$\\mathslbb{g}$', +u'\U0001d58d': '$\\mathslbb{h}$', +u'\U0001d58e': '$\\mathslbb{i}$', +u'\U0001d58f': '$\\mathslbb{j}$', +u'\U0001d590': '$\\mathslbb{k}$', +u'\U0001d591': '$\\mathslbb{l}$', +u'\U0001d592': '$\\mathslbb{m}$', +u'\U0001d593': '$\\mathslbb{n}$', +u'\U0001d594': '$\\mathslbb{o}$', +u'\U0001d595': '$\\mathslbb{p}$', +u'\U0001d596': '$\\mathslbb{q}$', +u'\U0001d597': '$\\mathslbb{r}$', +u'\U0001d598': '$\\mathslbb{s}$', +u'\U0001d599': '$\\mathslbb{t}$', +u'\U0001d59a': '$\\mathslbb{u}$', +u'\U0001d59b': '$\\mathslbb{v}$', +u'\U0001d59c': '$\\mathslbb{w}$', +u'\U0001d59d': '$\\mathslbb{x}$', +u'\U0001d59e': '$\\mathslbb{y}$', +u'\U0001d59f': '$\\mathslbb{z}$', +u'\U0001d5a0': '$\\mathsf{A}$', +u'\U0001d5a1': '$\\mathsf{B}$', +u'\U0001d5a2': '$\\mathsf{C}$', +u'\U0001d5a3': '$\\mathsf{D}$', +u'\U0001d5a4': '$\\mathsf{E}$', +u'\U0001d5a5': '$\\mathsf{F}$', +u'\U0001d5a6': '$\\mathsf{G}$', +u'\U0001d5a7': '$\\mathsf{H}$', +u'\U0001d5a8': '$\\mathsf{I}$', +u'\U0001d5a9': '$\\mathsf{J}$', +u'\U0001d5aa': '$\\mathsf{K}$', +u'\U0001d5ab': '$\\mathsf{L}$', +u'\U0001d5ac': '$\\mathsf{M}$', +u'\U0001d5ad': '$\\mathsf{N}$', +u'\U0001d5ae': '$\\mathsf{O}$', +u'\U0001d5af': '$\\mathsf{P}$', +u'\U0001d5b0': '$\\mathsf{Q}$', +u'\U0001d5b1': '$\\mathsf{R}$', +u'\U0001d5b2': '$\\mathsf{S}$', +u'\U0001d5b3': '$\\mathsf{T}$', +u'\U0001d5b4': '$\\mathsf{U}$', +u'\U0001d5b5': '$\\mathsf{V}$', +u'\U0001d5b6': '$\\mathsf{W}$', +u'\U0001d5b7': '$\\mathsf{X}$', +u'\U0001d5b8': '$\\mathsf{Y}$', +u'\U0001d5b9': '$\\mathsf{Z}$', +u'\U0001d5ba': '$\\mathsf{a}$', +u'\U0001d5bb': '$\\mathsf{b}$', +u'\U0001d5bc': '$\\mathsf{c}$', +u'\U0001d5bd': '$\\mathsf{d}$', +u'\U0001d5be': '$\\mathsf{e}$', +u'\U0001d5bf': '$\\mathsf{f}$', +u'\U0001d5c0': '$\\mathsf{g}$', +u'\U0001d5c1': '$\\mathsf{h}$', +u'\U0001d5c2': '$\\mathsf{i}$', +u'\U0001d5c3': '$\\mathsf{j}$', +u'\U0001d5c4': '$\\mathsf{k}$', +u'\U0001d5c5': '$\\mathsf{l}$', +u'\U0001d5c6': '$\\mathsf{m}$', +u'\U0001d5c7': '$\\mathsf{n}$', +u'\U0001d5c8': '$\\mathsf{o}$', +u'\U0001d5c9': '$\\mathsf{p}$', +u'\U0001d5ca': '$\\mathsf{q}$', +u'\U0001d5cb': '$\\mathsf{r}$', +u'\U0001d5cc': '$\\mathsf{s}$', +u'\U0001d5cd': '$\\mathsf{t}$', +u'\U0001d5ce': '$\\mathsf{u}$', +u'\U0001d5cf': '$\\mathsf{v}$', +u'\U0001d5d0': '$\\mathsf{w}$', +u'\U0001d5d1': '$\\mathsf{x}$', +u'\U0001d5d2': '$\\mathsf{y}$', +u'\U0001d5d3': '$\\mathsf{z}$', +u'\U0001d5d4': '$\\mathsfbf{A}$', +u'\U0001d5d5': '$\\mathsfbf{B}$', +u'\U0001d5d6': '$\\mathsfbf{C}$', +u'\U0001d5d7': '$\\mathsfbf{D}$', +u'\U0001d5d8': '$\\mathsfbf{E}$', +u'\U0001d5d9': '$\\mathsfbf{F}$', +u'\U0001d5da': '$\\mathsfbf{G}$', +u'\U0001d5db': '$\\mathsfbf{H}$', +u'\U0001d5dc': '$\\mathsfbf{I}$', +u'\U0001d5dd': '$\\mathsfbf{J}$', +u'\U0001d5de': '$\\mathsfbf{K}$', +u'\U0001d5df': '$\\mathsfbf{L}$', +u'\U0001d5e0': 
'$\\mathsfbf{M}$', +u'\U0001d5e1': '$\\mathsfbf{N}$', +u'\U0001d5e2': '$\\mathsfbf{O}$', +u'\U0001d5e3': '$\\mathsfbf{P}$', +u'\U0001d5e4': '$\\mathsfbf{Q}$', +u'\U0001d5e5': '$\\mathsfbf{R}$', +u'\U0001d5e6': '$\\mathsfbf{S}$', +u'\U0001d5e7': '$\\mathsfbf{T}$', +u'\U0001d5e8': '$\\mathsfbf{U}$', +u'\U0001d5e9': '$\\mathsfbf{V}$', +u'\U0001d5ea': '$\\mathsfbf{W}$', +u'\U0001d5eb': '$\\mathsfbf{X}$', +u'\U0001d5ec': '$\\mathsfbf{Y}$', +u'\U0001d5ed': '$\\mathsfbf{Z}$', +u'\U0001d5ee': '$\\mathsfbf{a}$', +u'\U0001d5ef': '$\\mathsfbf{b}$', +u'\U0001d5f0': '$\\mathsfbf{c}$', +u'\U0001d5f1': '$\\mathsfbf{d}$', +u'\U0001d5f2': '$\\mathsfbf{e}$', +u'\U0001d5f3': '$\\mathsfbf{f}$', +u'\U0001d5f4': '$\\mathsfbf{g}$', +u'\U0001d5f5': '$\\mathsfbf{h}$', +u'\U0001d5f6': '$\\mathsfbf{i}$', +u'\U0001d5f7': '$\\mathsfbf{j}$', +u'\U0001d5f8': '$\\mathsfbf{k}$', +u'\U0001d5f9': '$\\mathsfbf{l}$', +u'\U0001d5fa': '$\\mathsfbf{m}$', +u'\U0001d5fb': '$\\mathsfbf{n}$', +u'\U0001d5fc': '$\\mathsfbf{o}$', +u'\U0001d5fd': '$\\mathsfbf{p}$', +u'\U0001d5fe': '$\\mathsfbf{q}$', +u'\U0001d5ff': '$\\mathsfbf{r}$', +u'\U0001d600': '$\\mathsfbf{s}$', +u'\U0001d601': '$\\mathsfbf{t}$', +u'\U0001d602': '$\\mathsfbf{u}$', +u'\U0001d603': '$\\mathsfbf{v}$', +u'\U0001d604': '$\\mathsfbf{w}$', +u'\U0001d605': '$\\mathsfbf{x}$', +u'\U0001d606': '$\\mathsfbf{y}$', +u'\U0001d607': '$\\mathsfbf{z}$', +u'\U0001d608': '$\\mathsfsl{A}$', +u'\U0001d609': '$\\mathsfsl{B}$', +u'\U0001d60a': '$\\mathsfsl{C}$', +u'\U0001d60b': '$\\mathsfsl{D}$', +u'\U0001d60c': '$\\mathsfsl{E}$', +u'\U0001d60d': '$\\mathsfsl{F}$', +u'\U0001d60e': '$\\mathsfsl{G}$', +u'\U0001d60f': '$\\mathsfsl{H}$', +u'\U0001d610': '$\\mathsfsl{I}$', +u'\U0001d611': '$\\mathsfsl{J}$', +u'\U0001d612': '$\\mathsfsl{K}$', +u'\U0001d613': '$\\mathsfsl{L}$', +u'\U0001d614': '$\\mathsfsl{M}$', +u'\U0001d615': '$\\mathsfsl{N}$', +u'\U0001d616': '$\\mathsfsl{O}$', +u'\U0001d617': '$\\mathsfsl{P}$', +u'\U0001d618': '$\\mathsfsl{Q}$', +u'\U0001d619': '$\\mathsfsl{R}$', +u'\U0001d61a': '$\\mathsfsl{S}$', +u'\U0001d61b': '$\\mathsfsl{T}$', +u'\U0001d61c': '$\\mathsfsl{U}$', +u'\U0001d61d': '$\\mathsfsl{V}$', +u'\U0001d61e': '$\\mathsfsl{W}$', +u'\U0001d61f': '$\\mathsfsl{X}$', +u'\U0001d620': '$\\mathsfsl{Y}$', +u'\U0001d621': '$\\mathsfsl{Z}$', +u'\U0001d622': '$\\mathsfsl{a}$', +u'\U0001d623': '$\\mathsfsl{b}$', +u'\U0001d624': '$\\mathsfsl{c}$', +u'\U0001d625': '$\\mathsfsl{d}$', +u'\U0001d626': '$\\mathsfsl{e}$', +u'\U0001d627': '$\\mathsfsl{f}$', +u'\U0001d628': '$\\mathsfsl{g}$', +u'\U0001d629': '$\\mathsfsl{h}$', +u'\U0001d62a': '$\\mathsfsl{i}$', +u'\U0001d62b': '$\\mathsfsl{j}$', +u'\U0001d62c': '$\\mathsfsl{k}$', +u'\U0001d62d': '$\\mathsfsl{l}$', +u'\U0001d62e': '$\\mathsfsl{m}$', +u'\U0001d62f': '$\\mathsfsl{n}$', +u'\U0001d630': '$\\mathsfsl{o}$', +u'\U0001d631': '$\\mathsfsl{p}$', +u'\U0001d632': '$\\mathsfsl{q}$', +u'\U0001d633': '$\\mathsfsl{r}$', +u'\U0001d634': '$\\mathsfsl{s}$', +u'\U0001d635': '$\\mathsfsl{t}$', +u'\U0001d636': '$\\mathsfsl{u}$', +u'\U0001d637': '$\\mathsfsl{v}$', +u'\U0001d638': '$\\mathsfsl{w}$', +u'\U0001d639': '$\\mathsfsl{x}$', +u'\U0001d63a': '$\\mathsfsl{y}$', +u'\U0001d63b': '$\\mathsfsl{z}$', +u'\U0001d63c': '$\\mathsfbfsl{A}$', +u'\U0001d63d': '$\\mathsfbfsl{B}$', +u'\U0001d63e': '$\\mathsfbfsl{C}$', +u'\U0001d63f': '$\\mathsfbfsl{D}$', +u'\U0001d640': '$\\mathsfbfsl{E}$', +u'\U0001d641': '$\\mathsfbfsl{F}$', +u'\U0001d642': '$\\mathsfbfsl{G}$', +u'\U0001d643': '$\\mathsfbfsl{H}$', +u'\U0001d644': '$\\mathsfbfsl{I}$', +u'\U0001d645': 
'$\\mathsfbfsl{J}$', +u'\U0001d646': '$\\mathsfbfsl{K}$', +u'\U0001d647': '$\\mathsfbfsl{L}$', +u'\U0001d648': '$\\mathsfbfsl{M}$', +u'\U0001d649': '$\\mathsfbfsl{N}$', +u'\U0001d64a': '$\\mathsfbfsl{O}$', +u'\U0001d64b': '$\\mathsfbfsl{P}$', +u'\U0001d64c': '$\\mathsfbfsl{Q}$', +u'\U0001d64d': '$\\mathsfbfsl{R}$', +u'\U0001d64e': '$\\mathsfbfsl{S}$', +u'\U0001d64f': '$\\mathsfbfsl{T}$', +u'\U0001d650': '$\\mathsfbfsl{U}$', +u'\U0001d651': '$\\mathsfbfsl{V}$', +u'\U0001d652': '$\\mathsfbfsl{W}$', +u'\U0001d653': '$\\mathsfbfsl{X}$', +u'\U0001d654': '$\\mathsfbfsl{Y}$', +u'\U0001d655': '$\\mathsfbfsl{Z}$', +u'\U0001d656': '$\\mathsfbfsl{a}$', +u'\U0001d657': '$\\mathsfbfsl{b}$', +u'\U0001d658': '$\\mathsfbfsl{c}$', +u'\U0001d659': '$\\mathsfbfsl{d}$', +u'\U0001d65a': '$\\mathsfbfsl{e}$', +u'\U0001d65b': '$\\mathsfbfsl{f}$', +u'\U0001d65c': '$\\mathsfbfsl{g}$', +u'\U0001d65d': '$\\mathsfbfsl{h}$', +u'\U0001d65e': '$\\mathsfbfsl{i}$', +u'\U0001d65f': '$\\mathsfbfsl{j}$', +u'\U0001d660': '$\\mathsfbfsl{k}$', +u'\U0001d661': '$\\mathsfbfsl{l}$', +u'\U0001d662': '$\\mathsfbfsl{m}$', +u'\U0001d663': '$\\mathsfbfsl{n}$', +u'\U0001d664': '$\\mathsfbfsl{o}$', +u'\U0001d665': '$\\mathsfbfsl{p}$', +u'\U0001d666': '$\\mathsfbfsl{q}$', +u'\U0001d667': '$\\mathsfbfsl{r}$', +u'\U0001d668': '$\\mathsfbfsl{s}$', +u'\U0001d669': '$\\mathsfbfsl{t}$', +u'\U0001d66a': '$\\mathsfbfsl{u}$', +u'\U0001d66b': '$\\mathsfbfsl{v}$', +u'\U0001d66c': '$\\mathsfbfsl{w}$', +u'\U0001d66d': '$\\mathsfbfsl{x}$', +u'\U0001d66e': '$\\mathsfbfsl{y}$', +u'\U0001d66f': '$\\mathsfbfsl{z}$', +u'\U0001d670': '$\\mathtt{A}$', +u'\U0001d671': '$\\mathtt{B}$', +u'\U0001d672': '$\\mathtt{C}$', +u'\U0001d673': '$\\mathtt{D}$', +u'\U0001d674': '$\\mathtt{E}$', +u'\U0001d675': '$\\mathtt{F}$', +u'\U0001d676': '$\\mathtt{G}$', +u'\U0001d677': '$\\mathtt{H}$', +u'\U0001d678': '$\\mathtt{I}$', +u'\U0001d679': '$\\mathtt{J}$', +u'\U0001d67a': '$\\mathtt{K}$', +u'\U0001d67b': '$\\mathtt{L}$', +u'\U0001d67c': '$\\mathtt{M}$', +u'\U0001d67d': '$\\mathtt{N}$', +u'\U0001d67e': '$\\mathtt{O}$', +u'\U0001d67f': '$\\mathtt{P}$', +u'\U0001d680': '$\\mathtt{Q}$', +u'\U0001d681': '$\\mathtt{R}$', +u'\U0001d682': '$\\mathtt{S}$', +u'\U0001d683': '$\\mathtt{T}$', +u'\U0001d684': '$\\mathtt{U}$', +u'\U0001d685': '$\\mathtt{V}$', +u'\U0001d686': '$\\mathtt{W}$', +u'\U0001d687': '$\\mathtt{X}$', +u'\U0001d688': '$\\mathtt{Y}$', +u'\U0001d689': '$\\mathtt{Z}$', +u'\U0001d68a': '$\\mathtt{a}$', +u'\U0001d68b': '$\\mathtt{b}$', +u'\U0001d68c': '$\\mathtt{c}$', +u'\U0001d68d': '$\\mathtt{d}$', +u'\U0001d68e': '$\\mathtt{e}$', +u'\U0001d68f': '$\\mathtt{f}$', +u'\U0001d690': '$\\mathtt{g}$', +u'\U0001d691': '$\\mathtt{h}$', +u'\U0001d692': '$\\mathtt{i}$', +u'\U0001d693': '$\\mathtt{j}$', +u'\U0001d694': '$\\mathtt{k}$', +u'\U0001d695': '$\\mathtt{l}$', +u'\U0001d696': '$\\mathtt{m}$', +u'\U0001d697': '$\\mathtt{n}$', +u'\U0001d698': '$\\mathtt{o}$', +u'\U0001d699': '$\\mathtt{p}$', +u'\U0001d69a': '$\\mathtt{q}$', +u'\U0001d69b': '$\\mathtt{r}$', +u'\U0001d69c': '$\\mathtt{s}$', +u'\U0001d69d': '$\\mathtt{t}$', +u'\U0001d69e': '$\\mathtt{u}$', +u'\U0001d69f': '$\\mathtt{v}$', +u'\U0001d6a0': '$\\mathtt{w}$', +u'\U0001d6a1': '$\\mathtt{x}$', +u'\U0001d6a2': '$\\mathtt{y}$', +u'\U0001d6a3': '$\\mathtt{z}$', +u'\U0001d6a8': '$\\mathbf{\\Alpha}$', +u'\U0001d6a9': '$\\mathbf{\\Beta}$', +u'\U0001d6aa': '$\\mathbf{\\Gamma}$', +u'\U0001d6ab': '$\\mathbf{\\Delta}$', +u'\U0001d6ac': '$\\mathbf{\\Epsilon}$', +u'\U0001d6ad': '$\\mathbf{\\Zeta}$', +u'\U0001d6ae': 
'$\\mathbf{\\Eta}$', +u'\U0001d6af': '$\\mathbf{\\Theta}$', +u'\U0001d6b0': '$\\mathbf{\\Iota}$', +u'\U0001d6b1': '$\\mathbf{\\Kappa}$', +u'\U0001d6b2': '$\\mathbf{\\Lambda}$', +u'\U0001d6b3': '$M$', +u'\U0001d6b4': '$N$', +u'\U0001d6b5': '$\\mathbf{\\Xi}$', +u'\U0001d6b6': '$O$', +u'\U0001d6b7': '$\\mathbf{\\Pi}$', +u'\U0001d6b8': '$\\mathbf{\\Rho}$', +u'\U0001d6b9': '{\\mathbf{\\vartheta}}', +u'\U0001d6ba': '$\\mathbf{\\Sigma}$', +u'\U0001d6bb': '$\\mathbf{\\Tau}$', +u'\U0001d6bc': '$\\mathbf{\\Upsilon}$', +u'\U0001d6bd': '$\\mathbf{\\Phi}$', +u'\U0001d6be': '$\\mathbf{\\Chi}$', +u'\U0001d6bf': '$\\mathbf{\\Psi}$', +u'\U0001d6c0': '$\\mathbf{\\Omega}$', +u'\U0001d6c1': '$\\mathbf{\\nabla}$', +u'\U0001d6c2': '$\\mathbf{\\Alpha}$', +u'\U0001d6c3': '$\\mathbf{\\Beta}$', +u'\U0001d6c4': '$\\mathbf{\\Gamma}$', +u'\U0001d6c5': '$\\mathbf{\\Delta}$', +u'\U0001d6c6': '$\\mathbf{\\Epsilon}$', +u'\U0001d6c7': '$\\mathbf{\\Zeta}$', +u'\U0001d6c8': '$\\mathbf{\\Eta}$', +u'\U0001d6c9': '$\\mathbf{\\theta}$', +u'\U0001d6ca': '$\\mathbf{\\Iota}$', +u'\U0001d6cb': '$\\mathbf{\\Kappa}$', +u'\U0001d6cc': '$\\mathbf{\\Lambda}$', +u'\U0001d6cd': '$M$', +u'\U0001d6ce': '$N$', +u'\U0001d6cf': '$\\mathbf{\\Xi}$', +u'\U0001d6d0': '$O$', +u'\U0001d6d1': '$\\mathbf{\\Pi}$', +u'\U0001d6d2': '$\\mathbf{\\Rho}$', +u'\U0001d6d3': '$\\mathbf{\\varsigma}$', +u'\U0001d6d4': '$\\mathbf{\\Sigma}$', +u'\U0001d6d5': '$\\mathbf{\\Tau}$', +u'\U0001d6d6': '$\\mathbf{\\Upsilon}$', +u'\U0001d6d7': '$\\mathbf{\\Phi}$', +u'\U0001d6d8': '$\\mathbf{\\Chi}$', +u'\U0001d6d9': '$\\mathbf{\\Psi}$', +u'\U0001d6da': '$\\mathbf{\\Omega}$', +u'\U0001d6db': '$\\partial$', +u'\U0001d6dc': '$\\in$', +u'\U0001d6dd': '{\\mathbf{\\vartheta}}', +u'\U0001d6de': '{\\mathbf{\\varkappa}}', +u'\U0001d6df': '{\\mathbf{\\phi}}', +u'\U0001d6e0': '{\\mathbf{\\varrho}}', +u'\U0001d6e1': '{\\mathbf{\\varpi}}', +u'\U0001d6e2': '$\\mathsl{\\Alpha}$', +u'\U0001d6e3': '$\\mathsl{\\Beta}$', +u'\U0001d6e4': '$\\mathsl{\\Gamma}$', +u'\U0001d6e5': '$\\mathsl{\\Delta}$', +u'\U0001d6e6': '$\\mathsl{\\Epsilon}$', +u'\U0001d6e7': '$\\mathsl{\\Zeta}$', +u'\U0001d6e8': '$\\mathsl{\\Eta}$', +u'\U0001d6e9': '$\\mathsl{\\Theta}$', +u'\U0001d6ea': '$\\mathsl{\\Iota}$', +u'\U0001d6eb': '$\\mathsl{\\Kappa}$', +u'\U0001d6ec': '$\\mathsl{\\Lambda}$', +u'\U0001d6ed': '$M$', +u'\U0001d6ee': '$N$', +u'\U0001d6ef': '$\\mathsl{\\Xi}$', +u'\U0001d6f0': '$O$', +u'\U0001d6f1': '$\\mathsl{\\Pi}$', +u'\U0001d6f2': '$\\mathsl{\\Rho}$', +u'\U0001d6f3': '{\\mathsl{\\vartheta}}', +u'\U0001d6f4': '$\\mathsl{\\Sigma}$', +u'\U0001d6f5': '$\\mathsl{\\Tau}$', +u'\U0001d6f6': '$\\mathsl{\\Upsilon}$', +u'\U0001d6f7': '$\\mathsl{\\Phi}$', +u'\U0001d6f8': '$\\mathsl{\\Chi}$', +u'\U0001d6f9': '$\\mathsl{\\Psi}$', +u'\U0001d6fa': '$\\mathsl{\\Omega}$', +u'\U0001d6fb': '$\\mathsl{\\nabla}$', +u'\U0001d6fc': '$\\mathsl{\\Alpha}$', +u'\U0001d6fd': '$\\mathsl{\\Beta}$', +u'\U0001d6fe': '$\\mathsl{\\Gamma}$', +u'\U0001d6ff': '$\\mathsl{\\Delta}$', +u'\U0001d700': '$\\mathsl{\\Epsilon}$', +u'\U0001d701': '$\\mathsl{\\Zeta}$', +u'\U0001d702': '$\\mathsl{\\Eta}$', +u'\U0001d703': '$\\mathsl{\\Theta}$', +u'\U0001d704': '$\\mathsl{\\Iota}$', +u'\U0001d705': '$\\mathsl{\\Kappa}$', +u'\U0001d706': '$\\mathsl{\\Lambda}$', +u'\U0001d707': '$M$', +u'\U0001d708': '$N$', +u'\U0001d709': '$\\mathsl{\\Xi}$', +u'\U0001d70a': '$O$', +u'\U0001d70b': '$\\mathsl{\\Pi}$', +u'\U0001d70c': '$\\mathsl{\\Rho}$', +u'\U0001d70d': '$\\mathsl{\\varsigma}$', +u'\U0001d70e': '$\\mathsl{\\Sigma}$', +u'\U0001d70f': '$\\mathsl{\\Tau}$', 
+u'\U0001d710': '$\\mathsl{\\Upsilon}$', +u'\U0001d711': '$\\mathsl{\\Phi}$', +u'\U0001d712': '$\\mathsl{\\Chi}$', +u'\U0001d713': '$\\mathsl{\\Psi}$', +u'\U0001d714': '$\\mathsl{\\Omega}$', +u'\U0001d715': '$\\partial$', +u'\U0001d716': '$\\in$', +u'\U0001d717': '{\\mathsl{\\vartheta}}', +u'\U0001d718': '{\\mathsl{\\varkappa}}', +u'\U0001d719': '{\\mathsl{\\phi}}', +u'\U0001d71a': '{\\mathsl{\\varrho}}', +u'\U0001d71b': '{\\mathsl{\\varpi}}', +u'\U0001d71c': '$\\mathbit{\\Alpha}$', +u'\U0001d71d': '$\\mathbit{\\Beta}$', +u'\U0001d71e': '$\\mathbit{\\Gamma}$', +u'\U0001d71f': '$\\mathbit{\\Delta}$', +u'\U0001d720': '$\\mathbit{\\Epsilon}$', +u'\U0001d721': '$\\mathbit{\\Zeta}$', +u'\U0001d722': '$\\mathbit{\\Eta}$', +u'\U0001d723': '$\\mathbit{\\Theta}$', +u'\U0001d724': '$\\mathbit{\\Iota}$', +u'\U0001d725': '$\\mathbit{\\Kappa}$', +u'\U0001d726': '$\\mathbit{\\Lambda}$', +u'\U0001d727': '$M$', +u'\U0001d728': '$N$', +u'\U0001d729': '$\\mathbit{\\Xi}$', +u'\U0001d72a': '$O$', +u'\U0001d72b': '$\\mathbit{\\Pi}$', +u'\U0001d72c': '$\\mathbit{\\Rho}$', +u'\U0001d72d': '{\\mathbit{O}}', +u'\U0001d72e': '$\\mathbit{\\Sigma}$', +u'\U0001d72f': '$\\mathbit{\\Tau}$', +u'\U0001d730': '$\\mathbit{\\Upsilon}$', +u'\U0001d731': '$\\mathbit{\\Phi}$', +u'\U0001d732': '$\\mathbit{\\Chi}$', +u'\U0001d733': '$\\mathbit{\\Psi}$', +u'\U0001d734': '$\\mathbit{\\Omega}$', +u'\U0001d735': '$\\mathbit{\\nabla}$', +u'\U0001d736': '$\\mathbit{\\Alpha}$', +u'\U0001d737': '$\\mathbit{\\Beta}$', +u'\U0001d738': '$\\mathbit{\\Gamma}$', +u'\U0001d739': '$\\mathbit{\\Delta}$', +u'\U0001d73a': '$\\mathbit{\\Epsilon}$', +u'\U0001d73b': '$\\mathbit{\\Zeta}$', +u'\U0001d73c': '$\\mathbit{\\Eta}$', +u'\U0001d73d': '$\\mathbit{\\Theta}$', +u'\U0001d73e': '$\\mathbit{\\Iota}$', +u'\U0001d73f': '$\\mathbit{\\Kappa}$', +u'\U0001d740': '$\\mathbit{\\Lambda}$', +u'\U0001d741': '$M$', +u'\U0001d742': '$N$', +u'\U0001d743': '$\\mathbit{\\Xi}$', +u'\U0001d744': '$O$', +u'\U0001d745': '$\\mathbit{\\Pi}$', +u'\U0001d746': '$\\mathbit{\\Rho}$', +u'\U0001d747': '$\\mathbit{\\varsigma}$', +u'\U0001d748': '$\\mathbit{\\Sigma}$', +u'\U0001d749': '$\\mathbit{\\Tau}$', +u'\U0001d74a': '$\\mathbit{\\Upsilon}$', +u'\U0001d74b': '$\\mathbit{\\Phi}$', +u'\U0001d74c': '$\\mathbit{\\Chi}$', +u'\U0001d74d': '$\\mathbit{\\Psi}$', +u'\U0001d74e': '$\\mathbit{\\Omega}$', +u'\U0001d74f': '$\\partial$', +u'\U0001d750': '$\\in$', +u'\U0001d751': '{\\mathbit{\\vartheta}}', +u'\U0001d752': '{\\mathbit{\\varkappa}}', +u'\U0001d753': '{\\mathbit{\\phi}}', +u'\U0001d754': '{\\mathbit{\\varrho}}', +u'\U0001d755': '{\\mathbit{\\varpi}}', +u'\U0001d756': '$\\mathsfbf{\\Alpha}$', +u'\U0001d757': '$\\mathsfbf{\\Beta}$', +u'\U0001d758': '$\\mathsfbf{\\Gamma}$', +u'\U0001d759': '$\\mathsfbf{\\Delta}$', +u'\U0001d75a': '$\\mathsfbf{\\Epsilon}$', +u'\U0001d75b': '$\\mathsfbf{\\Zeta}$', +u'\U0001d75c': '$\\mathsfbf{\\Eta}$', +u'\U0001d75d': '$\\mathsfbf{\\Theta}$', +u'\U0001d75e': '$\\mathsfbf{\\Iota}$', +u'\U0001d75f': '$\\mathsfbf{\\Kappa}$', +u'\U0001d760': '$\\mathsfbf{\\Lambda}$', +u'\U0001d761': '$M$', +u'\U0001d762': '$N$', +u'\U0001d763': '$\\mathsfbf{\\Xi}$', +u'\U0001d764': '$O$', +u'\U0001d765': '$\\mathsfbf{\\Pi}$', +u'\U0001d766': '$\\mathsfbf{\\Rho}$', +u'\U0001d767': '{\\mathsfbf{\\vartheta}}', +u'\U0001d768': '$\\mathsfbf{\\Sigma}$', +u'\U0001d769': '$\\mathsfbf{\\Tau}$', +u'\U0001d76a': '$\\mathsfbf{\\Upsilon}$', +u'\U0001d76b': '$\\mathsfbf{\\Phi}$', +u'\U0001d76c': '$\\mathsfbf{\\Chi}$', +u'\U0001d76d': '$\\mathsfbf{\\Psi}$', +u'\U0001d76e': 
'$\\mathsfbf{\\Omega}$', +u'\U0001d76f': '$\\mathsfbf{\\nabla}$', +u'\U0001d770': '$\\mathsfbf{\\Alpha}$', +u'\U0001d771': '$\\mathsfbf{\\Beta}$', +u'\U0001d772': '$\\mathsfbf{\\Gamma}$', +u'\U0001d773': '$\\mathsfbf{\\Delta}$', +u'\U0001d774': '$\\mathsfbf{\\Epsilon}$', +u'\U0001d775': '$\\mathsfbf{\\Zeta}$', +u'\U0001d776': '$\\mathsfbf{\\Eta}$', +u'\U0001d777': '$\\mathsfbf{\\Theta}$', +u'\U0001d778': '$\\mathsfbf{\\Iota}$', +u'\U0001d779': '$\\mathsfbf{\\Kappa}$', +u'\U0001d77a': '$\\mathsfbf{\\Lambda}$', +u'\U0001d77b': '$M$', +u'\U0001d77c': '$N$', +u'\U0001d77d': '$\\mathsfbf{\\Xi}$', +u'\U0001d77e': '$O$', +u'\U0001d77f': '$\\mathsfbf{\\Pi}$', +u'\U0001d780': '$\\mathsfbf{\\Rho}$', +u'\U0001d781': '$\\mathsfbf{\\varsigma}$', +u'\U0001d782': '$\\mathsfbf{\\Sigma}$', +u'\U0001d783': '$\\mathsfbf{\\Tau}$', +u'\U0001d784': '$\\mathsfbf{\\Upsilon}$', +u'\U0001d785': '$\\mathsfbf{\\Phi}$', +u'\U0001d786': '$\\mathsfbf{\\Chi}$', +u'\U0001d787': '$\\mathsfbf{\\Psi}$', +u'\U0001d788': '$\\mathsfbf{\\Omega}$', +u'\U0001d789': '$\\partial$', +u'\U0001d78a': '$\\in$', +u'\U0001d78b': '{\\mathsfbf{\\vartheta}}', +u'\U0001d78c': '{\\mathsfbf{\\varkappa}}', +u'\U0001d78d': '{\\mathsfbf{\\phi}}', +u'\U0001d78e': '{\\mathsfbf{\\varrho}}', +u'\U0001d78f': '{\\mathsfbf{\\varpi}}', +u'\U0001d790': '$\\mathsfbfsl{\\Alpha}$', +u'\U0001d791': '$\\mathsfbfsl{\\Beta}$', +u'\U0001d792': '$\\mathsfbfsl{\\Gamma}$', +u'\U0001d793': '$\\mathsfbfsl{\\Delta}$', +u'\U0001d794': '$\\mathsfbfsl{\\Epsilon}$', +u'\U0001d795': '$\\mathsfbfsl{\\Zeta}$', +u'\U0001d796': '$\\mathsfbfsl{\\Eta}$', +u'\U0001d797': '$\\mathsfbfsl{\\vartheta}$', +u'\U0001d798': '$\\mathsfbfsl{\\Iota}$', +u'\U0001d799': '$\\mathsfbfsl{\\Kappa}$', +u'\U0001d79a': '$\\mathsfbfsl{\\Lambda}$', +u'\U0001d79b': '$M$', +u'\U0001d79c': '$N$', +u'\U0001d79d': '$\\mathsfbfsl{\\Xi}$', +u'\U0001d79e': '$O$', +u'\U0001d79f': '$\\mathsfbfsl{\\Pi}$', +u'\U0001d7a0': '$\\mathsfbfsl{\\Rho}$', +u'\U0001d7a1': '{\\mathsfbfsl{\\vartheta}}', +u'\U0001d7a2': '$\\mathsfbfsl{\\Sigma}$', +u'\U0001d7a3': '$\\mathsfbfsl{\\Tau}$', +u'\U0001d7a4': '$\\mathsfbfsl{\\Upsilon}$', +u'\U0001d7a5': '$\\mathsfbfsl{\\Phi}$', +u'\U0001d7a6': '$\\mathsfbfsl{\\Chi}$', +u'\U0001d7a7': '$\\mathsfbfsl{\\Psi}$', +u'\U0001d7a8': '$\\mathsfbfsl{\\Omega}$', +u'\U0001d7a9': '$\\mathsfbfsl{\\nabla}$', +u'\U0001d7aa': '$\\mathsfbfsl{\\Alpha}$', +u'\U0001d7ab': '$\\mathsfbfsl{\\Beta}$', +u'\U0001d7ac': '$\\mathsfbfsl{\\Gamma}$', +u'\U0001d7ad': '$\\mathsfbfsl{\\Delta}$', +u'\U0001d7ae': '$\\mathsfbfsl{\\Epsilon}$', +u'\U0001d7af': '$\\mathsfbfsl{\\Zeta}$', +u'\U0001d7b0': '$\\mathsfbfsl{\\Eta}$', +u'\U0001d7b1': '$\\mathsfbfsl{\\vartheta}$', +u'\U0001d7b2': '$\\mathsfbfsl{\\Iota}$', +u'\U0001d7b3': '$\\mathsfbfsl{\\Kappa}$', +u'\U0001d7b4': '$\\mathsfbfsl{\\Lambda}$', +u'\U0001d7b5': '$M$', +u'\U0001d7b6': '$N$', +u'\U0001d7b7': '$\\mathsfbfsl{\\Xi}$', +u'\U0001d7b8': '$O$', +u'\U0001d7b9': '$\\mathsfbfsl{\\Pi}$', +u'\U0001d7ba': '$\\mathsfbfsl{\\Rho}$', +u'\U0001d7bb': '$\\mathsfbfsl{\\varsigma}$', +u'\U0001d7bc': '$\\mathsfbfsl{\\Sigma}$', +u'\U0001d7bd': '$\\mathsfbfsl{\\Tau}$', +u'\U0001d7be': '$\\mathsfbfsl{\\Upsilon}$', +u'\U0001d7bf': '$\\mathsfbfsl{\\Phi}$', +u'\U0001d7c0': '$\\mathsfbfsl{\\Chi}$', +u'\U0001d7c1': '$\\mathsfbfsl{\\Psi}$', +u'\U0001d7c2': '$\\mathsfbfsl{\\Omega}$', +u'\U0001d7c3': '$\\partial$', +u'\U0001d7c4': '$\\in$', +u'\U0001d7c5': '{\\mathsfbfsl{\\vartheta}}', +u'\U0001d7c6': '{\\mathsfbfsl{\\varkappa}}', +u'\U0001d7c7': '{\\mathsfbfsl{\\phi}}', +u'\U0001d7c8': 
'{\\mathsfbfsl{\\varrho}}', +u'\U0001d7c9': '{\\mathsfbfsl{\\varpi}}', +u'\U0001d7ce': '$\\mathbf{0}$', +u'\U0001d7cf': '$\\mathbf{1}$', +u'\U0001d7d0': '$\\mathbf{2}$', +u'\U0001d7d1': '$\\mathbf{3}$', +u'\U0001d7d2': '$\\mathbf{4}$', +u'\U0001d7d3': '$\\mathbf{5}$', +u'\U0001d7d4': '$\\mathbf{6}$', +u'\U0001d7d5': '$\\mathbf{7}$', +u'\U0001d7d6': '$\\mathbf{8}$', +u'\U0001d7d7': '$\\mathbf{9}$', +u'\U0001d7d8': '$\\mathbb{0}$', +u'\U0001d7d9': '$\\mathbb{1}$', +u'\U0001d7da': '$\\mathbb{2}$', +u'\U0001d7db': '$\\mathbb{3}$', +u'\U0001d7dc': '$\\mathbb{4}$', +u'\U0001d7dd': '$\\mathbb{5}$', +u'\U0001d7de': '$\\mathbb{6}$', +u'\U0001d7df': '$\\mathbb{7}$', +u'\U0001d7e0': '$\\mathbb{8}$', +u'\U0001d7e1': '$\\mathbb{9}$', +u'\U0001d7e2': '$\\mathsf{0}$', +u'\U0001d7e3': '$\\mathsf{1}$', +u'\U0001d7e4': '$\\mathsf{2}$', +u'\U0001d7e5': '$\\mathsf{3}$', +u'\U0001d7e6': '$\\mathsf{4}$', +u'\U0001d7e7': '$\\mathsf{5}$', +u'\U0001d7e8': '$\\mathsf{6}$', +u'\U0001d7e9': '$\\mathsf{7}$', +u'\U0001d7ea': '$\\mathsf{8}$', +u'\U0001d7eb': '$\\mathsf{9}$', +u'\U0001d7ec': '$\\mathsfbf{0}$', +u'\U0001d7ed': '$\\mathsfbf{1}$', +u'\U0001d7ee': '$\\mathsfbf{2}$', +u'\U0001d7ef': '$\\mathsfbf{3}$', +u'\U0001d7f0': '$\\mathsfbf{4}$', +u'\U0001d7f1': '$\\mathsfbf{5}$', +u'\U0001d7f2': '$\\mathsfbf{6}$', +u'\U0001d7f3': '$\\mathsfbf{7}$', +u'\U0001d7f4': '$\\mathsfbf{8}$', +u'\U0001d7f5': '$\\mathsfbf{9}$', +u'\U0001d7f6': '$\\mathtt{0}$', +u'\U0001d7f7': '$\\mathtt{1}$', +u'\U0001d7f8': '$\\mathtt{2}$', +u'\U0001d7f9': '$\\mathtt{3}$', +u'\U0001d7fa': '$\\mathtt{4}$', +u'\U0001d7fb': '$\\mathtt{5}$', +u'\U0001d7fc': '$\\mathtt{6}$', +u'\U0001d7fd': '$\\mathtt{7}$', +u'\U0001d7fe': '$\\mathtt{8}$', +u'\U0001d7ff': '$\\mathtt{9}$'} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/null.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/null.py Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,21 @@ +# $Id: null.py 4564 2006-05-21 20:44:42Z wiemann $ +# Author: David Goodger +# Copyright: This module has been placed in the public domain. + +""" +A do-nothing Writer. +""" + +from docutils import writers + + +class Writer(writers.UnfilteredWriter): + + supported = ('null',) + """Formats this writer supports.""" + + config_section = 'null writer' + config_section_dependencies = ('writers',) + + def translate(self): + pass diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/pep_html/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/pep_html/__init__.py Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,105 @@ +# $Id: __init__.py 4564 2006-05-21 20:44:42Z wiemann $ +# Author: David Goodger +# Copyright: This module has been placed in the public domain. + +""" +PEP HTML Writer. 
+""" + +__docformat__ = 'reStructuredText' + + +import sys +import os +import os.path +import codecs +import docutils +from docutils import frontend, nodes, utils, writers +from docutils.writers import html4css1 + + +class Writer(html4css1.Writer): + + default_stylesheet = 'pep.css' + + default_stylesheet_path = utils.relative_path( + os.path.join(os.getcwd(), 'dummy'), + os.path.join(os.path.dirname(__file__), default_stylesheet)) + + default_template = 'template.txt' + + default_template_path = utils.relative_path( + os.path.join(os.getcwd(), 'dummy'), + os.path.join(os.path.dirname(__file__), default_template)) + + settings_spec = html4css1.Writer.settings_spec + ( + 'PEP/HTML-Specific Options', + 'For the PEP/HTML writer, the default value for the --stylesheet-path ' + 'option is "%s", and the default value for --template is "%s". ' + 'See HTML-Specific Options above.' + % (default_stylesheet_path, default_template_path), + (('Python\'s home URL. Default is "http://www.python.org".', + ['--python-home'], + {'default': 'http://www.python.org', 'metavar': ''}), + ('Home URL prefix for PEPs. Default is "." (current directory).', + ['--pep-home'], + {'default': '.', 'metavar': ''}), + # For testing. + (frontend.SUPPRESS_HELP, + ['--no-random'], + {'action': 'store_true', 'validator': frontend.validate_boolean}),)) + + settings_default_overrides = {'stylesheet_path': default_stylesheet_path, + 'template': default_template_path,} + + relative_path_settings = (html4css1.Writer.relative_path_settings + + ('template',)) + + config_section = 'pep_html writer' + config_section_dependencies = ('writers', 'html4css1 writer') + + def __init__(self): + html4css1.Writer.__init__(self) + self.translator_class = HTMLTranslator + + def interpolation_dict(self): + subs = html4css1.Writer.interpolation_dict(self) + settings = self.document.settings + pyhome = settings.python_home + subs['pyhome'] = pyhome + subs['pephome'] = settings.pep_home + if pyhome == '..': + subs['pepindex'] = '.' + else: + subs['pepindex'] = pyhome + '/dev/peps' + index = self.document.first_child_matching_class(nodes.field_list) + header = self.document[index] + self.pepnum = header[0][1].astext() + subs['pep'] = self.pepnum + if settings.no_random: + subs['banner'] = 0 + else: + import random + subs['banner'] = random.randrange(64) + try: + subs['pepnum'] = '%04i' % int(self.pepnum) + except ValueError: + subs['pepnum'] = pepnum + self.title = header[1][1].astext() + subs['title'] = self.title + subs['body'] = ''.join( + self.body_pre_docinfo + self.docinfo + self.body) + return subs + + def assemble_parts(self): + html4css1.Writer.assemble_parts(self) + self.parts['title'] = [self.title] + self.parts['pepnum'] = self.pepnum + + +class HTMLTranslator(html4css1.HTMLTranslator): + + def depart_field_list(self, node): + html4css1.HTMLTranslator.depart_field_list(self, node) + if 'rfc2822' in node['classes']: + self.body.append('
    \n') diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/pep_html/pep.css --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/pep_html/pep.css Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,344 @@ +/* +:Author: David Goodger +:Contact: goodger@python.org +:date: $Date: 2006-05-21 22:44:42 +0200 (Sun, 21 May 2006) $ +:version: $Revision: 4564 $ +:copyright: This stylesheet has been placed in the public domain. + +Default cascading style sheet for the PEP HTML output of Docutils. +*/ + +/* "! important" is used here to override other ``margin-top`` and + ``margin-bottom`` styles that are later in the stylesheet or + more specific. See http://www.w3.org/TR/CSS1#the-cascade */ +.first { + margin-top: 0 ! important } + +.last, .with-subtitle { + margin-bottom: 0 ! important } + +.hidden { + display: none } + +.navigation { + width: 100% ; + background: #99ccff ; + margin-top: 0px ; + margin-bottom: 0px } + +.navigation .navicon { + width: 150px ; + height: 35px } + +.navigation .textlinks { + padding-left: 1em ; + text-align: left } + +.navigation td, .navigation th { + padding-left: 0em ; + padding-right: 0em ; + vertical-align: middle } + +.rfc2822 { + margin-top: 0.5em ; + margin-left: 0.5em ; + margin-right: 0.5em ; + margin-bottom: 0em } + +.rfc2822 td { + text-align: left } + +.rfc2822 th.field-name { + text-align: right ; + font-family: sans-serif ; + padding-right: 0.5em ; + font-weight: bold ; + margin-bottom: 0em } + +a.toc-backref { + text-decoration: none ; + color: black } + +blockquote.epigraph { + margin: 2em 5em ; } + +body { + margin: 0px ; + margin-bottom: 1em ; + padding: 0px } + +dl.docutils dd { + margin-bottom: 0.5em } + +div.section { + margin-left: 1em ; + margin-right: 1em ; + margin-bottom: 1.5em } + +div.section div.section { + margin-left: 0em ; + margin-right: 0em ; + margin-top: 1.5em } + +div.abstract { + margin: 2em 5em } + +div.abstract p.topic-title { + font-weight: bold ; + text-align: center } + +div.admonition, div.attention, div.caution, div.danger, div.error, +div.hint, div.important, div.note, div.tip, div.warning { + margin: 2em ; + border: medium outset ; + padding: 1em } + +div.admonition p.admonition-title, div.hint p.admonition-title, +div.important p.admonition-title, div.note p.admonition-title, +div.tip p.admonition-title { + font-weight: bold ; + font-family: sans-serif } + +div.attention p.admonition-title, div.caution p.admonition-title, +div.danger p.admonition-title, div.error p.admonition-title, +div.warning p.admonition-title { + color: red ; + font-weight: bold ; + font-family: sans-serif } + +/* Uncomment (and remove this text!) to get reduced vertical space in + compound paragraphs. 
+div.compound .compound-first, div.compound .compound-middle { + margin-bottom: 0.5em } + +div.compound .compound-last, div.compound .compound-middle { + margin-top: 0.5em } +*/ + +div.dedication { + margin: 2em 5em ; + text-align: center ; + font-style: italic } + +div.dedication p.topic-title { + font-weight: bold ; + font-style: normal } + +div.figure { + margin-left: 2em ; + margin-right: 2em } + +div.footer, div.header { + clear: both; + font-size: smaller } + +div.footer { + margin-left: 1em ; + margin-right: 1em } + +div.line-block { + display: block ; + margin-top: 1em ; + margin-bottom: 1em } + +div.line-block div.line-block { + margin-top: 0 ; + margin-bottom: 0 ; + margin-left: 1.5em } + +div.sidebar { + margin-left: 1em ; + border: medium outset ; + padding: 1em ; + background-color: #ffffee ; + width: 40% ; + float: right ; + clear: right } + +div.sidebar p.rubric { + font-family: sans-serif ; + font-size: medium } + +div.system-messages { + margin: 5em } + +div.system-messages h1 { + color: red } + +div.system-message { + border: medium outset ; + padding: 1em } + +div.system-message p.system-message-title { + color: red ; + font-weight: bold } + +div.topic { + margin: 2em } + +h1.section-subtitle, h2.section-subtitle, h3.section-subtitle, +h4.section-subtitle, h5.section-subtitle, h6.section-subtitle { + margin-top: 0.4em } + +h1 { + font-family: sans-serif ; + font-size: large } + +h2 { + font-family: sans-serif ; + font-size: medium } + +h3 { + font-family: sans-serif ; + font-size: small } + +h4 { + font-family: sans-serif ; + font-style: italic ; + font-size: small } + +h5 { + font-family: sans-serif; + font-size: x-small } + +h6 { + font-family: sans-serif; + font-style: italic ; + font-size: x-small } + +hr.docutils { + width: 75% } + +img.align-left { + clear: left } + +img.align-right { + clear: right } + +img.borderless { + border: 0 } + +ol.simple, ul.simple { + margin-bottom: 1em } + +ol.arabic { + list-style: decimal } + +ol.loweralpha { + list-style: lower-alpha } + +ol.upperalpha { + list-style: upper-alpha } + +ol.lowerroman { + list-style: lower-roman } + +ol.upperroman { + list-style: upper-roman } + +p.attribution { + text-align: right ; + margin-left: 50% } + +p.caption { + font-style: italic } + +p.credits { + font-style: italic ; + font-size: smaller } + +p.label { + white-space: nowrap } + +p.rubric { + font-weight: bold ; + font-size: larger ; + color: maroon ; + text-align: center } + +p.sidebar-title { + font-family: sans-serif ; + font-weight: bold ; + font-size: larger } + +p.sidebar-subtitle { + font-family: sans-serif ; + font-weight: bold } + +p.topic-title { + font-family: sans-serif ; + font-weight: bold } + +pre.address { + margin-bottom: 0 ; + margin-top: 0 ; + font-family: serif ; + font-size: 100% } + +pre.literal-block, pre.doctest-block { + margin-left: 2em ; + margin-right: 2em } + +span.classifier { + font-family: sans-serif ; + font-style: oblique } + +span.classifier-delimiter { + font-family: sans-serif ; + font-weight: bold } + +span.interpreted { + font-family: sans-serif } + +span.option { + white-space: nowrap } + +span.option-argument { + font-style: italic } + +span.pre { + white-space: pre } + +span.problematic { + color: red } + +span.section-subtitle { + /* font-size relative to parent (h1..h6 element) */ + font-size: 80% } + +table.citation { + border-left: solid 1px gray; + margin-left: 1px } + +table.docinfo { + margin: 2em 4em } + +table.docutils { + margin-top: 0.5em ; + margin-bottom: 0.5em } + +table.footnote { + 
border-left: solid 1px black; + margin-left: 1px } + +table.docutils td, table.docutils th, +table.docinfo td, table.docinfo th { + padding-left: 0.5em ; + padding-right: 0.5em ; + vertical-align: top } + +td.num { + text-align: right } + +th.field-name { + font-weight: bold ; + text-align: left ; + white-space: nowrap ; + padding-left: 0 } + +h1 tt.docutils, h2 tt.docutils, h3 tt.docutils, +h4 tt.docutils, h5 tt.docutils, h6 tt.docutils { + font-size: 100% } + +ul.auto-toc { + list-style-type: none } diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/pep_html/template.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/pep_html/template.txt Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,29 @@ + + + + + + + + PEP %(pep)s -- %(title)s + %(stylesheet)s + + + + + +
    +%(body)s +%(body_suffix)s diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/pseudoxml.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/pseudoxml.py Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,31 @@ +# $Id: pseudoxml.py 4564 2006-05-21 20:44:42Z wiemann $ +# Author: David Goodger +# Copyright: This module has been placed in the public domain. + +""" +Simple internal document tree Writer, writes indented pseudo-XML. +""" + +__docformat__ = 'reStructuredText' + + +from docutils import writers + + +class Writer(writers.Writer): + + supported = ('pprint', 'pformat', 'pseudoxml') + """Formats this writer supports.""" + + config_section = 'pseudoxml writer' + config_section_dependencies = ('writers',) + + output = None + """Final translated form of `document`.""" + + def translate(self): + self.output = self.document.pformat() + + def supports(self, format): + """This writer supports all format-specific elements.""" + return 1 diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/__init__.py Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,337 @@ +# $Id: __init__.py 4883 2007-01-16 01:51:28Z wiemann $ +# Authors: Chris Liechti ; +# David Goodger +# Copyright: This module has been placed in the public domain. + +""" +S5/HTML Slideshow Writer. +""" + +__docformat__ = 'reStructuredText' + + +import sys +import os +import re +import docutils +from docutils import frontend, nodes, utils +from docutils.writers import html4css1 +from docutils.parsers.rst import directives + +themes_dir_path = utils.relative_path( + os.path.join(os.getcwd(), 'dummy'), + os.path.join(os.path.dirname(__file__), 'themes')) + +def find_theme(name): + # Where else to look for a theme? + # Check working dir? Destination dir? Config dir? Plugins dir? + path = os.path.join(themes_dir_path, name) + if not os.path.isdir(path): + raise docutils.ApplicationError( + 'Theme directory not found: %r (path: %r)' % (name, path)) + return path + + +class Writer(html4css1.Writer): + + settings_spec = html4css1.Writer.settings_spec + ( + 'S5 Slideshow Specific Options', + 'For the S5/HTML writer, the --no-toc-backlinks option ' + '(defined in General Docutils Options above) is the default, ' + 'and should not be changed.', + (('Specify an installed S5 theme by name. Overrides --theme-url. ' + 'The default theme name is "default". The theme files will be ' + 'copied into a "ui/" directory, in the same directory as the ' + 'destination file (output HTML). Note that existing theme files ' + 'will not be overwritten (unless --overwrite-theme-files is used).', + ['--theme'], + {'default': 'default', 'metavar': '', + 'overrides': 'theme_url'}), + ('Specify an S5 theme URL. The destination file (output HTML) will ' + 'link to this theme; nothing will be copied. Overrides --theme.', + ['--theme-url'], + {'metavar': '', 'overrides': 'theme'}), + ('Allow existing theme files in the ``ui/`` directory to be ' + 'overwritten. The default is not to overwrite theme files.', + ['--overwrite-theme-files'], + {'action': 'store_true', 'validator': frontend.validate_boolean}), + ('Keep existing theme files in the ``ui/`` directory; do not ' + 'overwrite any. 
This is the default.', + ['--keep-theme-files'], + {'dest': 'overwrite_theme_files', 'action': 'store_false'}), + ('Set the initial view mode to "slideshow" [default] or "outline".', + ['--view-mode'], + {'choices': ['slideshow', 'outline'], 'default': 'slideshow', + 'metavar': ''}), + ('Normally hide the presentation controls in slideshow mode. ' + 'This is the default.', + ['--hidden-controls'], + {'action': 'store_true', 'default': True, + 'validator': frontend.validate_boolean}), + ('Always show the presentation controls in slideshow mode. ' + 'The default is to hide the controls.', + ['--visible-controls'], + {'dest': 'hidden_controls', 'action': 'store_false'}), + ('Enable the current slide indicator ("1 / 15"). ' + 'The default is to disable it.', + ['--current-slide'], + {'action': 'store_true', 'validator': frontend.validate_boolean}), + ('Disable the current slide indicator. This is the default.', + ['--no-current-slide'], + {'dest': 'current_slide', 'action': 'store_false'}),)) + + settings_default_overrides = {'toc_backlinks': 0} + + config_section = 's5_html writer' + config_section_dependencies = ('writers', 'html4css1 writer') + + def __init__(self): + html4css1.Writer.__init__(self) + self.translator_class = S5HTMLTranslator + + +class S5HTMLTranslator(html4css1.HTMLTranslator): + + s5_stylesheet_template = """\ + + + + + + + + +\n""" + # The script element must go in front of the link elements to + # avoid a flash of unstyled content (FOUC), reproducible with + # Firefox. + + disable_current_slide = """ +\n""" + + layout_template = """\ +
    +
    +
    + + +
    \n""" +#
    +#
    +#
    +#
    + + default_theme = 'default' + """Name of the default theme.""" + + base_theme_file = '__base__' + """Name of the file containing the name of the base theme.""" + + direct_theme_files = ( + 'slides.css', 'outline.css', 'print.css', 'opera.css', 'slides.js') + """Names of theme files directly linked to in the output HTML""" + + indirect_theme_files = ( + 's5-core.css', 'framing.css', 'pretty.css', 'blank.gif', 'iepngfix.htc') + """Names of files used indirectly; imported or used by files in + `direct_theme_files`.""" + + required_theme_files = indirect_theme_files + direct_theme_files + """Names of mandatory theme files.""" + + def __init__(self, *args): + html4css1.HTMLTranslator.__init__(self, *args) + #insert S5-specific stylesheet and script stuff: + self.theme_file_path = None + self.setup_theme() + view_mode = self.document.settings.view_mode + control_visibility = ('visible', 'hidden')[self.document.settings + .hidden_controls] + self.stylesheet.append(self.s5_stylesheet_template + % {'path': self.theme_file_path, + 'view_mode': view_mode, + 'control_visibility': control_visibility}) + if not self.document.settings.current_slide: + self.stylesheet.append(self.disable_current_slide) + self.add_meta('\n') + self.s5_footer = [] + self.s5_header = [] + self.section_count = 0 + self.theme_files_copied = None + + def setup_theme(self): + if self.document.settings.theme: + self.copy_theme() + elif self.document.settings.theme_url: + self.theme_file_path = self.document.settings.theme_url + else: + raise docutils.ApplicationError( + 'No theme specified for S5/HTML writer.') + + def copy_theme(self): + """ + Locate & copy theme files. + + A theme may be explicitly based on another theme via a '__base__' + file. The default base theme is 'default'. Files are accumulated + from the specified theme, any base themes, and 'default'. + """ + settings = self.document.settings + path = find_theme(settings.theme) + theme_paths = [path] + self.theme_files_copied = {} + required_files_copied = {} + # This is a link (URL) in HTML, so we use "/", not os.sep: + self.theme_file_path = '%s/%s' % ('ui', settings.theme) + if settings._destination: + dest = os.path.join( + os.path.dirname(settings._destination), 'ui', settings.theme) + if not os.path.isdir(dest): + os.makedirs(dest) + else: + # no destination, so we can't copy the theme + return + default = 0 + while path: + for f in os.listdir(path): # copy all files from each theme + if f == self.base_theme_file: + continue # ... except the "__base__" file + if ( self.copy_file(f, path, dest) + and f in self.required_theme_files): + required_files_copied[f] = 1 + if default: + break # "default" theme has no base theme + # Find the "__base__" file in theme directory: + base_theme_file = os.path.join(path, self.base_theme_file) + # If it exists, read it and record the theme path: + if os.path.isfile(base_theme_file): + lines = open(base_theme_file).readlines() + for line in lines: + line = line.strip() + if line and not line.startswith('#'): + path = find_theme(line) + if path in theme_paths: # check for duplicates (cycles) + path = None # if found, use default base + else: + theme_paths.append(path) + break + else: # no theme name found + path = None # use default base + else: # no base theme file found + path = None # use default base + if not path: + path = find_theme(self.default_theme) + theme_paths.append(path) + default = 1 + if len(required_files_copied) != len(self.required_theme_files): + # Some required files weren't found & couldn't be copied. 
+ required = list(self.required_theme_files) + for f in required_files_copied.keys(): + required.remove(f) + raise docutils.ApplicationError( + 'Theme files not found: %s' + % ', '.join(['%r' % f for f in required])) + + files_to_skip_pattern = re.compile(r'~$|\.bak$|#$|\.cvsignore$') + + def copy_file(self, name, source_dir, dest_dir): + """ + Copy file `name` from `source_dir` to `dest_dir`. + Return 1 if the file exists in either `source_dir` or `dest_dir`. + """ + source = os.path.join(source_dir, name) + dest = os.path.join(dest_dir, name) + if self.theme_files_copied.has_key(dest): + return 1 + else: + self.theme_files_copied[dest] = 1 + if os.path.isfile(source): + if self.files_to_skip_pattern.search(source): + return None + settings = self.document.settings + if os.path.exists(dest) and not settings.overwrite_theme_files: + settings.record_dependencies.add(dest) + else: + src_file = open(source, 'rb') + src_data = src_file.read() + src_file.close() + dest_file = open(dest, 'wb') + dest_dir = dest_dir.replace(os.sep, '/') + dest_file.write(src_data.replace( + 'ui/default', dest_dir[dest_dir.rfind('ui/'):])) + dest_file.close() + settings.record_dependencies.add(source) + return 1 + if os.path.isfile(dest): + return 1 + + def depart_document(self, node): + header = ''.join(self.s5_header) + footer = ''.join(self.s5_footer) + title = ''.join(self.html_title).replace('

    ', '

    ') + layout = self.layout_template % {'header': header, + 'title': title, + 'footer': footer} + self.fragment.extend(self.body) + self.body_prefix.extend(layout) + self.body_prefix.append('
    \n') + self.body_prefix.append( + self.starttag({'classes': ['slide'], 'ids': ['slide0']}, 'div')) + if not self.section_count: + self.body.append('
    \n') + self.body_suffix.insert(0, '

    \n') + # skip content-type meta tag with interpolated charset value: + self.html_head.extend(self.head[1:]) + self.html_body.extend(self.body_prefix[1:] + self.body_pre_docinfo + + self.docinfo + self.body + + self.body_suffix[:-1]) + + def depart_footer(self, node): + start = self.context.pop() + self.s5_footer.append('

    ') + self.s5_footer.extend(self.body[start:]) + self.s5_footer.append('

    ') + del self.body[start:] + + def depart_header(self, node): + start = self.context.pop() + header = ['\n') + del self.body[start:] + self.s5_header.extend(header) + + def visit_section(self, node): + if not self.section_count: + self.body.append('\n\n') + self.section_count += 1 + self.section_level += 1 + if self.section_level > 1: + # dummy for matching div's + self.body.append(self.starttag(node, 'div', CLASS='section')) + else: + self.body.append(self.starttag(node, 'div', CLASS='slide')) + + def visit_subtitle(self, node): + if isinstance(node.parent, nodes.section): + level = self.section_level + self.initial_header_level - 1 + if level == 1: + level = 2 + tag = 'h%s' % level + self.body.append(self.starttag(node, tag, '')) + self.context.append('\n' % tag) + else: + html4css1.HTMLTranslator.visit_subtitle(self, node) + + def visit_title(self, node): + html4css1.HTMLTranslator.visit_title(self, node) diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/README.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/README.txt Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,6 @@ +Except where otherwise noted (default/iepngfix.htc), all files in this +directory have been released into the Public Domain. + +These files are based on files from S5 1.1, released into the Public +Domain by Eric Meyer. For further details, please see +http://www.meyerweb.com/eric/tools/s5/credits.html. diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/big-black/__base__ --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/big-black/__base__ Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,2 @@ +# base theme of this theme: +big-white diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/big-black/framing.css --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/big-black/framing.css Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,25 @@ +/* The following styles size, place, and layer the slide components. + Edit these if you want to change the overall slide layout. + The commented lines can be uncommented (and modified, if necessary) + to help you with the rearrangement process. 
*/ + +/* target = 1024x768 */ + +div#header, div#footer, .slide {width: 100%; top: 0; left: 0;} +div#header {top: 0; z-index: 1;} +div#footer {display:none;} +.slide {top: 0; width: 92%; padding: 0.1em 4% 4%; z-index: 2;} +/* list-style: none;} */ +div#controls {left: 50%; bottom: 0; width: 50%; z-index: 100;} +div#controls form {position: absolute; bottom: 0; right: 0; width: 100%; + margin: 0;} +#currentSlide {position: absolute; width: 10%; left: 45%; bottom: 1em; + z-index: 10;} +html>body #currentSlide {position: fixed;} + +/* +div#header {background: #FCC;} +div#footer {background: #CCF;} +div#controls {background: #BBD;} +div#currentSlide {background: #FFC;} +*/ diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/big-black/pretty.css --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/big-black/pretty.css Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,109 @@ +/* This file has been placed in the public domain. */ +/* Following are the presentation styles -- edit away! */ + +html, body {margin: 0; padding: 0;} +body {background: black; color: white;} +:link, :visited {text-decoration: none; color: cyan;} +#controls :active {color: #888 !important;} +#controls :focus {outline: 1px dotted #CCC;} + +blockquote {padding: 0 2em 0.5em; margin: 0 1.5em 0.5em;} +blockquote p {margin: 0;} + +kbd {font-weight: bold; font-size: 1em;} +sup {font-size: smaller; line-height: 1px;} + +.slide pre {padding: 0; margin-left: 0; margin-right: 0; font-size: 90%;} +.slide ul ul li {list-style: square;} +.slide img.leader {display: block; margin: 0 auto;} +.slide tt {font-size: 90%;} + +.slide {font-size: 3em; font-family: sans-serif; font-weight: bold;} +.slide h1 {padding-top: 0; z-index: 1; margin: 0; font-size: 120%;} +.slide h2 {font-size: 110%;} +.slide h3 {font-size: 105%;} +h1 abbr {font-variant: small-caps;} + +div#controls {position: absolute; left: 50%; bottom: 0; + width: 50%; text-align: right; font: bold 0.9em sans-serif;} +html>body div#controls {position: fixed; padding: 0 0 1em 0; top: auto;} +div#controls form {position: absolute; bottom: 0; right: 0; width: 100%; + margin: 0; padding: 0;} +#controls #navLinks a {padding: 0; margin: 0 0.5em; + border: none; color: #888; cursor: pointer;} +#controls #navList {height: 1em;} +#controls #navList #jumplist {position: absolute; bottom: 0; right: 0; + background: black; color: #CCC;} + +#currentSlide {text-align: center; font-size: 0.5em; color: #AAA; + font-family: sans-serif; font-weight: bold;} + +#slide0 h1 {position: static; margin: 0 0 0.5em; padding-top: 0.3em; top: 0; + font-size: 150%; white-space: normal; background: transparent;} +#slide0 h2 {font: 110%; font-style: italic; color: gray;} +#slide0 h3 {margin-top: 1.5em; font-size: 1.5em;} +#slide0 h4 {margin-top: 0; font-size: 1em;} + +ul.urls {list-style: none; display: inline; margin: 0;} +.urls li {display: inline; margin: 0;} +.external {border-bottom: 1px dotted gray;} +html>body .external {border-bottom: none;} +.external:after {content: " \274F"; font-size: smaller; color: #FCC;} + +.incremental, .incremental *, .incremental *:after { + color: black; visibility: visible; border: 0;} +img.incremental {visibility: hidden;} +.slide .current {color: lime;} + +.slide-display {display: inline ! 
important;} + +.huge {font-size: 150%;} +.big {font-size: 120%;} +.small {font-size: 75%;} +.tiny {font-size: 50%;} +.huge tt, .big tt, .small tt, .tiny tt {font-size: 115%;} +.huge pre, .big pre, .small pre, .tiny pre {font-size: 115%;} + +.maroon {color: maroon;} +.red {color: red;} +.magenta {color: magenta;} +.fuchsia {color: fuchsia;} +.pink {color: #FAA;} +.orange {color: orange;} +.yellow {color: yellow;} +.lime {color: lime;} +.green {color: green;} +.olive {color: olive;} +.teal {color: teal;} +.cyan {color: cyan;} +.aqua {color: aqua;} +.blue {color: blue;} +.navy {color: navy;} +.purple {color: purple;} +.black {color: black;} +.gray {color: gray;} +.silver {color: silver;} +.white {color: white;} + +.left {text-align: left ! important;} +.center {text-align: center ! important;} +.right {text-align: right ! important;} + +.animation {position: relative; margin: 1em 0; padding: 0;} +.animation img {position: absolute;} + +/* Docutils-specific overrides */ + +.slide table.docinfo {margin: 0.5em 0 0.5em 1em;} + +div.sidebar {background-color: black;} + +pre.literal-block, pre.doctest-block {background-color: black;} + +tt.docutils {background-color: black;} + +/* diagnostics */ +/* +li:after {content: " [" attr(class) "]"; color: #F88;} +div:before {content: "[" attr(class) "]"; color: #F88;} +*/ diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/big-white/framing.css --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/big-white/framing.css Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,24 @@ +/* This file has been placed in the public domain. */ +/* The following styles size, place, and layer the slide components. + Edit these if you want to change the overall slide layout. + The commented lines can be uncommented (and modified, if necessary) + to help you with the rearrangement process. */ + +/* target = 1024x768 */ + +div#header, div#footer, .slide {width: 100%; top: 0; left: 0;} +div#footer {display:none;} +.slide {top: 0; width: 92%; padding: 0.25em 4% 4%; z-index: 2;} +div#controls {left: 50%; bottom: 0; width: 50%; z-index: 100;} +div#controls form {position: absolute; bottom: 0; right: 0; width: 100%; + margin: 0;} +#currentSlide {position: absolute; width: 10%; left: 45%; bottom: 1em; + z-index: 10;} +html>body #currentSlide {position: fixed;} + +/* +div#header {background: #FCC;} +div#footer {background: #CCF;} +div#controls {background: #BBD;} +div#currentSlide {background: #FFC;} +*/ diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/big-white/pretty.css --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/big-white/pretty.css Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,107 @@ +/* This file has been placed in the public domain. */ +/* Following are the presentation styles -- edit away! 
*/ + +html, body {margin: 0; padding: 0;} +body {background: white; color: black;} +:link, :visited {text-decoration: none; color: #00C;} +#controls :active {color: #88A !important;} +#controls :focus {outline: 1px dotted #227;} + +blockquote {padding: 0 2em 0.5em; margin: 0 1.5em 0.5em;} +blockquote p {margin: 0;} + +kbd {font-weight: bold; font-size: 1em;} +sup {font-size: smaller; line-height: 1px;} + +.slide pre {padding: 0; margin-left: 0; margin-right: 0; font-size: 90%;} +.slide ul ul li {list-style: square;} +.slide img.leader {display: block; margin: 0 auto;} +.slide tt {font-size: 90%;} + +.slide {font-size: 3em; font-family: sans-serif; font-weight: bold;} +.slide h1 {padding-top: 0; z-index: 1; margin: 0; font-size: 120%;} +.slide h2 {font-size: 110%;} +.slide h3 {font-size: 105%;} +h1 abbr {font-variant: small-caps;} + +div#controls {position: absolute; left: 50%; bottom: 0; + width: 50%; text-align: right; font: bold 0.9em sans-serif;} +html>body div#controls {position: fixed; padding: 0 0 1em 0; top: auto;} +div#controls form {position: absolute; bottom: 0; right: 0; width: 100%; + margin: 0; padding: 0;} +#controls #navLinks a {padding: 0; margin: 0 0.5em; + border: none; color: #005; cursor: pointer;} +#controls #navList {height: 1em;} +#controls #navList #jumplist {position: absolute; bottom: 0; right: 0; + background: #DDD; color: #227;} + +#currentSlide {text-align: center; font-size: 0.5em; color: #444; + font-family: sans-serif; font-weight: bold;} + +#slide0 h1 {position: static; margin: 0 0 0.5em; padding-top: 0.3em; top: 0; + font-size: 150%; white-space: normal; background: transparent;} +#slide0 h2 {font: 110%; font-style: italic; color: gray;} +#slide0 h3 {margin-top: 1.5em; font-size: 1.5em;} +#slide0 h4 {margin-top: 0; font-size: 1em;} + +ul.urls {list-style: none; display: inline; margin: 0;} +.urls li {display: inline; margin: 0;} +.external {border-bottom: 1px dotted gray;} +html>body .external {border-bottom: none;} +.external:after {content: " \274F"; font-size: smaller; color: #77B;} + +.incremental, .incremental *, .incremental *:after { + color: white; visibility: visible; border: 0;} +img.incremental {visibility: hidden;} +.slide .current {color: green;} + +.slide-display {display: inline ! important;} + +.huge {font-size: 150%;} +.big {font-size: 120%;} +.small {font-size: 75%;} +.tiny {font-size: 50%;} +.huge tt, .big tt, .small tt, .tiny tt {font-size: 115%;} +.huge pre, .big pre, .small pre, .tiny pre {font-size: 115%;} + +.maroon {color: maroon;} +.red {color: red;} +.magenta {color: magenta;} +.fuchsia {color: fuchsia;} +.pink {color: #FAA;} +.orange {color: orange;} +.yellow {color: yellow;} +.lime {color: lime;} +.green {color: green;} +.olive {color: olive;} +.teal {color: teal;} +.cyan {color: cyan;} +.aqua {color: aqua;} +.blue {color: blue;} +.navy {color: navy;} +.purple {color: purple;} +.black {color: black;} +.gray {color: gray;} +.silver {color: silver;} +.white {color: white;} + +.left {text-align: left ! important;} +.center {text-align: center ! important;} +.right {text-align: right ! 
important;} + +.animation {position: relative; margin: 1em 0; padding: 0;} +.animation img {position: absolute;} + +/* Docutils-specific overrides */ + +.slide table.docinfo {margin: 0.5em 0 0.5em 1em;} + +pre.literal-block, pre.doctest-block {background-color: white;} + +tt.docutils {background-color: white;} + +/* diagnostics */ +/* +li:after {content: " [" attr(class) "]"; color: #F88;} +div:before {content: "[" attr(class) "]"; color: #F88;} +*/ diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/blank.gif Binary file buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/blank.gif has changed diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/framing.css --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/framing.css Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,25 @@ +/* This file has been placed in the public domain. */ +/* The following styles size, place, and layer the slide components. + Edit these if you want to change the overall slide layout. + The commented lines can be uncommented (and modified, if necessary) + to help you with the rearrangement process. */ + +/* target = 1024x768 */ + +div#header, div#footer, .slide {width: 100%; top: 0; left: 0;} +div#header {position: fixed; top: 0; height: 3em; z-index: 1;} +div#footer {top: auto; bottom: 0; height: 2.5em; z-index: 5;} +.slide {top: 0; width: 92%; padding: 2.5em 4% 4%; z-index: 2;} +div#controls {left: 50%; bottom: 0; width: 50%; z-index: 100;} +div#controls form {position: absolute; bottom: 0; right: 0; width: 100%; + margin: 0;} +#currentSlide {position: absolute; width: 10%; left: 45%; bottom: 1em; + z-index: 10;} +html>body #currentSlide {position: fixed;} + +/* +div#header {background: #FCC;} +div#footer {background: #CCF;} +div#controls {background: #BBD;} +div#currentSlide {background: #FFC;} +*/ diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/iepngfix.htc --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/iepngfix.htc Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,42 @@ + + + + + \ No newline at end of file diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/opera.css --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/opera.css Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,8 @@ +/* This file has been placed in the public domain. 
*/ +/* DO NOT CHANGE THESE unless you really want to break Opera Show */ +.slide { + visibility: visible !important; + position: static !important; + page-break-before: always; +} +#slide0 {page-break-before: avoid;} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/outline.css --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/outline.css Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,16 @@ +/* This file has been placed in the public domain. */ +/* Don't change this unless you want the layout stuff to show up in the + outline view! */ + +.layout div, #footer *, #controlForm * {display: none;} +#footer, #controls, #controlForm, #navLinks, #toggle { + display: block; visibility: visible; margin: 0; padding: 0;} +#toggle {float: right; padding: 0.5em;} +html>body #toggle {position: fixed; top: 0; right: 0;} + +/* making the outline look pretty-ish */ + +#slide0 h1, #slide0 h2, #slide0 h3, #slide0 h4 {border: none; margin: 0;} +#toggle {border: 1px solid; border-width: 0 0 1px 1px; background: #FFF;} + +.outline {display: inline ! important;} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/pretty.css --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/pretty.css Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,120 @@ +/* This file has been placed in the public domain. */ +/* Following are the presentation styles -- edit away! */ + +html, body {margin: 0; padding: 0;} +body {background: white; color: black;} +/* Replace the background style above with the style below (and again for + div#header) for a graphic: */ +/* background: white url(bodybg.gif) -16px 0 no-repeat; */ +:link, :visited {text-decoration: none; color: #00C;} +#controls :active {color: #88A !important;} +#controls :focus {outline: 1px dotted #227;} +h1, h2, h3, h4 {font-size: 100%; margin: 0; padding: 0; font-weight: inherit;} + +blockquote {padding: 0 2em 0.5em; margin: 0 1.5em 0.5em;} +blockquote p {margin: 0;} + +kbd {font-weight: bold; font-size: 1em;} +sup {font-size: smaller; line-height: 1px;} + +.slide pre {padding: 0; margin-left: 0; margin-right: 0; font-size: 90%;} +.slide ul ul li {list-style: square;} +.slide img.leader {display: block; margin: 0 auto;} +.slide tt {font-size: 90%;} + +div#header, div#footer {background: #005; color: #AAB; font-family: sans-serif;} +/* background: #005 url(bodybg.gif) -16px 0 no-repeat; */ +div#footer {font-size: 0.5em; font-weight: bold; padding: 1em 0;} +#footer h1 {display: block; padding: 0 1em;} +#footer h2 {display: block; padding: 0.8em 1em 0;} + +.slide {font-size: 1.2em;} +.slide h1 {position: absolute; top: 0.45em; z-index: 1; + margin: 0; padding-left: 0.7em; white-space: nowrap; + font: bold 150% sans-serif; color: #DDE; background: #005;} +.slide h2 {font: bold 120%/1em sans-serif; padding-top: 0.5em;} +.slide h3 {font: bold 100% sans-serif; padding-top: 0.5em;} +h1 abbr {font-variant: small-caps;} + +div#controls {position: absolute; left: 50%; bottom: 0; + width: 50%; text-align: right; font: bold 0.9em sans-serif;} +html>body div#controls {position: fixed; padding: 0 0 1em 0; top: auto;} +div#controls form {position: absolute; bottom: 0; right: 0; width: 100%; + 
margin: 0; padding: 0;} +#controls #navLinks a {padding: 0; margin: 0 0.5em; + background: #005; border: none; color: #779; cursor: pointer;} +#controls #navList {height: 1em;} +#controls #navList #jumplist {position: absolute; bottom: 0; right: 0; + background: #DDD; color: #227;} + +#currentSlide {text-align: center; font-size: 0.5em; color: #449; + font-family: sans-serif; font-weight: bold;} + +#slide0 {padding-top: 1.5em} +#slide0 h1 {position: static; margin: 1em 0 0; padding: 0; color: #000; + font: bold 2em sans-serif; white-space: normal; background: transparent;} +#slide0 h2 {font: bold italic 1em sans-serif; margin: 0.25em;} +#slide0 h3 {margin-top: 1.5em; font-size: 1.5em;} +#slide0 h4 {margin-top: 0; font-size: 1em;} + +ul.urls {list-style: none; display: inline; margin: 0;} +.urls li {display: inline; margin: 0;} +.external {border-bottom: 1px dotted gray;} +html>body .external {border-bottom: none;} +.external:after {content: " \274F"; font-size: smaller; color: #77B;} + +.incremental, .incremental *, .incremental *:after {visibility: visible; + color: white; border: 0;} +img.incremental {visibility: hidden;} +.slide .current {color: green;} + +.slide-display {display: inline ! important;} + +.huge {font-family: sans-serif; font-weight: bold; font-size: 150%;} +.big {font-family: sans-serif; font-weight: bold; font-size: 120%;} +.small {font-size: 75%;} +.tiny {font-size: 50%;} +.huge tt, .big tt, .small tt, .tiny tt {font-size: 115%;} +.huge pre, .big pre, .small pre, .tiny pre {font-size: 115%;} + +.maroon {color: maroon;} +.red {color: red;} +.magenta {color: magenta;} +.fuchsia {color: fuchsia;} +.pink {color: #FAA;} +.orange {color: orange;} +.yellow {color: yellow;} +.lime {color: lime;} +.green {color: green;} +.olive {color: olive;} +.teal {color: teal;} +.cyan {color: cyan;} +.aqua {color: aqua;} +.blue {color: blue;} +.navy {color: navy;} +.purple {color: purple;} +.black {color: black;} +.gray {color: gray;} +.silver {color: silver;} +.white {color: white;} + +.left {text-align: left ! important;} +.center {text-align: center ! important;} +.right {text-align: right ! important;} + +.animation {position: relative; margin: 1em 0; padding: 0;} +.animation img {position: absolute;} + +/* Docutils-specific overrides */ + +.slide table.docinfo {margin: 1em 0 0.5em 2em;} + +pre.literal-block, pre.doctest-block {background-color: white;} + +tt.docutils {background-color: white;} + +/* diagnostics */ +/* +li:after {content: " [" attr(class) "]"; color: #F88;} +div:before {content: "[" attr(class) "]"; color: #F88;} +*/ diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/print.css --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/print.css Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,24 @@ +/* This file has been placed in the public domain. */ +/* The following rule is necessary to have all slides appear in print! + DO NOT REMOVE IT! 
*/ +.slide, ul {page-break-inside: avoid; visibility: visible !important;} +h1 {page-break-after: avoid;} + +body {font-size: 12pt; background: white;} +* {color: black;} + +#slide0 h1 {font-size: 200%; border: none; margin: 0.5em 0 0.25em;} +#slide0 h3 {margin: 0; padding: 0;} +#slide0 h4 {margin: 0 0 0.5em; padding: 0;} +#slide0 {margin-bottom: 3em;} + +#header {display: none;} +#footer h1 {margin: 0; border-bottom: 1px solid; color: gray; + font-style: italic;} +#footer h2, #controls {display: none;} + +.print {display: inline ! important;} + +/* The following rule keeps the layout stuff out of print. + Remove at your own risk! */ +.layout, .layout * {display: none !important;} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/s5-core.css --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/s5-core.css Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,11 @@ +/* This file has been placed in the public domain. */ +/* Do not edit or override these styles! + The system will likely break if you do. */ + +div#header, div#footer, div#controls, .slide {position: absolute;} +html>body div#header, html>body div#footer, + html>body div#controls, html>body .slide {position: fixed;} +.handout {display: none;} +.layout {display: block;} +.slide, .hideme, .incremental {visibility: hidden;} +#slide0 {visibility: visible;} diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/slides.css --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/slides.css Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,10 @@ +/* This file has been placed in the public domain. */ + +/* required to make the slide show run at all */ +@import url(s5-core.css); + +/* sets basic placement and size of slide components */ +@import url(framing.css); + +/* styles that make the slides look good */ +@import url(pretty.css); diff -r be27ed110b50 -r d8ac696cc51f buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/slides.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/buildframework/helium/external/python/lib/common/docutils-0.5-py2.5.egg/docutils/writers/s5_html/themes/default/slides.js Wed Dec 23 19:29:07 2009 +0200 @@ -0,0 +1,558 @@ +// S5 v1.1 slides.js -- released into the Public Domain +// Modified for Docutils (http://docutils.sf.net) by David Goodger +// +// Please see http://www.meyerweb.com/eric/tools/s5/credits.html for +// information about all the wonderful and talented contributors to this code! + +var undef; +var slideCSS = ''; +var snum = 0; +var smax = 1; +var slideIDs = new Array(); +var incpos = 0; +var number = undef; +var s5mode = true; +var defaultView = 'slideshow'; +var controlVis = 'visible'; + +var isIE = navigator.appName == 'Microsoft Internet Explorer' ? 1 : 0; +var isOp = navigator.userAgent.indexOf('Opera') > -1 ? 1 : 0; +var isGe = navigator.userAgent.indexOf('Gecko') > -1 && navigator.userAgent.indexOf('Safari') < 1 ? 
1 : 0; + +function hasClass(object, className) { + if (!object.className) return false; + return (object.className.search('(^|\\s)' + className + '(\\s|$)') != -1); +} + +function hasValue(object, value) { + if (!object) return false; + return (object.search('(^|\\s)' + value + '(\\s|$)') != -1); +} + +function removeClass(object,className) { + if (!object) return; + object.className = object.className.replace(new RegExp('(^|\\s)'+className+'(\\s|$)'), RegExp.$1+RegExp.$2); +} + +function addClass(object,className) { + if (!object || hasClass(object, className)) return; + if (object.className) { + object.className += ' '+className; + } else { + object.className = className; + } +} + +function GetElementsWithClassName(elementName,className) { + var allElements = document.getElementsByTagName(elementName); + var elemColl = new Array(); + for (var i = 0; i< allElements.length; i++) { + if (hasClass(allElements[i], className)) { + elemColl[elemColl.length] = allElements[i]; + } + } + return elemColl; +} + +function isParentOrSelf(element, id) { + if (element == null || element.nodeName=='BODY') return false; + else if (element.id == id) return true; + else return isParentOrSelf(element.parentNode, id); +} + +function nodeValue(node) { + var result = ""; + if (node.nodeType == 1) { + var children = node.childNodes; + for (var i = 0; i < children.length; ++i) { + result += nodeValue(children[i]); + } + } + else if (node.nodeType == 3) { + result = node.nodeValue; + } + return(result); +} + +function slideLabel() { + var slideColl = GetElementsWithClassName('*','slide'); + var list = document.getElementById('jumplist'); + smax = slideColl.length; + for (var n = 0; n < smax; n++) { + var obj = slideColl[n]; + + var did = 'slide' + n.toString(); + if (obj.getAttribute('id')) { + slideIDs[n] = obj.getAttribute('id'); + } + else { + obj.setAttribute('id',did); + slideIDs[n] = did; + } + if (isOp) continue; + + var otext = ''; + var menu = obj.firstChild; + if (!menu) continue; // to cope with empty slides + while (menu && menu.nodeType == 3) { + menu = menu.nextSibling; + } + if (!menu) continue; // to cope with slides with only text nodes + + var menunodes = menu.childNodes; + for (var o = 0; o < menunodes.length; o++) { + otext += nodeValue(menunodes[o]); + } + list.options[list.length] = new Option(n + ' : ' + otext, n); + } +} + +function currentSlide() { + var cs; + var footer_nodes; + var vis = 'visible'; + if (document.getElementById) { + cs = document.getElementById('currentSlide'); + footer_nodes = document.getElementById('footer').childNodes; + } else { + cs = document.currentSlide; + footer = document.footer.childNodes; + } + cs.innerHTML = '' + snum + '<\/span> ' + + '\/<\/span> ' + + '' + (smax-1) + '<\/span>'; + if (snum == 0) { + vis = 'hidden'; + } + cs.style.visibility = vis; + for (var i = 0; i < footer_nodes.length; i++) { + if (footer_nodes[i].nodeType == 1) { + footer_nodes[i].style.visibility = vis; + } + } +} + +function go(step) { + if (document.getElementById('slideProj').disabled || step == 0) return; + var jl = document.getElementById('jumplist'); + var cid = slideIDs[snum]; + var ce = document.getElementById(cid); + if (incrementals[snum].length > 0) { + for (var i = 0; i < incrementals[snum].length; i++) { + removeClass(incrementals[snum][i], 'current'); + removeClass(incrementals[snum][i], 'incremental'); + } + } + if (step != 'j') { + snum += step; + lmax = smax - 1; + if (snum > lmax) snum = lmax; + if (snum < 0) snum = 0; + } else + snum = parseInt(jl.value); + var 
nid = slideIDs[snum]; + var ne = document.getElementById(nid); + if (!ne) { + ne = document.getElementById(slideIDs[0]); + snum = 0; + } + if (step < 0) {incpos = incrementals[snum].length} else {incpos = 0;} + if (incrementals[snum].length > 0 && incpos == 0) { + for (var i = 0; i < incrementals[snum].length; i++) { + if (hasClass(incrementals[snum][i], 'current')) + incpos = i + 1; + else + addClass(incrementals[snum][i], 'incremental'); + } + } + if (incrementals[snum].length > 0 && incpos > 0) + addClass(incrementals[snum][incpos - 1], 'current'); + ce.style.visibility = 'hidden'; + ne.style.visibility = 'visible'; + jl.selectedIndex = snum; + currentSlide(); + number = 0; +} + +function goTo(target) { + if (target >= smax || target == snum) return; + go(target - snum); +} + +function subgo(step) { + if (step > 0) { + removeClass(incrementals[snum][incpos - 1],'current'); + removeClass(incrementals[snum][incpos], 'incremental'); + addClass(incrementals[snum][incpos],'current'); + incpos++; + } else { + incpos--; + removeClass(incrementals[snum][incpos],'current'); + addClass(incrementals[snum][incpos], 'incremental'); + addClass(incrementals[snum][incpos - 1],'current'); + } +} + +function toggle() { + var slideColl = GetElementsWithClassName('*','slide'); + var slides = document.getElementById('slideProj'); + var outline = document.getElementById('outlineStyle'); + if (!slides.disabled) { + slides.disabled = true; + outline.disabled = false; + s5mode = false; + fontSize('1em'); + for (var n = 0; n < smax; n++) { + var slide = slideColl[n]; + slide.style.visibility = 'visible'; + } + } else { + slides.disabled = false; + outline.disabled = true; + s5mode = true; + fontScale(); + for (var n = 0; n < smax; n++) { + var slide = slideColl[n]; + slide.style.visibility = 'hidden'; + } + slideColl[snum].style.visibility = 'visible'; + } +} + +function showHide(action) { + var obj = GetElementsWithClassName('*','hideme')[0]; + switch (action) { + case 's': obj.style.visibility = 'visible'; break; + case 'h': obj.style.visibility = 'hidden'; break; + case 'k': + if (obj.style.visibility != 'visible') { + obj.style.visibility = 'visible'; + } else { + obj.style.visibility = 'hidden'; + } + break; + } +} + +// 'keys' code adapted from MozPoint (http://mozpoint.mozdev.org/) +function keys(key) { + if (!key) { + key = event; + key.which = key.keyCode; + } + if (key.which == 84) { + toggle(); + return; + } + if (s5mode) { + switch (key.which) { + case 10: // return + case 13: // enter + if (window.event && isParentOrSelf(window.event.srcElement, 'controls')) return; + if (key.target && isParentOrSelf(key.target, 'controls')) return; + if(number != undef) { + goTo(number); + break; + } + case 32: // spacebar + case 34: // page down + case 39: // rightkey + case 40: // downkey + if(number != undef) { + go(number); + } else if (!incrementals[snum] || incpos >= incrementals[snum].length) { + go(1); + } else { + subgo(1); + } + break; + case 33: // page up + case 37: // leftkey + case 38: // upkey + if(number != undef) { + go(-1 * number); + } else if (!incrementals[snum] || incpos <= 0) { + go(-1); + } else { + subgo(-1); + } + break; + case 36: // home + goTo(0); + break; + case 35: // end + goTo(smax-1); + break; + case 67: // c + showHide('k'); + break; + } + if (key.which < 48 || key.which > 57) { + number = undef; + } else { + if (window.event && isParentOrSelf(window.event.srcElement, 'controls')) return; + if (key.target && isParentOrSelf(key.target, 'controls')) return; + number = (((number != 
undef) ? number : 0) * 10) + (key.which - 48); + } + } + return false; +} + +function clicker(e) { + number = undef; + var target; + if (window.event) { + target = window.event.srcElement; + e = window.event; + } else target = e.target; + if (target.href != null || hasValue(target.rel, 'external') || isParentOrSelf(target, 'controls') || isParentOrSelf(target,'embed') || isParentOrSelf(target, 'object')) return true; + if (!e.which || e.which == 1) { + if (!incrementals[snum] || incpos >= incrementals[snum].length) { + go(1); + } else { + subgo(1); + } + } +} + +function findSlide(hash) { + var target = document.getElementById(hash); + if (target) { + for (var i = 0; i < slideIDs.length; i++) { + if (target.id == slideIDs[i]) return i; + } + } + return null; +} + +function slideJump() { + if (window.location.hash == null || window.location.hash == '') { + currentSlide(); + return; + } + if (window.location.hash == null) return; + var dest = null; + dest = findSlide(window.location.hash.slice(1)); + if (dest == null) { + dest = 0; + } + go(dest - snum); +} + +function fixLinks() { + var thisUri = window.location.href; + thisUri = thisUri.slice(0, thisUri.length - window.location.hash.length); + var aelements = document.getElementsByTagName('A'); + for (var i = 0; i < aelements.length; i++) { + var a = aelements[i].href; + var slideID = a.match('\#.+'); + if ((slideID) && (slideID[0].slice(0,1) == '#')) { + var dest = findSlide(slideID[0].slice(1)); + if (dest != null) { + if (aelements[i].addEventListener) { + aelements[i].addEventListener("click", new Function("e", + "if (document.getElementById('slideProj').disabled) return;" + + "go("+dest+" - snum); " + + "if (e.preventDefault) e.preventDefault();"), true); + } else if (aelements[i].attachEvent) { + aelements[i].attachEvent("onclick", new Function("", + "if (document.getElementById('slideProj').disabled) return;" + + "go("+dest+" - snum); " + + "event.returnValue = false;")); + } + } + } + } +} + +function externalLinks() { + if (!document.getElementsByTagName) return; + var anchors = document.getElementsByTagName('a'); + for (var i=0; i' + + '