--- a/clone_packages/other_packages.txt Wed Mar 24 16:42:06 2010 +0000
+++ b/clone_packages/other_packages.txt Wed Mar 24 16:43:43 2010 +0000
@@ -3,7 +3,6 @@
https://developer.symbian.org/oss/MCL/utilities
https://developer.symbian.org/sfl/API_REF/Public_API/epoc32/
https://developer.symbian.org/oss/FCL/interim/auxiliary_tools/AgileBrowser
-https://developer.symbian.org/oss/FCL/interim/auxiliary_tools/EUserHL
https://developer.symbian.org/oss/FCL/interim/auxiliary_tools/route_generator
https://developer.symbian.org/oss/FCL/interim/auxiliary_tools/simulation_PSY
https://developer.symbian.org/sfl/FCL/interim/desktopsw
--- a/dbrtools/dbr/checkenv.py Wed Mar 24 16:42:06 2010 +0000
+++ b/dbrtools/dbr/checkenv.py Wed Mar 24 16:43:43 2010 +0000
@@ -17,16 +17,19 @@
def run(args):
location = '/'
-#needs a fix to scanenv for this to work...
-# if(len(args)):
-# location = args[0]
+ filtertype = ''
+# the argument now selects a filter, not an alternative environment
+ if(len(args)):
+ filtertype = args[0]
db = dbrenv.CreateDB(location)
local = dbrenv.DBRLocalEnv(location)
results = db.compare(local)
local.verify(results.unknown)
- results2 = db.compare(local)
- results2.printdetail()
- results2.printsummary()
+ results2 = db.compare(local)
+ filter = dbrenv.CreateFilter(filtertype)
+ filteredresults = filter.filter(results2)
+ filteredresults.printdetail()
+ filteredresults.printsummary()
db.update(local, results2.touched)
db.save()
--- a/dbrtools/dbr/cleanenv.py Wed Mar 24 16:42:06 2010 +0000
+++ b/dbrtools/dbr/cleanenv.py Wed Mar 24 16:43:43 2010 +0000
@@ -1,4 +1,4 @@
-# Copyright (c) 2009 Symbian Foundation Ltd
+# Copyright (c) 2009-2010 Symbian Foundation Ltd
# This component and the accompanying materials are made available
# under the terms of the License "Eclipse Public License v1.0"
# which accompanies this distribution, and is available
@@ -13,46 +13,48 @@
# Description:
# DBR cleanenv - cleans your environment
-import dbrbaseline
-import dbrpatch
+
import dbrutils
+import dbrenv
import re #temporary for dealing with patches
+import os
-def main(args):
- zippath = '/'
- if(len(args)):
- zippath = args[0]
-
- dbfilename = dbrutils.defaultdb()
- baseline = dbrbaseline.readdb(dbfilename)
- if(len(baseline ) > 0):
- env = dbrutils.scanenv()
- patches = dbrpatch.loadpatches(dbrpatch.dbrutils.patchpath())
- db = dbrpatch.createpatchedbaseline(baseline,patches)
- results = dbrpatch.newupdatedb(db,env)
- dbrutils.deletefiles(sorted(results['added']))
- required = set()
- required.update(results['removed'])
- required.update(results['changed'])
- required.update(results['untestable']) #untestable is going to be a problem...
- dbrutils.extractfiles(required, zippath)
- for name in sorted(patches):
- dbrutils.extractfromzip(required, re.sub('.txt','.zip',name),'')
-
- env = dbrutils.scanenv()
- results2 = dbrpatch.newupdatedb(db,env)
-
- baseline = dbrpatch.updatebaseline(baseline, db)
- patches = dbrpatch.updatepatches(patches, db)
-
- dbrpatch.savepatches(patches)
- dbrbaseline.writedb(baseline,dbfilename)
+def run(args):
+ zippath = '/'
+ if(len(args)):
+ zippath = args[0]
+ #This block is a cut'n'paste from checkenv...we could call that instead...
+
+ location = '/'
+#needs a fix to scanenv for this to work...
+# if(len(args)):
+# location = args[0]
+ db = dbrenv.CreateDB(location)
+ local = dbrenv.DBRLocalEnv(location)
+ results = db.compare(local)
+ local.verify(results.unknown)
+ results2 = db.compare(local)
+ db.update(local, results2.touched)
+ #cleaning
+ dbrutils.deletefiles(sorted(results2.added))
+ required = results2.changed | results2.removed
+ dbrutils.extractfiles(required, zippath)
+ #do something about the patches here...
+ print 'Need to extract the patches in a nicer manner!!!'
+ dbrutils.extractfiles(required, os.path.join(location,dbrutils.patch_path_internal()))
+
+ #scan again...create a new 'local'
+ local = dbrenv.DBRLocalEnv(location)
+ local.verify(required)
+ results3 = db.compare(local)
+ db.update(local, results3.touched)
+ db.save()
+ results3.printdetail()
+ results3.printsummary()
-def run(args):
- main(args)
def help():
print "Cleans the current environment"
--- a/dbrtools/dbr/dbrenv.py Wed Mar 24 16:42:06 2010 +0000
+++ b/dbrtools/dbr/dbrenv.py Wed Mar 24 16:43:43 2010 +0000
@@ -25,7 +25,7 @@
def CreateDB(location): #virtual constructor
print location
# print dbrutils.patch_path_internal()
- if(os.path.exists(os.path.join(location,dbrutils.defaultdb()))):
+ if(os.path.isfile(os.path.join(location,dbrutils.defaultdb()))):
# print 'loading baseline environment'
# return DBRBaselineEnv(location)
print 'loading patched baseline environment'
@@ -42,7 +42,11 @@
return DBREnv(location)
-
+#Start simple with the filtering...
+def CreateFilter(arg):
+ if(os.path.isfile(arg)):
+ return DBRFileFilter(arg)
+ return DBRFilter()
class DBREnv:
db = dict()
@@ -184,7 +188,7 @@
#load up patches...
if(len(self.db) > 0):
self.baseline = self.db
- self.patches = dbrpatch.loadpatches(os.path.join(self.location,dbrutils.patchpath()))
+ self.patches = dbrpatch.loadpatches(os.path.join(self.location,dbrutils.patch_path_internal()))
self.db = dbrpatch.createpatchedbaseline(self.baseline,self.patches)
def save(self):
@@ -230,4 +234,26 @@
print 'status: dirty'
else:
print 'status: clean'
-
\ No newline at end of file
+
+
+
+class DBRFilter:
+ info = ''
+ def __init__(self):
+ self.info = 'null filter'
+ def filter(self, results):
+ return results
+
+class DBRFileFilter (DBRFilter):
+ filename = ''
+ def __init__(self, filename):
+ DBRFilter.__init__(self)
+ self.info = 'file filter'
+ self.filename = filename
+ self.files = dbrutils.readfilenamesfromfile(self.filename)
+# for file in sorted(self.files):
+# print file
+
+ def filter(self, results):
+ return DBRCompResults(results.added & self.files, results.removed & self.files, results.touched & self.files, results.changed & self.files, results.unknown & self.files)
+
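
The filtering introduced here is plain set intersection: DBRFileFilter keeps only the entries whose names appear in the supplied file list. A minimal sketch of the idea, with ordinary sets standing in for the DBRCompResults fields and made-up file names:

    wanted = {'epoc32/release/armv5/urel/euser.dll', 'epoc32/include/e32base.h'}   # from the filter file
    added  = {'epoc32/release/armv5/urel/euser.dll', 'epoc32/data/unrelated.txt'}  # one comparison field
    print(sorted(added & wanted))   # only entries named in the filter survive
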
--- a/dbrtools/dbr/dbrpatch.py Wed Mar 24 16:42:06 2010 +0000
+++ b/dbrtools/dbr/dbrpatch.py Wed Mar 24 16:43:43 2010 +0000
@@ -222,15 +222,14 @@
return db
def listpatches():
- path = dbrutils.patchpath()
- patchfiles = glob.glob('%spatch*.txt' % path)
+ patchfiles = glob.glob(os.path.join(dbrutils.patchpath(),'patch*.txt'))
print 'Installed patches'
for file in patchfiles:
print '\t%s' % re.sub('.txt','',os.path.basename(file))
def removepatch(patch):
path = dbrutils.patchpath()
- file = '%s%s%s' %(path,patch,'.txt')
+ file = os.path.join(path,'%s.txt' % patch)
files = set()
files.add(file)
dbrutils.deletefiles(files)
@@ -238,7 +237,7 @@
def loadpatches(path):
patches = dict()
- patchfiles = glob.glob('%spatch*.txt' % path)
+ patchfiles = glob.glob(os.path.join(path,'patch*.txt'))
for file in patchfiles:
print 'Loading patch: %s' % re.sub('.txt','',os.path.basename(file))
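
Both hunks above replace string formatting with os.path.join; the old '%spatch*.txt' % path pattern only produced the intended glob when path ended in a separator. A small sketch (the directory is hypothetical):

    import glob, os
    path = '/epoc32/relinfo'                      # made-up patch directory
    print('%spatch*.txt' % path)                  # /epoc32/relinfopatch*.txt - separator lost
    print(os.path.join(path, 'patch*.txt'))       # /epoc32/relinfo/patch*.txt
    patchfiles = glob.glob(os.path.join(path, 'patch*.txt'))
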
--- a/dbrtools/dbr/dbrutils.py Wed Mar 24 16:42:06 2010 +0000
+++ b/dbrtools/dbr/dbrutils.py Wed Mar 24 16:43:43 2010 +0000
@@ -258,3 +258,22 @@
# env[fn] = data
print '\n'
return env
+
+def readfilenamesfromfile(filename):
+ files = set()
+ f = open(filename, 'r')
+
+ fixpath = re.compile('\\\\')
+ leadingslash = re.compile('^%s' % fixpath.sub('/',epocroot()))
+ newline = re.compile('\n')
+ epoc32 = re.compile('^epoc32');
+ trailingtab = re.compile('\t\d+') #normally in rombuild files...
+ for line in f:
+ line = newline.sub('',line)
+ name = string.lower(leadingslash.sub('',fixpath.sub('/',line)))
+ if(epoc32.search(name)):
+ name = trailingtab.sub('',name)
+ files.add(name)
+ f.close()
+ return files
+
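
readfilenamesfromfile normalises each line before adding it to the set: backslashes become forward slashes, the leading EPOCROOT prefix is stripped, the name is lower-cased, and the trailing tab-plus-number field found in rombuild listings is dropped. A standalone sketch of that normalisation, simplified by assuming EPOCROOT is just the drive root:

    import re
    fixpath     = re.compile(r'\\')
    epoc32      = re.compile(r'^epoc32')
    trailingtab = re.compile(r'\t\d+')            # rombuild lines end in "\t<size>"
    line = '\\epoc32\\release\\armv5\\urel\\euser.dll\t12345\n'
    name = fixpath.sub('/', line.rstrip('\n')).lstrip('/').lower()
    if epoc32.search(name):
        name = trailingtab.sub('', name)
    print(name)                                   # epoc32/release/armv5/urel/euser.dll
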
--- a/dbrtools/dbr/diffenv.py Wed Mar 24 16:42:06 2010 +0000
+++ b/dbrtools/dbr/diffenv.py Wed Mar 24 16:43:43 2010 +0000
@@ -17,17 +17,22 @@
def run(args):
if(len(args)):
+ filtertype = ''
if(len(args) == 1):
first = '/'
second = args[0]
else:
first = args[0]
- second = args[1]
+ second = args[1]
+ if(len(args) == 3):
+ filtertype = args[2]
db1=dbrenv.CreateDB(first)
db2=dbrenv.CreateDB(second)
results = db1.compare(db2)
- results.printdetail()
- results.printsummary()
+ filter = dbrenv.CreateFilter(filtertype)
+ filteredresults = filter.filter(results)
+ filteredresults.printdetail()
+ filteredresults.printsummary()
else:
help()
--- a/downloadkit/downloadkit.py Wed Mar 24 16:42:06 2010 +0000
+++ b/downloadkit/downloadkit.py Wed Mar 24 16:43:43 2010 +0000
@@ -26,7 +26,7 @@
import hashlib
import xml.etree.ElementTree as ET
-version = '0.14'
+version = '0.16'
user_agent = 'downloadkit.py script v' + version
headers = { 'User-Agent' : user_agent }
top_level_url = "https://developer.symbian.org"
@@ -380,11 +380,14 @@
urlbase = top_level_url + '/main/tools_and_kits/downloads/'
viewid = 5 # default to Symbian^3
- if version[0] == 2:
+ if version[0] == '2':
viewid= 1 # Symbian^2
- if version[0] == 3:
+ if version[0] == '3':
viewid= 5 # Symbian^3
- url = urlbase + ('view.php?id=%d'% viewid) + 'vId=' + version
+ url = urlbase + ('view.php?id=%d'% viewid)
+ if len(version) > 1:
+ # single character version means "give me the latest"
+ url = url + '&vId=' + version
req = urllib2.Request(url, None, headers)
response = urllib2.urlopen(req)
@@ -406,11 +409,18 @@
# check that this is the right version
match = re.search('Platform Release (\(Public\) )?v(\d\.\d\.[0-9a-z]+)', doc, re.IGNORECASE)
- if match and match.group(2) != version:
- print "*** ERROR: version %s is not available" % version
- print "*** the website is offering version %s instead" % match.group(1)
+ if not match:
+ print "*** ERROR: no version information in the download page"
return 0
+ if len(version) > 1:
+ if match.group(2) != version:
+ print "*** ERROR: version %s is not available" % version
+ print "*** the website is offering version %s instead" % match.group(2)
+ return 0
+ else:
+ print "The latest version of Symbian^%s is PDK %s" % (version, match.group(2))
+
# let's hope the HTML format never changes...
# <a href='download.php?id=27&cid=60&iid=270' title='src_oss_mw.zip'> ...</a>
threadlist = []
@@ -424,7 +434,10 @@
continue # no snapshots of Mercurial source thanks...
if options.nowinscw and re.search(r"winscw", filename) :
continue # no winscw emulator...
-
+ if options.noarmv5 and re.search(r"armv5", filename) :
+ continue # no armv5 binaries...
+ if options.noarmv5 and options.nowinscw and re.search(r"binaries_epoc.zip|binaries_epoc_sdk", filename) :
+ continue # skip binaries_epoc and binaries_epoc_sdk ...
if download_file(filename, downloadurl) != True :
continue # download failed
@@ -453,6 +466,8 @@
help="Don't download any of the source code available directly from Mercurial")
parser.add_option("--nowinscw", action="store_true", dest="nowinscw",
help="Don't download the winscw emulator")
+parser.add_option("--noarmv5", action="store_true", dest="noarmv5",
+ help="Don't download the armv5 binaries")
parser.add_option("--nounzip", action="store_true", dest="nounzip",
help="Just download, don't unzip or delete any files")
parser.add_option("--nodelete", action="store_true", dest="nodelete",
@@ -471,6 +486,7 @@
dryrun=False,
nosrc=False,
nowinscw=False,
+ noarmv5=False,
nounzip=False,
nodelete=False,
progress=False,
@@ -482,7 +498,7 @@
(options, args) = parser.parse_args()
if len(args) != 1:
- parser.error("Must supply a PDK version, e.g. 3.0.f")
+ parser.error("Must supply a PDK version, e.g. 3 or 3.0.h")
if not check_unzip_environment() :
parser.error("Unable to execute 7z command")
--- a/uh_parser/releaseables.pm Wed Mar 24 16:42:06 2010 +0000
+++ b/uh_parser/releaseables.pm Wed Mar 24 16:43:43 2010 +0000
@@ -228,26 +228,61 @@
my $layer = $1;
my $package = $2;
- mkdir("$::basedir/releaseables/$layer");
- mkdir("$::basedir/releaseables/$layer/$package");
+ mkdir("$::releaseablesdir/$layer");
+ mkdir("$::releaseablesdir/$layer/$package");
- my $filename = "$::basedir/releaseables/$layer/$package/info.tsv";
+ my $filename = "$::releaseablesdir/$layer/$package/info.tsv";
+ my $filenamemissing = "$::raptorbitsdir/$layer\_$package\_missing.txt" if ($::missing);
print "Writing info file $filename\n" if (!-f$filename);
open(FILE, ">>$filename");
+ open(MISSING, ">>$filenamemissing") if ($::missing);
for my $filetype (keys %{$whatlog_info->{$bldinf}->{$config}})
{
for (sort(@{$whatlog_info->{$bldinf}->{$config}->{$filetype}}))
{
print FILE "$_\t$filetype\t$config\n";
+ my $file = $_;
+
+ if($::missing && !-f $file)
+ {
+ print MISSING $file."\n";
+ }
}
}
+ close(FILE);
+ close(MISSING) if ($::missing);
- close(FILE);
}
}
}
+sub remove_missing_duplicates
+{
+ opendir(DIR, $::raptorbitsdir);
+ my @files = grep((-f "$::raptorbitsdir/$_" && $_ !~ /^\.\.?$/ && $_ =~ /_missing\.txt$/), readdir(DIR));
+ close(DIR);
+
+ for my $file (@files)
+ {
+ open(FILE, "+<$::raptorbitsdir/$file");
+ print "working on $file\n";
+
+ # Read it
+ my @content = <FILE>;
+
+ # Sort it, and grep to remove duplicates
+ my $previous = "\n\n";
+ @content = grep {$_ ne $previous && ($previous = $_, 1) } sort @content;
+
+ # Write it
+ seek(FILE, 0, 0);
+ print FILE @content;
+ truncate(FILE,tell(FILE));
+
+ close(FILE);
+ }
+}
sub normalize_filepath
{
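
The grep over the sorted list in remove_missing_duplicates is a compact "sort, then drop adjacent duplicates". The same step written out in Python:

    content = ['b.dll\n', 'a.dll\n', 'b.dll\n']   # sample lines from a _missing.txt file
    deduped = []
    for line in sorted(content):
        if not deduped or line != deduped[-1]:
            deduped.append(line)
    print(deduped)                                # ['a.dll\n', 'b.dll\n']
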
--- a/uh_parser/uh.pl Wed Mar 24 16:42:06 2010 +0000
+++ b/uh_parser/uh.pl Wed Mar 24 16:43:43 2010 +0000
@@ -20,6 +20,7 @@
use RaptorInfo;
use RaptorUnreciped;
use RaptorRecipe;
+use releaseables;
use XML::SAX;
use RaptorSAXHandler;
@@ -30,10 +31,13 @@
our $raptorbitsdir = 'raptorbits';
our $basedir = '';
my $outputdir = "html";
+our $releaseablesdir = "releaseables";
our $raptor_config = 'dummy_config';
our $current_log_file = '';
+our $missing = 0;
my $help = 0;
GetOptions((
+ 'missing!' => \$missing,
'basedir=s' => \$basedir,
'help!' => \$help
));
@@ -46,7 +50,9 @@
print "Unite and HTML-ize Raptor log files.\n";
print "Usage: perl uh.pl [OPTIONS] FILE1 FILE2 ...\n";
print "where OPTIONS are:\n";
- print "\t--basedir=DIR Generate output under DIR (defaults to current dir)\n";
+ print "\t-m, --missing\tAlso add the list of missing binaries (Raptor log should include whatlog info).\n";
+ print "\t\t\tCheck is done against the epoc tree at the root of the current drive\n";
+ print "\t-b DIR, --basedir DIR\tGenerate output under DIR (defaults to current dir)\n";
exit(0);
}
@@ -54,6 +60,7 @@
{
$raptorbitsdir = "$basedir/raptorbits";
$outputdir = "$basedir/html";
+ $releaseablesdir = "$basedir/releaseables";
}
mkdir($basedir) if (!-d$basedir);
@@ -62,6 +69,8 @@
system("rmdir /S /Q $raptorbitsdir") if (-d $raptorbitsdir);
mkdir($raptorbitsdir);
#print "Created dir $raptorbitsdir.\n";
+system("rmdir /S /Q $releaseablesdir") if (-d $releaseablesdir);
+mkdir("$releaseablesdir");
our $failure_item_number = 0;
@@ -69,12 +78,14 @@
open(SUMMARY, ">$raptorbitsdir/summary.csv");
close(SUMMARY);
+
my $saxhandler = RaptorSAXHandler->new();
$saxhandler->add_observer('RaptorError', $RaptorError::reset_status);
$saxhandler->add_observer('RaptorWarning', $RaptorWarning::reset_status);
$saxhandler->add_observer('RaptorInfo', $RaptorInfo::reset_status);
$saxhandler->add_observer('RaptorUnreciped', $RaptorUnreciped::reset_status);
$saxhandler->add_observer('RaptorRecipe', $RaptorRecipe::reset_status);
+$saxhandler->add_observer('releaseables', $releaseables::reset_status);
our $allbldinfs = {};
@@ -86,6 +97,8 @@
$parser->parse_uri($_);
}
+releaseables::remove_missing_duplicates();
+
my @allpackages = distinct_packages($allbldinfs);
print "Generating HTML...\n";
@@ -100,6 +113,7 @@
my $general_failures_by_category_severity = {};
my $recipe_failures_num_by_severity = {};
my $recipe_failures_by_package_severity = {};
+my $missing_by_package = {};
#my $severities = {};
my @severities = ('critical', 'major', 'minor', 'unknown');
@@ -235,13 +249,14 @@
print AGGREGATED "<table border='1'>\n";
$tableheader = "<tr><th>package</th>";
for (@severities) { $tableheader .= "<th>$_</th>"; }
+$tableheader .= "<th>missing</th>" if ($missing);
$tableheader .= "</tr>";
print AGGREGATED "$tableheader\n";
for my $package (@allpackages)
{
- if (defined $recipe_failures_num_by_severity->{$package})
+ my $mustlink = print_package_specific_summary($package);
+ if ($mustlink)
{
- print_package_specific_summary($package, $recipe_failures_by_package_severity->{$package});
my $packagesummaryhtml = $package;
$packagesummaryhtml =~ s,/,_,;
$packagesummaryhtml .= ".html";
@@ -252,6 +267,7 @@
$failuresbyseverity = $recipe_failures_num_by_severity->{$package}->{$_} if (defined $recipe_failures_num_by_severity->{$package}->{$_});
$packageline .= "<td>$failuresbyseverity</td>";
}
+ $packageline .= "<td>".$missing_by_package->{$package}."</td>" if ($missing);
$packageline .= "</tr>";
print AGGREGATED "$packageline\n";
}
@@ -310,42 +326,99 @@
sub print_package_specific_summary
{
- my ($package, $failures_by_severity) = @_;
+ my ($package) = @_;
+
+ my $anyfailures = 0;
my $filenamebase = $package;
$filenamebase =~ s,/,_,;
- open(SPECIFIC, ">$outputdir/$filenamebase.html");
- print SPECIFIC "FAILURES FOR PACKAGE $package<br/>\n";
-
- for my $severity (@severities)
+ if (defined $recipe_failures_by_package_severity->{$package})
{
- if (defined $failures_by_severity->{$severity})
- {
- print SPECIFIC "<br/>".uc($severity)."<br/>\n";
- print SPECIFIC "<table border='1'>\n";
- # $subcategory, $severity, $mmp, $phase, $recipe, $file, $line
- my $tableheader = "<tr><th>category</th><th>configuration</th><th>mmp</th><th>phase</th><th>recipe</th><th>log snippet</th></tr>";
- print SPECIFIC "$tableheader\n";
+ $anyfailures = 1;
+
+ my $failures_by_severity = $recipe_failures_by_package_severity->{$package};
+
+ open(SPECIFIC, ">$outputdir/$filenamebase.html");
+ print SPECIFIC "FAILURES FOR PACKAGE $package<br/>\n";
- for my $failure (@{$failures_by_severity->{$severity}})
+ for my $severity (@severities)
+ {
+ if (defined $failures_by_severity->{$severity})
{
- my $failureline = "<tr><td>$failure->{subcategory}</td>";
- $failureline .= "<td>$failure->{config}</td>";
- $failureline .= "<td>$failure->{mmp}</td>";
- $failureline .= "<td>$failure->{phase}</td>";
- $failureline .= "<td>$failure->{recipe}</td>";
- $failureline .= "<td><a href='$filenamebase\_failures.html#failure_item_$failure->{linenum}'>item $failure->{linenum}</a></td>";
- $failureline .= "</tr>";
- print SPECIFIC "$failureline\n";
+ print SPECIFIC "<br/>".uc($severity)."<br/>\n";
+ print SPECIFIC "<table border='1'>\n";
+ # $subcategory, $severity, $mmp, $phase, $recipe, $file, $line
+ my $tableheader = "<tr><th>category</th><th>configuration</th><th>mmp</th><th>phase</th><th>recipe</th><th>log snippet</th></tr>";
+ print SPECIFIC "$tableheader\n";
+
+ for my $failure (@{$failures_by_severity->{$severity}})
+ {
+ my $failureline = "<tr><td>$failure->{subcategory}</td>";
+ $failureline .= "<td>$failure->{config}</td>";
+ $failureline .= "<td>$failure->{mmp}</td>";
+ $failureline .= "<td>$failure->{phase}</td>";
+ $failureline .= "<td>$failure->{recipe}</td>";
+ $failureline .= "<td><a href='$filenamebase\_failures.html#failure_item_$failure->{linenum}'>item $failure->{linenum}</a></td>";
+ $failureline .= "</tr>";
+ print SPECIFIC "$failureline\n";
+ }
+
+ print SPECIFIC "</table>\n";
+ print SPECIFIC "<br/>\n";
}
+ }
+ close(SPECIFIC);
+ }
+
+ if ($missing)
+ {
+ my $missinglistfile = $package;
+ $missinglistfile =~ s,/,_,;
+ $missinglistfile .= "_missing.txt";
+ if (open(MISSINGLIST, "$::raptorbitsdir/$missinglistfile"))
+ {
+ my @list = ();
+ while(<MISSINGLIST>)
+ {
+ my $missingfile = $_;
+ chomp $missingfile;
+ $missingfile =~ s,^\s+,,g;
+ $missingfile =~ s,\s+$,,g;
+ push(@list, $missingfile);
+ }
+ close(MISSINGLIST);
- print SPECIFIC "</table>\n";
- print SPECIFIC "<br/>\n";
+ $missing_by_package->{$package} = scalar(@list);
+
+ if ($missing_by_package->{$package} > 0)
+ {
+ open(SPECIFIC, ">>$outputdir/$filenamebase.html");
+ print SPECIFIC "FAILURES FOR PACKAGE $package<br/>\n" if(!$anyfailures);
+
+ $anyfailures = 1;
+
+ print SPECIFIC "<br/>MISSING<br/>\n";
+ print SPECIFIC "<table border='1'>\n";
+ # $subcategory, $severity, $mmp, $phase, $recipe, $file, $line
+ my $tableheader = "<tr><th>file</th></tr>\n";
+ print SPECIFIC "$tableheader\n";
+
+ for my $missingfile (sort {$a cmp $b} @list)
+ {
+ $missingfile = CGI::escapeHTML($missingfile);
+ print SPECIFIC "<tr><td>$missingfile</td></tr>\n";
+ }
+
+ print SPECIFIC "</table>\n";
+ print SPECIFIC "<br/>\n";
+
+ close(SPECIFIC);
+ }
}
}
- close(SPECIFIC);
+ return $anyfailures;
}
sub translate_detail_files_to_html