587
|
1 |
#============================================================================
|
|
2 |
#Name : mappers.py
|
|
3 |
#Part of : Helium
|
|
4 |
|
|
5 |
#Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies).
|
|
6 |
#All rights reserved.
|
|
7 |
#This component and the accompanying materials are made available
|
|
8 |
#under the terms of the License "Eclipse Public License v1.0"
|
|
9 |
#which accompanies this distribution, and is available
|
|
10 |
#at the URL "http://www.eclipse.org/legal/epl-v10.html".
|
|
11 |
#
|
|
12 |
#Initial Contributors:
|
|
13 |
#Nokia Corporation - initial contribution.
|
|
14 |
#
|
|
15 |
#Contributors:
|
|
16 |
#
|
|
17 |
#Description:
|
|
18 |
#===============================================================================
|
|
19 |
|
|
20 |
""" Archive mappers that map how the input files are divided into archives.
|
|
21 |
|
|
22 |
|
|
23 |
"""
|
|
24 |
|
|
25 |
import buildtools
|
|
26 |
import os
|
|
27 |
import codecs
|
|
28 |
import fileutils
|
|
29 |
import logging
|
|
30 |
import symrec
|
|
31 |
import re
|
|
32 |
import csv
|
|
33 |
import shutil
|
|
34 |
|
|
35 |
# Module-level logger shared by all mapper classes in this file.
_logger = logging.getLogger('logger.mappers')
# NOTE(review): forcing the level in library code overrides any logging
# configuration done by the application -- confirm this is intentional.
_logger.setLevel(logging.INFO)

# Default value for missing/invalid policy files.
MISSING_POLICY = "9999"
|
|
40 |
|
|
41 |
|
|
42 |
class Mapper(object):
    """ Abstract base class for archive mappers.

    Concrete mappers must derive from this class. It creates the archive
    output directory and, when grace metadata is enabled in the
    configuration, creates and maintains the release metadata file.
    """

    def __init__(self, config, tool):
        """ Keep the configuration and archiving tool, prepare metadata. """
        self._tool = tool
        self._config = config
        self._metadata = None
        archives_dir = self._config['archives.dir']
        if not os.path.exists(archives_dir):
            os.makedirs(archives_dir)
        if self._config.has_key("grace.metadata") and self._config.get_boolean("grace.metadata", False):
            metadata_path = os.path.join(archives_dir, self._config['name'] + ".metadata.xml")
            # Seed the metadata file from the template when one is configured
            # and no metadata file exists yet.
            have_template = self._config.has_key("grace.template") and os.path.exists(self._config["grace.template"])
            if have_template and not os.path.exists(metadata_path):
                shutil.copy(config["grace.template"], metadata_path)
            self._metadata = symrec.ReleaseMetadata(metadata_path,
                                                    service=self._config['grace.service'],
                                                    product=self._config['grace.product'],
                                                    release=self._config['grace.release'])
            self._metadata.save()

    def declare_package(self, filename, extract="single"):
        """ Register a package in the metadata file.

        Does nothing when grace metadata is disabled.
        """
        if self._metadata is None:
            return
        self._metadata.add_package(os.path.basename(filename),
                                   extract=extract,
                                   filters=self._config.get_list('grace.filters', None),
                                   default=self._config.get_boolean('grace.default', True))
        self._metadata.save()

    def create_commands(self, manifest):
        """ Return a list of command lists for the given manifest. """
        command = self._tool.create_command(self._config['name'], manifests=[manifest])
        return [[command]]
|
|
74 |
|
|
75 |
|
|
76 |
class DefaultMapper(Mapper):
    """ The default mapper. It splits the content based on size characteristics.

    The 'max.files.per.archive' and 'max.uncompressed.size' properties define
    how the input files are split between a number of part zips.
    """
    def __init__(self, config, archiver):
        """ Initialization. """
        Mapper.__init__(self, config, archiver)

    def create_commands(self, manifest):
        """ Return a list of command lists, one command per manifest part. """
        result = []

        _logger.info(" * Input manifest: " + manifest)
        manifests = self._split_manifest_file(self._config['name'], manifest)
        if not os.path.exists(self._config['archives.dir']):
            _logger.info(" * Mkdir " + self._config['archives.dir'])
            os.makedirs(self._config['archives.dir'])

        for part_manifest in manifests:
            _logger.info(" * Creating command for manifest: " + part_manifest)
            filename = os.path.join(self._config['archives.dir'], os.path.splitext(os.path.basename(part_manifest))[0])
            if len(manifests) == 1:
                # Single part: the archive keeps the plain configured name.
                filename = os.path.join(self._config['archives.dir'], self._config['name'])
            _logger.info(" * " + filename + self._tool.extension())
            self.declare_package(filename + self._tool.extension(), self._config.get('grace.extract', 'single'))
            result.extend(self._tool.create_command(self._config.get('zip.root.dir', self._config['root.dir']), filename, manifests=[part_manifest]))

        # All part commands can run as one parallel stage.
        return [result]

    def _open_part_file(self, name, part, filenames):
        """ Open the manifest for part number *part*, recording its path in *filenames*.

        Returns the open (UTF-8) file object for writing.
        """
        part_path = os.path.join(self._config['temp.build.dir'], "%s_part%02d" % (name, part) + ".txt")
        filenames.append(part_path)
        return codecs.open(part_path, 'w', "utf-8")

    def _split_manifest_file(self, name, manifest_file_path):
        """ This method returns a list of files that contain the content of the zip parts to create.

        When neither split property is configured, the input manifest is
        returned unchanged as the single part.
        """
        filenames = []

        if self._config.has_key('max.files.per.archive') or self._config.has_key('max.uncompressed.size'):
            size = 0
            files = 0
            part = 0
            output = None

            if os.path.exists(self._config['root.dir']) and os.path.isdir(self._config['root.dir']):
                curdir = os.path.abspath(os.curdir)
                # Manifest entries are relative to the archive root.
                os.chdir(self._config.get('zip.root.dir', self._config['root.dir']))
                # Hoist the conversions out of the loop; defaults are effectively "no limit".
                maxfiles = int(self._config.get('max.files.per.archive', 100000000))
                _logger.info("Max number of files per archive: " + str(maxfiles))
                max_uncompressed_size = int(self._config.get('max.uncompressed.size', 100000000))
                _logger.info("Max uncompressed size per archive: " + str(max_uncompressed_size))

                file_handle = codecs.open(manifest_file_path, "r", "utf-8")

                for line in file_handle.readlines():
                    line = line.rstrip()

                    if os.path.isfile(line):
                        # Start a new part on the first file, or when either limit is hit.
                        if part == 0 or files == maxfiles or size + os.path.getsize(line) >= max_uncompressed_size:
                            if output is not None:
                                output.close()
                            size = 0
                            files = 0
                            part += 1
                            output = self._open_part_file(name, part, filenames)

                        files += 1
                        size += os.path.getsize(line)
                        output.write(u"".join([line, u'\n']))
                    elif os.path.isdir(line):
                        # Only empty directories are listed explicitly; non-empty
                        # ones are covered by their files.
                        if len(os.listdir(line)) == 0:
                            if part == 0 or files == maxfiles:
                                if output is not None:
                                    output.close()
                                size = 0
                                files = 0
                                part += 1
                                # Was a plain open() without encoding while unicode is
                                # written below; use the same UTF-8 codec as the file branch.
                                output = self._open_part_file(name, part, filenames)

                            files += 1
                            output.write(u"".join([line, u'\n']))
                    else:
                        _logger.warning('Not recognized as file or directory: %s' % line)

                if output is not None:
                    output.close()

                file_handle.close()
                os.chdir(curdir)
        else:
            filenames.append(manifest_file_path)

        return filenames
|
|
179 |
|
|
180 |
|
|
181 |
class PolicyMapper(Mapper):
    """ Implements a policy content mapper.

    It transforms a list of files into a list of commands with their inputs.
    All files with policy 0 will be under the main archive.
    All other files will get backed up by policy and then stored into a second archive.
    """

    def __init__(self, config, archiver):
        """ Initialization. """
        Mapper.__init__(self, config, archiver)
        self._policies = {}       # policy value -> open sublist manifest handle
        self._policy_cache = {}   # normalized directory -> policy value
        self._binary = {}         # policy id -> action flag from the CSV
        # Load csv
        if self._config.has_key('policy.csv'):
            if os.path.exists(self._config['policy.csv']):
                self.load_policy_binary(self._config['policy.csv'])
            else:
                _logger.error("POLICY_ERROR: File not found '%s'." % self._config['policy.csv'])

    def load_policy_binary(self, csvfile, column=1):
        """ Loads the binary IDs from the CSV file.

        Rows whose first field is not a numeric id or an 0842xxx id are discarded.
        """
        _logger.info("POLICY_INFO: Loading policy definition '%s'." % csvfile)
        # Keep a handle so the file can be closed (it previously leaked).
        csv_input = open(csvfile, "rU")
        try:
            for row in csv.reader(csv_input):
                policy_id = row[0].strip()
                if re.match(r"^((?:\d+)|(?:0842[0-9a-zA-Z]{3}))$", policy_id):
                    _logger.info("POLICY_INFO: Adding policy: '%s' => '%s'" % (policy_id, row[column].strip().lower()))
                    self._binary[policy_id] = row[column].strip().lower()
                else:
                    _logger.warning("POLICY_WARNING: Discarding policy: '%s'." % policy_id)
        finally:
            csv_input.close()

    def zip2zip(self):
        """ Should the non public zip be zipped up under a specific zip. """
        return self._config.get_boolean('policy.zip2zip', False)

    def create_commands(self, manifest):
        """ Generates a list of build commands. """
        result = []
        stages = []

        # Create the archive output directory
        if not os.path.exists(self._config['archives.dir']):
            _logger.info(" * Mkdir " + self._config['archives.dir'])
            os.makedirs(self._config['archives.dir'])

        # Sort the manifest content, splitting it by policy
        file_handle = codecs.open(manifest, "r", "utf-8")
        for line in file_handle.readlines():
            line = line.rstrip()
            self._sort_by_policy(line)
        file_handle.close()

        # Generating sublists: one archive command per policy value.
        for key in self._policies.keys():
            self._policies[key].close()
            manifest = os.path.join(self._config['temp.build.dir'], self._config['name'] + "_%s" % key + ".txt")
            filename = os.path.join(self._config['archives.dir'], self._config['name'] + "_%s" % key)
            _logger.info(" * " + filename + self._tool.extension())
            result.extend(self._tool.create_command(self._config.get('zip.root.dir', self._config['root.dir']), filename, manifests=[manifest]))
        stages.append(result)

        # See if any internal archives need to be created
        content = []
        for key in self._policies.keys():
            if not self.zip2zip():
                self.declare_package(self._config['name'] + "_%s" % key + self._tool.extension())
            elif key != "0":
                # Non-public archive: pack it inside the internal zip later.
                content.append(os.path.join(self._config['archives.dir'], self._config['name'] + "_%s" % key + self._tool.extension()))
            else:
                self.declare_package(self._config['name'] + "_%s" % key + self._tool.extension())

        # Creating zip that contains each policy zips.
        if self.zip2zip() and len(content) > 0:
            manifest = os.path.join(self._config['temp.build.dir'], self._config['name'] + ".internal.txt")
            file_handle = codecs.open(manifest, "w+", "utf-8")
            file_handle.write(u"\n".join(content))
            file_handle.close()
            internal = "internal"
            if self._config.has_key('policy.internal.name'):
                internal = self._config['policy.internal.name']
            filename = os.path.join(self._config['archives.dir'], self._config['name'] + "_" + internal)
            _logger.info(" * " + filename + self._tool.extension())
            self.declare_package(filename + self._tool.extension(), "double")
            stages.append(self._tool.create_command(self._config['archives.dir'], filename, manifests=[manifest]))

            # Drop the intermediate policy zips once they are packed.
            cmds = []
            for filename in content:
                cmds.append(buildtools.Delete(filename=filename))
            stages.append(cmds)
        return stages

    def get_dir_policy(self, dirname):
        """ Get policy value for a specific directory (cached). """
        dirname = os.path.normpath(dirname)
        if dirname not in self._policy_cache:
            policyfile = None
            for name in self.get_policy_filenames():
                if os.sep != '\\':
                    # Case-insensitive lookup on case-sensitive filesystems.
                    for filename in os.listdir(dirname):
                        if filename.lower() == name.lower():
                            policyfile = os.path.join(dirname, filename)
                            break
                elif os.path.exists(os.path.join(dirname, name)):
                    policyfile = os.path.join(dirname, name)
                if policyfile is not None:
                    # Stop at the first matching policy filename (the
                    # non-Windows branch previously kept scanning and let a
                    # later name override an earlier match).
                    break

            value = self._config.get('policy.default.value', MISSING_POLICY)
            if policyfile is not None:  # policy file present
                try:
                    value = fileutils.read_policy_content(policyfile)
                    if value not in self._binary:  # check policy file is valid
                        _logger.error("POLICY_ERROR: policy file found %s but policy %s value not exists in csv" % (policyfile, value))
                except IOError as exc:
                    _logger.error("POLICY_ERROR: %s" % exc)
                    value = self._config.get('policy.default.value', MISSING_POLICY)
            else:  # no policy file present
                filePresent = False
                dirPresent = False
                for ftype in os.listdir(dirname):  # see if files or directories are present
                    if os.path.isdir(os.path.join(dirname, ftype)):
                        dirPresent = True
                    if os.path.isfile(os.path.join(dirname, ftype)):
                        filePresent = True

                if filePresent:  # files present : error
                    _logger.error("POLICY_ERROR: could not find a policy file under: '%s'" % dirname)
                elif dirPresent and not filePresent:  # directories only : warning
                    _logger.error("POLICY_WARNING: no policy file, no files present, but sub-folder present in : '%s'" % dirname)
                else:  # no files no dirs : warning
                    _logger.error("POLICY_WARNING: empty directory at : '%s'" % dirname)

            # saving the policy value for that directory.
            self._policy_cache[dirname] = value
        return self._policy_cache[dirname]

    def get_policy_filenames(self):
        """ Returns the list of potential policy filenames. """
        return self._config.get_list('policy.filenames', ['Distribution.policy.s60'])

    def _sort_by_policy(self, filename):
        """ Store the input file sorted by its policy number. """
        path = os.path.join(self._config['root.dir'], filename)
        parentdir = os.path.dirname(path)
        if os.path.isdir(path):
            parentdir = path
        value = self.get_dir_policy(parentdir)
        if value not in self._policies:
            self._policies[value] = codecs.open(os.path.join(self._config['temp.build.dir'], self._config['name'] + "_%s" % value + ".txt"), "w+", "utf-8")
        self._policies[value].write(u"%s\n" % filename)
|
|
332 |
|
|
333 |
|
|
334 |
class PolicyRemoverMapper(PolicyMapper):
    """ This class implements a variant of the policy mapper.

    It removes the internal source. Only binary flagged content is kept.
    """

    def __init__(self, config, archiver):
        """ Initialization. """
        PolicyMapper.__init__(self, config, archiver)
        self._rm_policy_cache = {}  # normalized dirname -> policy value

    def get_policy_root_dir(self):
        """ Return the policy.root.dir or root.dir if not set or not under root.dir."""
        if not self._config.has_key("policy.root.dir"):
            return os.path.normpath(self._config['root.dir'])
        if fileutils.destinsrc(self._config['root.dir'], self._config['policy.root.dir']):
            return os.path.normpath(self._config['policy.root.dir'])
        return os.path.normpath(self._config['root.dir'])

    def get_rmdir_policy(self, dirname):
        """ Check if the directory should be dropped or not.

        Returns the (cached) policy value for *dirname*.
        """
        # NOTE(review): a "check if parent is banned" step used to compute
        # policy/root dirs here but never used them; the dead code was removed.
        dirname = os.path.normpath(dirname)
        if os.sep == '\\':
            # Windows paths are case-insensitive: normalize the cache key.
            dirname = dirname.lower()

        # else get real value...
        if dirname not in self._rm_policy_cache:
            self._rm_policy_cache[dirname] = self.get_dir_policy(dirname)

        return self._rm_policy_cache[dirname]

    def create_commands(self, manifest):
        """ Generates a list of build commands.

        Extends the PolicyMapper stages with a final stage that deletes every
        file whose policy is not flagged "yes" or "bin" in the CSV.
        """
        stages = PolicyMapper.create_commands(self, manifest)

        if not self._config.has_key('policy.csv'):
            _logger.error("POLICY_ERROR: Property 'policy.csv' not defined everything will get removed.")
        cmds = []
        file_handle = codecs.open(manifest, "r", "utf-8")
        for line in file_handle.readlines():
            line = line.rstrip()
            filepath = os.path.normpath(os.path.join(self._config.get('zip.root.dir', self._config['root.dir']), line))
            value = self.get_rmdir_policy(os.path.dirname(filepath))
            delete = True
            if value in self._binary:
                if self._binary[value] == "yes":
                    _logger.info("POLICY_INFO: Keeping %s (%s=>yes)!" % (filepath, value))
                    delete = False
                elif self._binary[value] == "bin":
                    _logger.info("POLICY_INFO: Keeping %s (%s=>bin)!" % (filepath, value))
                    delete = False
            else:
                _logger.error("POLICY_ERROR: %s value for %s not in csv file. Will be removed!!" % (value, filepath))

            if delete:
                _logger.info("POLICY_INFO: File %s will be removed!" % filepath)
                cmds.append(buildtools.Delete(filename=filepath))
        file_handle.close()
        if len(cmds) > 0:
            stages.append(cmds)
        return stages
|
|
402 |
|
|
403 |
|
|
404 |
class SFPolicyRemoverMapper(PolicyRemoverMapper):
    """ Policy remover driven by the SFL action column of the CSV. """

    def __init__(self, config, archiver):
        """ Delegate construction to the generic policy remover. """
        PolicyRemoverMapper.__init__(self, config, archiver)

    def load_policy_binary(self, csvfile, column=1):
        """ Load the policy actions from the 3rd column of the CSV. """
        _logger.info("POLICY_INFO: Loading actions from the 3rd column")
        PolicyRemoverMapper.load_policy_binary(self, csvfile, 3)
|
|
415 |
|
|
416 |
class EPLPolicyRemoverMapper(PolicyRemoverMapper):
    """ Policy remover driven by the EPL action column of the CSV. """

    def __init__(self, config, archiver):
        """ Delegate construction to the generic policy remover. """
        PolicyRemoverMapper.__init__(self, config, archiver)

    def load_policy_binary(self, csvfile, column=1):
        """ Load the policy actions from the 4th column of the CSV. """
        _logger.info("POLICY_INFO: Loading actions from the 4th column")
        PolicyRemoverMapper.load_policy_binary(self, csvfile, 4)
|
|
426 |
|
|
427 |
|
|
428 |
# Registry of mapper implementations, keyed by their configuration id.
MAPPERS = {
    'default': DefaultMapper,
    'policy': PolicyMapper,
    'policy.remover': PolicyRemoverMapper,
    'sfl.policy.remover': SFPolicyRemoverMapper,
    'epl.policy.remover': EPLPolicyRemoverMapper,
}
|
|
433 |
|
|
434 |
def get_mapper(name, config, archiver):
    """ Get mapper instance from its string id.

    Raises an Exception when no mapper is registered under *name*.
    """
    try:
        mapper_class = MAPPERS[name]
    except KeyError:
        raise Exception("ERROR: Could not find mapper '%s'." % name)
    return mapper_class(config, archiver)
|