0
|
1 |
#
|
|
2 |
# Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies).
|
|
3 |
# All rights reserved.
|
|
4 |
# This component and the accompanying materials are made available
|
|
5 |
# under the terms of "Eclipse Public License v1.0"
|
|
6 |
# which accompanies this distribution, and is available
|
|
7 |
# at the URL "http://www.eclipse.org/legal/epl-v10.html".
|
|
8 |
#
|
|
9 |
# Initial Contributors:
|
|
10 |
# Nokia Corporation - initial contribution.
|
|
11 |
#
|
|
12 |
# Contributors:
|
|
13 |
#
|
|
14 |
# Description:
|
|
15 |
#
|
|
16 |
|
|
17 |
import operator as ops
|
|
18 |
import logging
|
|
19 |
import tokenize
|
|
20 |
from token import ENDMARKER, NAME, ERRORTOKEN
|
|
21 |
import StringIO
|
|
22 |
|
|
23 |
from cone.public import container
|
|
24 |
|
|
25 |
# Registry of relation classes, keyed by relation keyword (e.g. 'requires').
RELATIONS = {}
|
|
26 |
|
|
27 |
def abstract():
    """Raise NotImplementedError naming the function that invoked it.

    Intended as the body of abstract methods: calling an unimplemented
    method reports that method's own name.
    """
    import inspect
    frames = inspect.getouterframes(inspect.currentframe())
    caller_name = frames[1][3]
    raise NotImplementedError(caller_name + ' needs to be implemented')
|
|
31 |
|
|
32 |
def get_tokens(tokenstr):
    """Tokenize a rule string into a flat list of string tokens.

    Runs Python's own tokenizer over the string and post-processes the
    stream: adjacent NAME/ERRORTOKEN pieces are glued back together (so
    references the tokenizer splits come out as one token), and dotted
    sequences like ``a . b`` are re-joined into ``a.b``.

    @param tokenstr: the rule text to tokenize.
    @return: list of token strings.
    """
    result = []
    tokens = []
    # Normalize line endings; stray '\r' confuses the tokenizer.
    tokenstr = tokenstr.replace('\r', '')
    name_buffer = [] # Temp buffer for reading name tokens
    last_epos = None  # end position (row, col) of the previous NAME/ERRORTOKEN
    for toknum, tokval, spos, epos, _ in tokenize.generate_tokens(StringIO.StringIO(unicode(tokenstr)).readline):
        #print "toknum: %r, tokval: %r, spos: %r, epos: %r" % (toknum, tokval, spos, epos)
        val = tokval.strip('\r\n\t ')

        # Flush any pending name pieces when the input ends.
        if toknum == ENDMARKER and name_buffer:
            tokens.append(''.join(name_buffer))

        # Ignore whitespace (this ignores also the end marker,
        # since its value is empty)
        if val == '': continue

        # Put NAME, and ERRORTOKEN tokens through the temp
        # buffer
        if toknum in (NAME, ERRORTOKEN):
            # If this and the previous token in the temp buffer are not adjacent,
            # they belong to separate tokens
            # NOTE(review): only columns are compared here, not rows — tokens on
            # different lines that share a column would be joined; confirm intended.
            if name_buffer and spos[1] != last_epos[1]:
                tokens.append(''.join(name_buffer))
                name_buffer = []

            name_buffer.append(val)
            last_epos = epos
        # Other tokens can just go directly to the token list
        else:
            if name_buffer:
                tokens.append(''.join(name_buffer))
                name_buffer = []
            tokens.append(val)

    while len(tokens) > 0:
        val = tokens.pop(0)
        # Join the refs with dot in between them to make them dotted refs
        if val == '.':
            newval = ".".join([result.pop(),tokens.pop(0)])
            result.append( newval )
        else:
            result.append( val )

    return result
|
|
77 |
|
|
78 |
class RelationException(Exception):
    """Raised for errors in relation processing."""
|
|
80 |
|
|
81 |
#### The containers are here ####
|
|
82 |
|
|
83 |
|
|
84 |
class RelationBase(object):
    """
    Base class for all named relations that can be applied between
    objects, e.g. a relation "depends" that can be applied in a Rule.
    """
    relation_name = "RelationBase"

    def __init__(self, data, left, right):
        self.description = ""
        # Fall back to a fresh DataContainer when no data is supplied.
        if data:
            self.data = data
        else:
            self.data = container.DataContainer()
        self.left = left
        self.right = right

    def __str__(self):
        """Return a string presentation of the relation object."""
        return "%s %s %s" % (self.left, self.relation_name, self.right)

    def get_name(self):
        """Return the relation name."""
        return self.relation_name

    def get_description(self):
        """Return a possible description of the relation."""
        return self.description

    def execute(self):
        """Execute the relation object. No-op in this base class."""
        pass
|
|
119 |
|
|
120 |
|
|
121 |
class RelationContainer(RelationBase, list):
    """
    Interface for collecting relation sets into one place so that they
    can be executed through the container.
    """

    def __init__(self, data=None):
        super(RelationContainer, self).__init__(data, 'LContainer', 'RContainer')
        self.value_list = list()

    def append(self, value):
        self.value_list.append(value)

    def __iter__(self):
        return iter(self.value_list)

    def __len__(self):
        return len(self.value_list)

    def __or__(self, other):
        """
        Merge another RelationContainer into this one, dropping duplicates.
        The modification is in place; the called object is returned.
        (Note: value_list becomes a set after this operation.)
        """
        merged = set(self.value_list) | set(other.value_list)
        self.value_list = merged
        return self

    def __unicode__(self):
        if not self:
            return 'No relations'
        return ''.join([unicode(value) for value in self])

    def find_relations(self, refs):
        abstract()

    def add_relation(self, relation):
        abstract()

    def has_errors(self):
        abstract()
|
|
160 |
|
|
161 |
class RelationContainerImpl(RelationContainer):
    """ Base implementation for RelationContainer to use in ConE rules.
    """
    def execute(self):
        """Execute every contained relation.

        @return: the logical AND of the relations' results (True when the
        container is empty). Removed an unused loop counter from the
        original implementation.
        """
        ret = True
        for relation in self:
            r = relation.execute()
            ret = ret and r
        return ret

    def find_relations(self, refs):
        """Return the contained relations that refer to any of 'refs'."""
        relations = []
        for ref in refs:
            for relation in self:
                if relation.has_ref(ref):
                    relations.append(relation)
        return relations

    def add_relation(self, relation):
        """Add a relation to this container."""
        self.append(relation)

    def has_ref(self, refs):
        """Return True if any contained relation refers to any of 'refs'."""
        for ref in refs:
            for relation in self:
                if relation.has_ref(ref):
                    return True
        return False

    def has_errors(self):
        """Return True if any contained relation reported errors."""
        for relation in self:
            if relation.has_errors():
                return True
        return False

    def get_errors(self):
        """Return the concatenated errors of all contained relations."""
        errors = []
        for relation in self:
            errors += relation.get_errors()
        return errors
|
|
202 |
|
|
203 |
#### The relations are here ####
|
|
204 |
|
|
205 |
class BaseRelation(RelationBase):
    """ BaseRelation implements the basic evaluation logic for relations.

    This class is abstract and should be extended for a concrete
    implementation of a relation type.

    Subclasses need to set their own context in their constructor before this
    class's constructor is called if a custom context is needed. If context is
    not set then DefaultContext is used.
    """
    KEY = 'base_relation'

    def __init__(self, data, left, right):
        # Context needs to be overridden for special purposes; only create
        # the default one when a subclass has not set self.context already.
        try:
            self.__getattribute__('context')
        except AttributeError:
            self.context = DefaultContext(data)

        left = self.expand_rule_elements(left)
        right = self.expand_rule_elements(right)
        super(BaseRelation, self).__init__(data, left, right)
        self.interpreter = ASTInterpreter(context=self.context)

    def execute(self):
        """ Evaluate '<left> <KEY> <right>' with the AST interpreter.

        @return: the evaluation result. Callers should check
        has_errors() / get_errors() afterwards for failure details.
        """
        # logger.debug("Interpreter context %s" % self.interpreter.context)
        self.interpreter.create_ast('%s %s %s' % (self.left, self.KEY, self.right))
        ret = self.interpreter.eval()
        return ret

    def get_keys(self):
        """ Returns the references from this relation (both sides).
        """
        refs = ASTInterpreter.extract_refs(self.left)
        refs += ASTInterpreter.extract_refs(self.right)
        return refs

    def has_ref(self, ref):
        """ Returns True if 'ref' is included in this relation.
        """
        return ref in self.get_keys()

    def has_errors(self):
        """ Returns True if the last evaluation recorded any errors.
        """
        return bool(self.interpreter.errors)

    def get_refs(self):
        """ Returns a (left side refs, right side refs) tuple.
        """
        return (ASTInterpreter.extract_refs(self.left), ASTInterpreter.extract_refs(self.right))

    def _eval_rside_value(self, value):
        # Subclass responsibility: evaluate a right-side value.
        abstract()

    def _compare_value(self, value):
        # Subclass responsibility: decide whether 'value' counts as an error.
        abstract()

    def extract_erroneus_features_with_values(self):
        """
        Extract references that have errors.

        Returns dictionary { 'reference' : 'value' }
        """
        data_dict = {}
        for ref in ASTInterpreter.extract_refs(self.right):
            value = self.data.get_feature(ref)
            if self._compare_value(value):
                data_dict[ref] = value
            elif value is None:  # identity test; was 'value == None'
                data_dict[ref] = None
        return data_dict

    def get_errors(self):
        """ Returns the error dictionary collected by the interpreter.
        """
        return self.interpreter.errors

    def expand_rule_elements(self, rule):
        """ Expands rule elements based on the reference.

        Context is used for fetching the child elements for parent references
        which use the asterisk identifier for selecting all child features:
        'parent_feature.*' -> 'child_fea_1 and child_fea_2'.
        """
        tokens = get_tokens(rule)

        expanded_rule = ""
        for token in tokens:
            if token.endswith('.*'):
                # Expand 'parent.*' into 'child1 and child2 and ...'.
                index = token.index('.*')
                parent_ref = token[:index]
                children = self.context.get_children_for_reference(parent_ref)
                expanded_element = ' and '.join([child.reference for child in children])
                if expanded_rule:
                    expanded_rule = '%s and %s' % (expanded_rule, expanded_element.rstrip())
                else:
                    expanded_rule = expanded_element.rstrip()
            elif token.lower() in OPERATORS:
                # Operators get surrounding spaces in the expanded rule.
                expanded_rule += ' %s ' % token
            else:
                if expanded_rule:
                    expanded_rule += '%s'% token
                else:
                    expanded_rule = token
        return expanded_rule.strip()
|
|
308 |
|
|
309 |
class RequireRelation(BaseRelation):
    # Relation using the 'requires' operator between the left and right rules.
    KEY = 'requires'

# Register so that rule parsers can look the relation up by its keyword.
RELATIONS[RequireRelation.KEY] = RequireRelation
|
|
312 |
|
|
313 |
class ExcludesRelation(BaseRelation):
    """Relation using the 'excludes' operator between the left and right rules."""
    KEY = 'excludes'

# Register under the class KEY (same value as the previous literal 'excludes')
# for consistency with RequireRelation's registration.
RELATIONS[ExcludesRelation.KEY] = ExcludesRelation
|
|
317 |
|
|
318 |
################################
|
|
319 |
# Abstract syntax tree builder #
|
|
320 |
################################
|
|
321 |
|
|
322 |
def nor(expression, a, b):
    """Logical NOR: True only when neither operand is true."""
    combined = ops.or_(a, b)
    return not combined
|
|
324 |
|
|
325 |
def nand(expression, a, b):
    """Logical NAND: False only when both operands are true."""
    both = ops.and_(a, b)
    return not both
|
|
327 |
|
|
328 |
def truth_and(expression, a, b):
    """Truth-test both operands and AND the results, yielding a bool."""
    if not ops.truth(a):
        return False
    return ops.truth(b)
|
|
330 |
|
|
331 |
class DefaultContext(object):
    """ DefaultContext implements ConE specific context for handling rules.
    """
    def __init__(self, data):
        self.data = data

    def eval(self, ast, expression, value):
        """Hook invoked after each sub-expression evaluation; no-op here."""
        pass

    def get_keys(self, refs):
        """Return the references contained in the given expression string."""
        return ASTInterpreter.extract_refs(refs)

    def get_children_for_reference(self, reference):
        # implement ConE specific children expansion
        pass

    def handle_terminal(self, expression):
        """Convert a terminal token: int when possible, otherwise unchanged."""
        try:
            return int(expression)
        except (ValueError, TypeError):
            # Was a bare 'except:' which also swallowed KeyboardInterrupt etc.;
            # only conversion failures should fall back to the raw expression.
            return expression
|
|
352 |
|
|
353 |
# Operator precedence table used by the shunting-yard conversion:
# a higher value binds tighter.
PRECEDENCES = {
    'PREFIX_OPERATORS' : 10,
    'MULDIV_OPERATORS' : 8,
    'ADDSUB_OPERATORS' : 7,
    'SHIFT_OPERATORS' : 6,
    'BITWISE_OPERATORS' : 5,
    'COMPARISON_OPERATORS' : 4,
    'SET_OPERATORS' : 3,
    'BOOLEAN_OPERATORS' : 2,
    'RELATION_OPERATORS' : 1,
    'NOT_DEFINED' : 0
    }
|
|
365 |
|
|
366 |
class Expression(object):
    """Base class for all AST expression nodes."""

    PRECEDENCE = PRECEDENCES['NOT_DEFINED']
    KEY = 'base_expression'

    def __init__(self, ast):
        self.ast = ast      # owning ASTInterpreter (may be None)
        self.value = None   # result of the most recent eval()

    def get_title(self):
        """Return this expression's key string."""
        return self.KEY

    def eval(self, context):
        """Evaluate this node; the base class does nothing."""
        pass
|
|
378 |
|
|
379 |
class OneParamExpression(Expression):
    """Expression node with a single operand; subclasses provide OP."""

    PARAM_COUNT = 1

    def __init__(self, ast, expression):
        super(OneParamExpression, self).__init__(ast)
        self.expression = expression

    def __unicode__(self):
        return u'%s %s' % (self.KEY, self.expression)

    def eval(self, context):
        """Evaluate the operand, apply OP and report the result to context."""
        operand = self.expression.eval(context)
        self.value = self.OP(operand)
        context.eval(self.ast, self, self.value)
        return self.value
|
|
392 |
|
|
393 |
class TwoOperatorExpression(Expression):
    """Expression node with two operands; subclasses provide OP."""

    PARAM_COUNT = 2
    OP = None
    EVAL_AS_BOOLS = True

    def __init__(self, ast, left, right):
        super(TwoOperatorExpression, self).__init__(ast)
        self.left = left
        self.right = right

    def __unicode__(self):
        return u'%s %s %s' % (self.left, self.KEY, self.right)

    def eval(self, context):
        """Evaluate left then right, apply OP, report result to context."""
        lval = self.left.eval(context)
        rval = self.right.eval(context)
        self.value = self.OP(lval, rval)
        context.eval(self.ast, self, self.value)
        return self.value
|
|
410 |
|
|
411 |
class TwoOperatorBooleanExpression(TwoOperatorExpression):
    """Two-operand expression whose operands are coerced to bool before OP."""

    def eval(self, context):
        lval = bool(self.left.eval(context))
        rval = bool(self.right.eval(context))
        self.value = self.OP(lval, rval)
        context.eval(self.ast, self, self.value)
        return self.value
|
|
416 |
|
|
417 |
class TerminalExpression(Expression):
    """Leaf node: a feature reference or a literal value."""

    KEY = 'terminal'

    def __init__(self, ast, expression):
        super(TerminalExpression, self).__init__(ast)
        self.expression = expression

    def __unicode__(self):
        return self.expression

    def __repr__(self):
        return self.expression

    def eval(self, context):
        """Delegate to the context to resolve this terminal.

        The expression is a feature reference or a value; the context
        handles conversion of a reference to its correct value.
        """
        self.value = context.handle_terminal(self.expression)
        return self.value
|
|
437 |
|
|
438 |
class NegExpression(OneParamExpression):
    # Arithmetic negation prefix operator: '-x'.
    PRECEDENCE = PRECEDENCES['PREFIX_OPERATORS']
    KEY= '-'
    OP = ops.neg
|
|
442 |
|
|
443 |
# Boolean connectives; operands are coerced to bool by the base class.
class AndExpression(TwoOperatorBooleanExpression):
    PRECEDENCE = PRECEDENCES['BOOLEAN_OPERATORS']
    KEY= 'and'
    OP = truth_and

class NandExpression(TwoOperatorBooleanExpression):
    PRECEDENCE = PRECEDENCES['BOOLEAN_OPERATORS']
    KEY = 'nand'
    OP = nand

class OrExpression(TwoOperatorBooleanExpression):
    PRECEDENCE = PRECEDENCES['BOOLEAN_OPERATORS']
    KEY = 'or'
    OP = ops.or_

class XorExpression(TwoOperatorBooleanExpression):
    PRECEDENCE = PRECEDENCES['BOOLEAN_OPERATORS']
    KEY = 'xor'
    OP = ops.xor

class NorExpression(TwoOperatorBooleanExpression):
    PRECEDENCE = PRECEDENCES['BOOLEAN_OPERATORS']
    KEY = 'nor'
    OP = nor
|
|
467 |
|
|
468 |
# Comparison operators; each delegates to the corresponding 'operator'
# module function on the raw (non-boolified) operand values.
class EqualExpression(TwoOperatorExpression):
    PRECEDENCE = PRECEDENCES['COMPARISON_OPERATORS']
    KEY = '=='
    OP = ops.eq

class NotEqualExpression(TwoOperatorExpression):
    PRECEDENCE = PRECEDENCES['COMPARISON_OPERATORS']
    KEY = '!='
    OP = ops.ne

class LessThanExpression(TwoOperatorExpression):
    PRECEDENCE = PRECEDENCES['COMPARISON_OPERATORS']
    KEY = '<'
    OP = ops.lt

class GreaterThanExpression(TwoOperatorExpression):
    PRECEDENCE = PRECEDENCES['COMPARISON_OPERATORS']
    KEY = '>'
    OP = ops.gt

class LessThanEqualExpression(TwoOperatorExpression):
    PRECEDENCE = PRECEDENCES['COMPARISON_OPERATORS']
    KEY = '<='
    OP = ops.le

class GreaterThanEqualExpression(TwoOperatorExpression):
    PRECEDENCE = PRECEDENCES['COMPARISON_OPERATORS']
    KEY = '>='
    OP = ops.ge
|
|
497 |
|
|
498 |
|
|
499 |
def handle_require(expression, left, right):
    """OP for 'requires': fails only when left holds and right does not."""
    if not left:
        # Nothing is required when the left side does not hold.
        return True
    if right:
        return True
    return False
|
|
505 |
|
|
506 |
class RequireExpression(TwoOperatorExpression):
    # 'left requires right': fails only when left holds and right does not.
    # NOTE(review): OP is a plain function stored as a class attribute; under
    # Python 2 it is invoked as an unbound method, so 'expression' receives
    # the RequireExpression instance — confirm before porting to Python 3.
    PRECEDENCE = PRECEDENCES['RELATION_OPERATORS']
    KEY = 'requires'
    OP = handle_require

    def eval(self, context):
        # Evaluate via the base class; on failure record an error for every
        # reference appearing on the left-hand side of the relation.
        super(RequireExpression, self).eval(context)
        if not self.value:
            left_keys = []
            for ref in self.ast.extract_refs(unicode(self.left)):
                for key in context.get_keys(ref):
                    left_keys.append(key)

            for key in left_keys:
                self.ast.add_error(key, { 'error_string' : 'REQUIRES right side value is "False"',
                                          'left_key' : key,
                                          'rule' : self.ast.expression
                                        })
        return self.value
|
|
525 |
|
|
526 |
def handle_exclude(expression, left, right):
    """OP for 'excludes': fails only when left and right both hold."""
    if not left:
        # Nothing is excluded when the left side does not hold.
        return True
    if not right:
        return True
    return False
|
|
532 |
|
|
533 |
class ExcludeExpression(TwoOperatorExpression):
    # 'left excludes right': fails only when both sides hold.
    # NOTE(review): OP is a plain function stored as a class attribute; under
    # Python 2 it is invoked as an unbound method, so 'expression' receives
    # the ExcludeExpression instance — confirm before porting to Python 3.
    PRECEDENCE = PRECEDENCES['RELATION_OPERATORS']
    KEY = 'excludes'
    OP = handle_exclude

    def eval(self, context):
        # Evaluate via the base class; on failure record an error for every
        # reference appearing on the left-hand side of the relation.
        super(ExcludeExpression, self).eval(context)
        if not self.value:
            left_keys = []
            for ref in self.ast.extract_refs(unicode(self.left)):
                for key in context.get_keys(ref):
                    left_keys.append(key)

            for key in left_keys:
                self.ast.add_error(key, { 'error_string' : 'EXCLUDE right side value is "True"',
                                          'left_key' : key,
                                          'rule' : self.ast.expression
                                        })
        return self.value
|
|
552 |
|
|
553 |
|
|
554 |
class NotExpression(OneParamExpression):
    # Boolean negation prefix operator: 'not x'.
    PRECEDENCE = PRECEDENCES['PREFIX_OPERATORS']
    KEY = 'not'
    OP = ops.not_

class TruthExpression(OneParamExpression):
    # Truth-test prefix operator: coerces its operand to True/False.
    PRECEDENCE = PRECEDENCES['PREFIX_OPERATORS']
    KEY = 'truth'
    OP = ops.truth
|
|
563 |
|
|
564 |
# Parenthesis tokens recognized by the infix-to-postfix parser.
LEFT_PARENTHESIS = '('
RIGHT_PARENTHESIS = ')'
|
|
566 |
class SimpleCondition(EqualExpression):
    """
    A simple condition that can refer to a model object and evaluate
    whether its value matches the given one.
    """
    def __init__(self, left, right):
        # Wrap both sides as terminal nodes and compare them with '=='.
        lterm = TerminalExpression(None, left)
        rterm = TerminalExpression(None, right)
        EqualExpression.__init__(self, None, lterm, rterm)
|
|
574 |
|
|
575 |
|
|
576 |
# Operator lookup table, in format KEY : OPERATOR CLASS.
OPERATORS = {
    'and' : AndExpression,
    'nand' : NandExpression,
    'or' : OrExpression,
    'xor' : XorExpression,
    'nor' : NorExpression,
    'not' : NotExpression,
    'truth' : TruthExpression,
    '==' : EqualExpression,
    '!=' : NotEqualExpression,
    '<' : LessThanExpression,
    '>' : GreaterThanExpression,
    '<=' : LessThanEqualExpression,
    '>=' : GreaterThanEqualExpression,
    'requires' : RequireExpression,
    'excludes' : ExcludeExpression,
    '-' : NegExpression
    }
|
|
595 |
|
|
596 |
def add_operator(key, operator_class=None, baseclass=RequireExpression):
    """
    Register a new operator key with an operator class.

    When no operator class is provided, one is generated from the
    baseclass parameter (RequireExpression by default, whose success
    condition is left_rule=True and right_rule=True).
    """
    if not operator_class:
        operator_class = create_new_class(key, baseclass)
    OPERATORS[key] = operator_class
|
|
605 |
|
|
606 |
def create_new_class(key, baseclass):
    """Build a subclass of *baseclass* whose KEY attribute is *key*.

    The class name is derived from the key: 'my_op' -> 'MyOpExpression'.
    """
    namespace = dict(baseclass.__dict__)
    namespace['KEY'] = key
    prefix = ''.join([piece.capitalize() for piece in key.split('_')])
    return type(prefix + 'Expression', (baseclass,), namespace)
|
|
613 |
|
|
614 |
class ParseException(Exception):
    """Raised when an infix rule expression cannot be parsed."""
|
|
615 |
|
|
616 |
class ASTInterpreter(object):
    """Parses infix rule expressions and evaluates the resulting AST.

    The expression string is tokenized, converted to postfix notation
    with the shunting-yard algorithm, and built into a parse tree of
    Expression nodes that can be evaluated against a context.
    """

    def __init__(self, infix_expression=None, context=None):
        """ Takes infix expression as string """
        self.context = context or DefaultContext(None)
        # logger.debug("AST init context: %s" % self.context)
        self._init_locals(infix_expression)
        if infix_expression:
            self.create_ast()

    def _init_locals(self, infix_expression):
        """Reset all parsing/evaluation state for a new expression."""
        # The result value of full eval of the parse_tree
        self.value = None
        self.warnings = {}
        self.errors = {}
        self.postfix_array = []
        self.parse_tree = []
        self.expression = infix_expression

    def __unicode__(self):
        s = ''
        for expr in self.parse_tree:
            s += unicode(expr)
        return s

    def add_error(self, key, error_dict):
        """Record an error dict under the given reference key."""
        # was self.errors.has_key(key): deprecated and removed in Python 3
        if key in self.errors:
            self.errors[key].append(error_dict)
        else:
            self.errors[key] = [error_dict]

    def create_ast(self, infix_expression=None):
        """Build (or rebuild) the parse tree and return it."""
        if infix_expression:
            self._init_locals(infix_expression)
        self._infix_to_postfix()
        self._create_parse_tree()
        return self.parse_tree

    def _infix_to_postfix(self):
        """
        Shunting yard algorithm used to convert infix presentation to postfix.

        @raise ParseException: on empty expression or mismatched parenthesis.
        """
        if not self.expression:
            raise ParseException('Expression is None')
        tokens = get_tokens(self.expression)
        stack = []
        for token in tokens:
            if token.lower() in OPERATORS:
                # Normalize operator tokens to lowercase: the original code
                # matched case-insensitively here but then looked up the raw
                # token, so a mixed-case operator (e.g. 'AND') produced None
                # and crashed on the PRECEDENCE access below.
                op = token.lower()
                op_class = OPERATORS[op]
                # Pop operators with higher-or-equal precedence to the output.
                while len(stack) != 0:
                    top = stack[-1]
                    if top in OPERATORS:
                        top_operator = OPERATORS[top]
                        if op_class.PRECEDENCE <= top_operator.PRECEDENCE:
                            self.postfix_array.append(stack.pop())
                        else:
                            # Break from loop if top operator precedence is less.
                            break
                    else:
                        # If top is not an operator, break from loop.
                        break
                stack.append(op)
            elif token == LEFT_PARENTHESIS:
                stack.append(token)
            elif token == RIGHT_PARENTHESIS:
                # Pop until the matching left parenthesis is found.
                left_par_found = False
                stack_token = stack.pop()
                while stack_token:
                    if stack_token != LEFT_PARENTHESIS:
                        self.postfix_array.append(stack_token)
                    else:
                        left_par_found = True
                        break
                    if stack:
                        stack_token = stack.pop()
                    else:
                        stack_token = None

                if not left_par_found:
                    raise ParseException('Mismatched parenthesis "%s".' % LEFT_PARENTHESIS)
            else:
                # Plain value/reference token goes straight to the output.
                self.postfix_array.append(token)

        # There should be only operators left in the stack.
        while stack:
            operator = stack.pop()
            if operator != LEFT_PARENTHESIS:
                self.postfix_array.append(operator)
            else:
                raise ParseException('Mismatched parenthesis "%s".' % LEFT_PARENTHESIS)

        # logger.debug('Infix to postfix conversion: %s' % self.postfix_array)
        return self.postfix_array

    def _create_parse_tree(self):
        """Build Expression nodes from the postfix token array."""
        self.parse_tree = []
        for token in self.postfix_array:
            if token in OPERATORS:
                expression_class = OPERATORS[token]
                params = []
                for i in range(expression_class.PARAM_COUNT):
                    try:
                        params.append(self.parse_tree.pop())
                    except IndexError:
                        # Not enough operands on the stack for this operator.
                        raise ParseException('Syntax error: "%s"' % self.expression)
                params.reverse()
                expression = expression_class(self, *params)
                self.parse_tree.append(expression)
            else:
                expression = TerminalExpression(self, token)
                self.parse_tree.append(expression)

        return self.parse_tree

    def eval(self):
        """ Evals the AST.
        If an empty expression was given, None is returned.
        """
        for expression in self.parse_tree:
            self.value = expression.eval(self.context)
        return self.value

    @staticmethod
    def extract_refs(expression):
        """Return the non-operator, non-parenthesis tokens of an expression."""
        tokens = get_tokens(expression)
        refs = []
        for token in tokens:
            if not token.lower() in OPERATORS and token != LEFT_PARENTHESIS and token != RIGHT_PARENTHESIS:
                refs.append(token.strip('%s%s' % (LEFT_PARENTHESIS, RIGHT_PARENTHESIS)))
        return refs
|
|
764 |
|
|
765 |
##################################################################
# Create and configure the main level logger
# (module-level logger named 'cone'; referenced by the commented-out
# logger.debug traces throughout this module)
logger = logging.getLogger('cone')
|