Package libxyz :: Package core :: Module fsrule
[hide private]
[frames] | [no frames]

Source Code for Module libxyz.core.fsrule

  1  # -*- coding: utf-8 -*- 
  2  # 
  3  # Max E. Kuznecov <syhpoon@syhpoon.name> 2008 
  4  # 
  5  # This file is part of XYZCommander. 
  6  # XYZCommander is free software: you can redistribute it and/or modify 
  7  # it under the terms of the GNU Lesser Public License as published by 
  8  # the Free Software Foundation, either version 3 of the License, or 
  9  # (at your option) any later version. 
 10  # XYZCommander is distributed in the hope that it will be useful, 
 11  # but WITHOUT ANY WARRANTY; without even the implied warranty of 
 12  # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 
 13  # GNU Lesser Public License for more details. 
 14  # You should have received a copy of the GNU Lesser Public License 
 15  # along with XYZCommander. If not, see <http://www.gnu.org/licenses/>. 
 16   
 17  import stat 
 18  import pwd 
 19  import grp 
 20  import re 
 21   
 22  import libxyz.parser as parser 
 23   
 24  from libxyz.exceptions import XYZValueError 
 25  from libxyz.exceptions import LexerError 
 26  from libxyz.exceptions import FSRuleError 
 27  from libxyz.vfs.vfsobj import  VFSObject 
 28  from libxyz.vfs.types import * 
 29  from libxyz.core.utils import ustring, is_func 
class FSRule(parser.BaseParser):
    """
    FS rule parser

    Rule syntax is following:

    rule ::= expr $
           | expr op rule
    expr ::= expr_body
           | NOT expr_body
           | "(" rule ")"
    expr_body ::= ftype "{" ARG "}"
    op ::= AND | OR
    ftype ::= TYPE | PERM | OWNER | NAME | SIZE
            | LINK_TYPE | LINK_PERM | LINK_OWNER | LINK_NAME
            | LINK_EXISTS | LINK_SIZE

    Examples:

    type{file} and perm{+0111}
    (owner{user} and not owner{:group}) or owner{root}
    """

    # Tokens
    # Terminal keywords recognized by the lexer
    TOKEN_TYPE = "type"
    TOKEN_PERM = "perm"
    TOKEN_OWNER = "owner"
    TOKEN_NAME = "name"
    TOKEN_INAME = "iname"
    TOKEN_SIZE = "size"
    TOKEN_LINK_TYPE = "link_type"
    TOKEN_LINK_PERM = "link_perm"
    TOKEN_LINK_OWNER = "link_owner"
    TOKEN_LINK_NAME = "link_name"
    TOKEN_LINK_INAME = "link_iname"
    TOKEN_LINK_EXISTS = "link_exists"
    TOKEN_LINK_SIZE = "link_size"
    TOKEN_AND = "and"
    TOKEN_OR = "or"
    TOKEN_NOT = "not"
    TOKEN_OPEN_BR = "{"
    TOKEN_CLOSE_BR = "}"
    TOKEN_OPEN_PAR = "("
    TOKEN_CLOSE_PAR = ")"
    # Sentinel pseudo-token types used in the action table:
    # TOKEN_DEFAULT matches any lookahead, TOKEN_ARG marks a free-form
    # argument between braces, EOF marks end of input
    TOKEN_DEFAULT = True
    TOKEN_ARG = False
    EOF = None

    # Registries for tokens added at runtime via extend()
    TOKENS_EXTENDED = []
    TRANSFORM_EXTENDED = {}

    # All terminal token types known to the parser
    TOKENS = [TOKEN_TYPE, TOKEN_PERM, TOKEN_OWNER, TOKEN_NAME, TOKEN_INAME,
              TOKEN_LINK_TYPE, TOKEN_LINK_PERM, TOKEN_LINK_OWNER,
              TOKEN_LINK_NAME, TOKEN_LINK_INAME, TOKEN_LINK_EXISTS,
              TOKEN_AND, TOKEN_OR, TOKEN_NOT, TOKEN_OPEN_BR, TOKEN_CLOSE_BR,
              TOKEN_OPEN_PAR, TOKEN_CLOSE_PAR, TOKEN_DEFAULT,
              TOKEN_SIZE, TOKEN_LINK_SIZE, EOF]

    # Nonterminals
    NTOKEN_START = 100
    NTOKEN_RULE = 101
    NTOKEN_EXPR = 102
    NTOKEN_EXPR_BODY = 103
    NTOKEN_OP = 104
    NTOKEN_FTYPE = 105

    # Tokens that can appear as the ftype part of expr_body
    FTYPE = [TOKEN_TYPE,
             TOKEN_PERM,
             TOKEN_OWNER,
             TOKEN_NAME,
             TOKEN_INAME,
             TOKEN_SIZE,
             TOKEN_LINK_TYPE,
             TOKEN_LINK_PERM,
             TOKEN_LINK_OWNER,
             TOKEN_LINK_NAME,
             TOKEN_LINK_INAME,
             TOKEN_LINK_EXISTS,
             TOKEN_LINK_SIZE,
             ]

    # Binary operators joining two rules
    INFIX_OP = (TOKEN_AND, TOKEN_OR)
@classmethod
def extend(cls, token, trans_func, match_func):
    """
    Extend FSRule parser with new expressions

    @param token: new token expression
    @param trans_func: Transformation function
    @param match_func: Match function
    """

    # Refuse to shadow an already-known token, built-in or extended
    _registries = (cls.TOKENS_EXTENDED, cls.TOKENS, cls.FTYPE)

    if any(token in _reg for _reg in _registries):
        raise FSRuleError(_(u"Error extending FSRule: "
                            u"token %s already registered") % token)

    if not (callable(trans_func) and callable(match_func)):
        raise FSRuleError(_(u"Error extending FSRule: "
                            u"trans_func and match_func arguments "
                            u"must be functions."))

    # Register the token in every token registry
    for _reg in _registries:
        _reg.append(token)

    # Transformation is applied to the raw {} argument at parse time
    cls.TRANSFORM_EXTENDED[token] = trans_func

    # Matching is delegated to Expression at match time
    Expression.extend(token, match_func)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

@classmethod
def unextend(cls, token):
    """
    Remove extended expression from parser

    Returns False when the token was never registered via extend(),
    otherwise the result of Expression.unextend().
    """

    if token not in cls.TOKENS_EXTENDED:
        return False

    # Drop the token from every registry it may appear in
    for _reg in (cls.TOKENS_EXTENDED, cls.TOKENS, cls.FTYPE):
        try:
            _reg.remove(token)
        except ValueError:
            pass

    # pop() with a default is a no-op when the key is already gone
    cls.TRANSFORM_EXTENDED.pop(token, None)

    return Expression.unextend(token)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

def __init__(self, rule):
    """
    @param rule: String rule
    """

    super(FSRule, self).__init__()

    # Original rule text, kept for diagnostics
    self.raw_rule = rule

    # LR parse stack: alternating (token, state) entries
    self._stack = []
    # Set by _accept() once the start symbol has been recognized
    self._done = False
    # Expression object currently being assembled by _reduce()
    self._cur_obj = None
    # Resulting expression tree; nested Trees correspond to
    # parenthesized subrules
    self._expressions = parser.lr.Tree()
    # Subtree currently being filled
    self._exp_pointer = self._expressions
    # Parent subtrees saved while inside parentheses
    self._exp_stack = []

    # Action table: (state, lookahead terminal) -> (handler, argument)
    # where handler is _shift (argument: next state), _reduce
    # (argument: rule number) or _accept
    self._action = parser.lr.ActionTable()

    _s = self._shift
    _r = self._reduce

    # State 0: beginning of a rule
    self._action.add(0, self.TOKEN_TYPE, (_s, 2))
    self._action.add(0, self.TOKEN_PERM, (_s, 3))
    self._action.add(0, self.TOKEN_OWNER, (_s, 4))
    self._action.add(0, self.TOKEN_NAME, (_s, 5))
    self._action.add(0, self.TOKEN_INAME, (_s, 5))
    self._action.add(0, self.TOKEN_SIZE, (_s, 27))
    self._action.add(0, self.TOKEN_LINK_TYPE, (_s, 27))
    self._action.add(0, self.TOKEN_LINK_PERM, (_s, 27))
    self._action.add(0, self.TOKEN_LINK_OWNER, (_s, 27))
    self._action.add(0, self.TOKEN_LINK_NAME, (_s, 27))
    self._action.add(0, self.TOKEN_LINK_INAME, (_s, 27))
    self._action.add(0, self.TOKEN_LINK_EXISTS, (_s, 27))
    self._action.add(0, self.TOKEN_LINK_SIZE, (_s, 27))
    self._action.add(0, self.TOKEN_NOT, (_s, 1))
    self._action.add(0, self.TOKEN_OPEN_PAR, (_s, 6))

    # State 1: after NOT -- only an expr_body may follow
    # (note: no NOT/OPEN_PAR entries, so "not not x" and
    # "not (...)" are rejected)
    self._action.add(1, self.TOKEN_TYPE, (_s, 2))
    self._action.add(1, self.TOKEN_PERM, (_s, 3))
    self._action.add(1, self.TOKEN_OWNER, (_s, 4))
    self._action.add(1, self.TOKEN_NAME, (_s, 5))
    self._action.add(1, self.TOKEN_INAME, (_s, 5))
    self._action.add(1, self.TOKEN_SIZE, (_s, 27))
    self._action.add(1, self.TOKEN_LINK_TYPE, (_s, 27))
    self._action.add(1, self.TOKEN_LINK_PERM, (_s, 27))
    self._action.add(1, self.TOKEN_LINK_OWNER, (_s, 27))
    self._action.add(1, self.TOKEN_LINK_NAME, (_s, 27))
    self._action.add(1, self.TOKEN_LINK_INAME, (_s, 27))
    self._action.add(1, self.TOKEN_LINK_EXISTS, (_s, 27))
    self._action.add(1, self.TOKEN_LINK_SIZE, (_s, 27))

    # States 2-5, 27: reduce the just-shifted keyword to ftype
    self._action.add(2, self.TOKEN_DEFAULT, (_r, 10))
    self._action.add(3, self.TOKEN_DEFAULT, (_r, 11))
    self._action.add(4, self.TOKEN_DEFAULT, (_r, 12))
    self._action.add(5, self.TOKEN_DEFAULT, (_r, 13))

    # State 6: after "(" -- same expectations as state 0
    self._action.add(6, self.TOKEN_TYPE, (_s, 2))
    self._action.add(6, self.TOKEN_PERM, (_s, 3))
    self._action.add(6, self.TOKEN_OWNER, (_s, 4))
    self._action.add(6, self.TOKEN_NAME, (_s, 5))
    self._action.add(6, self.TOKEN_INAME, (_s, 5))
    self._action.add(6, self.TOKEN_SIZE, (_s, 27))
    self._action.add(6, self.TOKEN_LINK_TYPE, (_s, 27))
    self._action.add(6, self.TOKEN_LINK_PERM, (_s, 27))
    self._action.add(6, self.TOKEN_LINK_OWNER, (_s, 27))
    self._action.add(6, self.TOKEN_LINK_NAME, (_s, 27))
    self._action.add(6, self.TOKEN_LINK_INAME, (_s, 27))
    self._action.add(6, self.TOKEN_LINK_EXISTS, (_s, 27))
    self._action.add(6, self.TOKEN_LINK_SIZE, (_s, 27))
    self._action.add(6, self.TOKEN_NOT, (_s, 1))
    self._action.add(6, self.TOKEN_OPEN_PAR, (_s, 6))

    # State 7: complete rule followed by end of input
    self._action.add(7, self.EOF, (_s, 14))
    self._action.add(8, self.TOKEN_DEFAULT, (_r, 1))

    # State 9: after expr -- either an infix op or end of rule
    self._action.add(9, self.TOKEN_AND, (_s, 15))
    self._action.add(9, self.TOKEN_OR, (_s, 16))
    self._action.add(9, self.TOKEN_DEFAULT, (_r, 2))

    self._action.add(10, self.TOKEN_DEFAULT, (_r, 4))
    self._action.add(11, self.TOKEN_OPEN_BR, (_s, 18))
    self._action.add(12, self.TOKEN_DEFAULT, (_r, 5))
    self._action.add(13, self.TOKEN_CLOSE_PAR, (_s, 19))
    # State 14: EOF shifted -- parsing is complete
    self._action.add(14, self.TOKEN_DEFAULT, (self._accept, None))
    self._action.add(15, self.TOKEN_DEFAULT, (_r, 8))
    self._action.add(16, self.TOKEN_DEFAULT, (_r, 9))

    # State 17: after an infix op -- expect the right-hand rule
    self._action.add(17, self.TOKEN_TYPE, (_s, 2))
    self._action.add(17, self.TOKEN_PERM, (_s, 3))
    self._action.add(17, self.TOKEN_OWNER, (_s, 4))
    self._action.add(17, self.TOKEN_NAME, (_s, 5))
    self._action.add(17, self.TOKEN_INAME, (_s, 5))
    self._action.add(17, self.TOKEN_SIZE, (_s, 27))
    self._action.add(17, self.TOKEN_LINK_TYPE, (_s, 27))
    self._action.add(17, self.TOKEN_LINK_PERM, (_s, 27))
    self._action.add(17, self.TOKEN_LINK_OWNER, (_s, 27))
    self._action.add(17, self.TOKEN_LINK_NAME, (_s, 27))
    self._action.add(17, self.TOKEN_LINK_INAME, (_s, 27))
    self._action.add(17, self.TOKEN_LINK_EXISTS, (_s, 27))
    self._action.add(17, self.TOKEN_LINK_SIZE, (_s, 27))
    self._action.add(17, self.TOKEN_NOT, (_s, 1))
    self._action.add(17, self.TOKEN_OPEN_PAR, (_s, 6))

    # States 18-26: ftype "{" ARG "}" recognition, with and without NOT
    self._action.add(18, self.TOKEN_ARG, (_s, 21))
    self._action.add(19, self.TOKEN_DEFAULT, (_r, 6))
    self._action.add(20, self.TOKEN_DEFAULT, (_r, 3))
    self._action.add(21, self.TOKEN_CLOSE_BR, (_s, 22))
    self._action.add(22, self.TOKEN_DEFAULT, (_r, 7))
    self._action.add(23, self.TOKEN_OPEN_BR, (_s, 24))
    self._action.add(24, self.TOKEN_ARG, (_s, 25))
    self._action.add(25, self.TOKEN_CLOSE_BR, (_s, 26))
    self._action.add(26, self.TOKEN_DEFAULT, (_r, 14))
    # State 27: shared state for size/link_* keywords, rule 131
    self._action.add(27, self.TOKEN_DEFAULT, (_r, 131))

    # For extended functionality: extended tokens behave like the
    # built-in ftype keywords handled through state 27
    for _ext_token in self.TOKENS_EXTENDED:
        for _state in (0, 1, 6, 17):
            self._action.add(_state, _ext_token, (_s, 27))

    # Grammar rules: rule number -> (nonterminal, RHS length)
    self._rules = parser.lr.Rules()

    self._rules.add(1, self.NTOKEN_START, 1)
    self._rules.add(2, self.NTOKEN_RULE, 1)
    self._rules.add(3, self.NTOKEN_RULE, 3)
    self._rules.add(4, self.NTOKEN_EXPR, 1)
    self._rules.add(5, self.NTOKEN_EXPR, 2)
    self._rules.add(6, self.NTOKEN_EXPR, 3)
    self._rules.add(7, self.NTOKEN_EXPR_BODY, 4)
    self._rules.add(8, self.NTOKEN_OP, 1)
    self._rules.add(9, self.NTOKEN_OP, 1)
    self._rules.add(10, self.NTOKEN_FTYPE, 1)
    self._rules.add(11, self.NTOKEN_FTYPE, 1)
    self._rules.add(12, self.NTOKEN_FTYPE, 1)
    self._rules.add(13, self.NTOKEN_FTYPE, 1)
    self._rules.add(14, self.NTOKEN_EXPR_BODY, 5)
    self._rules.add(131, self.NTOKEN_FTYPE, 1)

    # Goto table: (state, reduced nonterminal) -> next state
    self._goto = parser.lr.GotoTable()

    self._goto.add(0, self.NTOKEN_START, 7)
    self._goto.add(0, self.NTOKEN_RULE, 8)
    self._goto.add(0, self.NTOKEN_EXPR, 9)
    self._goto.add(0, self.NTOKEN_EXPR_BODY, 10)
    self._goto.add(0, self.NTOKEN_FTYPE, 11)

    self._goto.add(1, self.NTOKEN_EXPR_BODY, 10)
    self._goto.add(1, self.NTOKEN_FTYPE, 23)

    self._goto.add(6, self.NTOKEN_RULE, 13)
    self._goto.add(6, self.NTOKEN_EXPR, 9)
    self._goto.add(6, self.NTOKEN_EXPR_BODY, 10)
    self._goto.add(6, self.NTOKEN_FTYPE, 11)

    self._goto.add(9, self.NTOKEN_OP, 17)

    self._goto.add(17, self.NTOKEN_RULE, 20)
    self._goto.add(17, self.NTOKEN_EXPR, 9)
    self._goto.add(17, self.NTOKEN_EXPR_BODY, 10)
    self._goto.add(17, self.NTOKEN_FTYPE, 11)

    # Tokens pushed back by _reduce() for re-dispatch
    self._unget = []
    # Parse immediately; raises on invalid rules
    self._chain = self._parse(rule)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

def match(self, obj):
    """
    Match given object against rule

    @param obj: VFSObject instance
    @return: True if matches and False otherwise
    """

    # Guard clause: only VFS objects can be matched
    if isinstance(obj, VFSObject):
        return self._match(obj, self._expressions)

    raise XYZValueError(_(u"Invalid argument type: %s, "
                          u"VFSObject expected") % type(obj))
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

def _match(self, obj, _expressions):
    """
    Recursively evaluate a parsed expression sequence against obj.

    _expressions holds Expression instances, nested parser.lr.Tree
    subtrees (parenthesized subrules) and the operator markers
    "AND"/"OR". Evaluation is left-to-right with short-circuiting.

    @param obj: object being matched
    @param _expressions: iterable of expressions/operators
    @return: boolean result; None when nothing was evaluated
             (fix: previously an empty sequence raised
             UnboundLocalError on `_r`)
    """

    _op = None
    _res = None
    _r = None  # last evaluated sub-result

    for exp in _expressions:
        # Operator markers only set the pending operator
        if exp in ("AND", "OR"):
            _op = exp
            continue

        if isinstance(exp, parser.lr.Tree):
            # Recursive match subrule
            _r = self._match(obj, exp)
        else:
            _r = exp.match(obj)

        if _res is not None:
            if _op == "AND":
                _res = _res and _r

                # Short-circuit: do not continue if got false on AND
                # expression
                if not _res:
                    break
            elif _op == "OR":
                _res = _res or _r

                # Short-circuit: do not continue if got true on OR
                # expression
                if _res:
                    break
        else:
            _res = _r

        _op = None

    if _res is None:
        return _r
    else:
        return _res
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

def _parse(self, rule):
    """
    Parse rule

    Drives the LR automaton: fetches tokens from the lexer (or the
    unget buffer), classifies them, and dispatches the corresponding
    shift/reduce/accept handler until _accept() sets self._done.
    Raises via self.error() on any lexer or parse failure.
    """

    # Initial state
    self._stack.append(0)

    # Characters the lexer treats as stand-alone tokens
    _tokens = (self.TOKEN_OPEN_PAR,
               self.TOKEN_CLOSE_PAR,
               self.TOKEN_OPEN_BR,
               self.TOKEN_CLOSE_BR,
               u"=", u",")

    # "#" starts a comment; escaping allows literal braces etc.
    self._lexer = parser.Lexer(rule, _tokens, u"#")
    self._lexer.escaping_on()

    try:
        while True:
            if self._done:
                break

            # Prefer tokens pushed back by _reduce()
            if self._unget:
                _tok = self._unget.pop()
            else:
                _res = self._lexer.lexer()

                # Lexer yields (type, value) pairs; None means EOF
                if _res is not None:
                    _tok = _res[1]
                else:
                    _tok = _res

            # Anything that is not a known keyword is a free-form
            # argument (the text between braces)
            if _tok not in self.TOKENS:
                _tok_type = self.TOKEN_ARG
            else:
                _tok_type = _tok

            # Exact (state, token) entry first, then the state's
            # TOKEN_DEFAULT fallback; no entry at all is a parse error
            try:
                _f, _arg = self._action.get(self._stack[-1], _tok_type)
            except KeyError:
                try:
                    _f, _arg = self._action.get(self._stack[-1],
                                                self.TOKEN_DEFAULT)
                except KeyError:
                    self.error(_tok)

            _f(_tok, _arg)

    except LexerError, e:
        self.error(e)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

def _shift(self, token, state):
    """
    Push the token and the new automaton state onto the parse stack.

    Entering state 6 ("(") opens a nested expression subtree;
    state 19 (")") returns to the parent subtree.
    """

    self._stack.extend((token, state))

    if state == 6:  # (
        _sub = parser.lr.Tree()
        self._exp_pointer.add(_sub)
        # Remember the parent so ")" can restore it
        self._exp_stack.append(self._exp_pointer)
        self._exp_pointer = _sub
    elif state == 19:  # )
        if self._exp_stack:
            self._exp_pointer = self._exp_stack.pop()
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

def _reduce(self, token, rule):
    """
    Reduce stack by rule

    Pops the rule's right-hand side off the stack, builds/updates the
    current Expression as a side effect, pushes the resulting
    nonterminal and its goto state, and pushes the lookahead token
    back for re-dispatch.
    """

    # Built-in argument transformations, keyed by expression type;
    # link_* variants share the plain transformations
    _transform = {
        u"type": self._type,
        u"name": self._name,
        u"iname": self._iname,
        u"owner": self._owner,
        u"perm": self._perm,
        u"size": self._size,
        u"link_type": self._type,
        u"link_name": self._name,
        u"link_iname": self._iname,
        u"link_owner": self._owner,
        u"link_perm": self._perm,
        u"link_size": self._size,
        }

    try:
        _ntok, _len = self._rules.get(rule)
    except KeyError:
        self.error(token)

    if rule in (10, 11, 12, 13, 131):
        # ftype -> keyword: start a new expression; the keyword token
        # sits just below the state on top of the stack
        self._cur_obj = Expression()
        self._cur_obj.otype = self._stack[-2]
    elif rule in (7, 14):
        # expr_body -> [not] ftype { ARG }: the raw ARG token is 4
        # stack slots (2 token/state pairs) below the top
        _arg = self._stack[-4]
        _cur = self._cur_obj

        if _cur.otype in _transform:
            _cur.arg = _transform[_cur.otype](_arg)
        elif _cur.otype in self.TRANSFORM_EXTENDED:
            try:
                _cur.arg = self.TRANSFORM_EXTENDED[_cur.otype](_arg)
            except Exception, e:
                self.error(_(u"Error in calling extended transformation "\
                             u"function: %s") % unicode(e))
        else:
            # No transformation registered: keep the raw argument
            _cur.arg = _arg

        if rule == 14:
            # Rule 14 is the NOT variant of expr_body
            self._cur_obj.negative = True
    elif rule in (4, 5):
        # expr complete: attach it to the current subtree
        self._exp_pointer.add(self._cur_obj)
        self._cur_obj = None
    elif rule == 8:
        self._exp_pointer.add("AND")
    elif rule == 9:
        self._exp_pointer.add("OR")

    # Pop the RHS: each grammar symbol occupies two stack slots
    self._stack = self._stack[:(_len * -2)]
    _top = self._stack[-1]
    self._stack.append(_ntok)

    try:
        self._stack.append(self._goto.get(_top, _ntok))
    except KeyError:
        self.error(token)

    # Re-dispatch the lookahead token in the new state
    self._unget.append(token)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

def _accept(self, *args):
    """
    Terminate parsing: the start symbol has been recognized.
    Extra arguments are accepted (and ignored) so the handler fits
    the common (token, arg) dispatch signature.
    """

    self._done = True
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

def _type(self, arg):
    """
    Transform a type{} argument into a matchable object: either a
    VFSType class or, for the *_or_link2 variants, a predicate that
    also accepts a link pointing at an object of that type.
    """

    def _self_or_link_target(pred_name):
        # Matches the object itself, or a link whose target matches
        return lambda x: getattr(x, pred_name)() or \
                         (x.is_link() and getattr(x.data, pred_name)())

    _types = {
        u"file": VFSTypeFile,
        u"file_or_link2": _self_or_link_target("is_file"),
        u"dir": VFSTypeDir,
        u"dir_or_link2": _self_or_link_target("is_dir"),
        u"link": VFSTypeLink,
        u"socket": VFSTypeSocket,
        u"socket_or_link2": _self_or_link_target("is_socket"),
        u"fifo": VFSTypeFifo,
        u"fifo_or_link2": _self_or_link_target("is_fifo"),
        u"char": VFSTypeChar,
        u"char_or_link2": _self_or_link_target("is_char"),
        u"block": VFSTypeBlock,
        u"block_or_link2": _self_or_link_target("is_block"),
        }

    try:
        return _types[arg]
    except KeyError:
        self.error(_(u"Invalid type{} argument: %s") % arg)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

def _name(self, arg):
    """
    Compile a name{} argument into a case-sensitive unicode regexp.
    """

    return re.compile(arg, re.UNICODE)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

def _iname(self, arg):
    """
    Compile an iname{} argument into a case-insensitive unicode regexp.
    """

    return re.compile(arg, re.UNICODE | re.IGNORECASE)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

def _owner(self, arg):
    """
    Parse an owner{} argument of the form [user][:group].

    Numeric ids are used as-is; symbolic names are resolved via the
    pwd/grp databases. A missing part yields None.

    @return: (uid, gid) tuple; either element may be None
    """

    if not re.match(r"^(\w+)?(:(\w+))?$", arg):
        self.error(_(u"Invalid owner{} argument: %s") % arg)

    _user, _sep, _group = arg.partition(":")

    if _user == "":
        _uid = None
    elif _user.isdigit():
        _uid = int(_user)
    else:
        try:
            _uid = pwd.getpwnam(_user).pw_uid
        except (KeyError, TypeError):
            self.error(_(u"Invalid uid: %s") % _user)

    if _sep:
        if _group.isdigit():
            _gid = int(_group)
        else:
            try:
                _gid = grp.getgrnam(_group).gr_gid
            except (KeyError, TypeError):
                self.error(_(u"Invalid gid: %s") % _group)
    else:
        _gid = None

    return (_uid, _gid)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

def _perm(self, arg):
    """
    Parse a perm{} argument.

    Format: four octal digits, optionally prefixed with "+". A plain
    mode (e.g. 0644) must match exactly; a "+" prefixed mode
    (e.g. +0111) matches if any of the given bits are set.

    @return: (any_flag, mode) tuple
    """

    _any = False

    # Digits restricted to the octal range [0-7]: the previous \d{4}
    # pattern let e.g. perm{0998} through, making int(arg, 8) below
    # raise an unhandled ValueError instead of a clean parse error
    if not re.match(r"^\+?[0-7]{4}$", arg):
        self.error(_(u"Invalid perm{} argument: %s") % arg)

    if arg.startswith(u"+"):
        _any = True
        _perm = int(arg[1:], 8)
    else:
        _perm = int(arg, 8)

    return (_any, _perm)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

def _size(self, arg):
    """
    Parse a size{} argument.

    Format: [op] integer [modifier], where op is one of
    <, >, <=, >=, = (default =) and modifier is a power-of-1024
    suffix: B, K, M, G, T (case-insensitive).

    @return: (op, size_in_bytes) tuple
    """

    _bytes = {
        u"B": 1,
        u"K": 1024,
        u"M": 1024 * 1024,
        u"G": 1024 * 1024 * 1024,
        u"T": 1024 * 1024 * 1024 * 1024,
        }

    _re = re.match(r"^\s*([<>]?\=?)\s*(\d+)\s*([BbKkMmGgTt]?)\s*$", arg)

    if _re is None:
        self.error(_(u"Invalid size{} argument: %s") % arg)

    _op = _re.group(1) or u"="
    # int() suffices here: Python promotes to arbitrary precision as
    # needed, so the former long() call was redundant (and long() does
    # not exist in Python 3)
    _size = int(_re.group(2))
    _mod = _re.group(3) or None

    if _mod is not None:
        _size *= _bytes[_mod.upper()]

    return (_op, _size)
662 675 676 return _trans 677
#++++++++++++++++++++++++++++++++++++++++++++++++

class Expression(object):
    """
    FS rule expression class

    A single ftype{arg} atom of a parsed FSRule: holds the expression
    type (otype), the transformed argument (arg) and the negation flag
    set by a preceding 'not'.
    """

    # Match functions for tokens registered at runtime via extend()
    MATCH_EXTENDED = {}

    @classmethod
    def extend(cls, token, match_func):
        """
        Register a match function for an extended token.
        """

        cls.MATCH_EXTENDED[token] = match_func

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    @classmethod
    def unextend(cls, token):
        """
        Unregister an extended token's match function, if present.
        """

        cls.MATCH_EXTENDED.pop(token, None)

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def __init__(self):
        self.otype = None      # expression type token, e.g. u"perm"
        self.arg = None        # transformed argument
        self.negative = False  # True when prefixed with 'not'

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def match(self, vfsobj):
        """
        Check if object matches the rule

        @param vfsobj: object to match against this expression
        @return: boolean match result (inverted when self.negative)
        @raise FSRuleError: when no match function exists for otype or
                            an extended match function fails
        """

        def _match_type(obj, arg):
            # arg is either a VFSType class or a predicate callable
            # (the *_or_link2 forms)
            if is_func(arg):
                return arg(obj)
            else:
                return isinstance(obj.ftype, arg)

        #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        def _match_name(obj, arg):
            return arg.search(obj.name) is not None

        #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        def _match_iname(obj, arg):
            return arg.search(obj.name) is not None

        #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        def _match_owner(obj, arg):
            _uid, _gid = arg

            if _uid is not None and _gid is not None:
                if (obj.uid, obj.gid) == arg:
                    return True
            elif _uid is not None and obj.uid == _uid:
                return True
            elif _gid is not None and obj.gid == _gid:
                return True

            return False

        #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        def _match_perm(obj, arg):
            if obj.mode is None:
                return False

            _any, _m = arg
            _mode = stat.S_IMODE(obj.mode.raw)

            if _any:
                # +mode: match when any of the given bits are set
                return bool(_mode & _m)

            # Exact mode match. (Fix: a non-"+" mode previously also
            # fell through to the bitwise test, so e.g. perm{0644}
            # wrongly matched any mode sharing a bit with 0644.)
            return _mode == _m

        #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        def _match_size(obj, args):
            if obj.size is None:
                return False

            _op, _size = args

            _cmp = {u">": lambda x, y: x > y,
                    u">=": lambda x, y: x >= y,
                    u"<": lambda x, y: x < y,
                    u"<=": lambda x, y: x <= y,
                    u"=": lambda x, y: x == y,
                    }

            return _op in _cmp and _cmp[_op](obj.size, _size)

        #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        def _match_link_exists(obj, arg):
            # A link whose target was resolvable
            return isinstance(obj.ftype, VFSTypeLink) and \
                   obj.data is not None

        #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        # link() (module-level helper) adapts a matcher to run against
        # the link target instead of the link itself
        _match_link_type = link(_match_type)
        _match_link_name = link(_match_name)
        _match_link_iname = link(_match_iname)
        _match_link_owner = link(_match_owner)
        _match_link_perm = link(_match_perm)
        _match_link_size = link(_match_size)

        #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        _match_f = {
            u"type": _match_type,
            u"name": _match_name,
            u"iname": _match_iname,
            u"owner": _match_owner,
            u"perm": _match_perm,
            u"size": _match_size,
            u"link_type": _match_link_type,
            u"link_name": _match_link_name,
            u"link_iname": _match_link_iname,
            u"link_owner": _match_link_owner,
            u"link_perm": _match_link_perm,
            u"link_exists": _match_link_exists,
            u"link_size": _match_link_size,
            }

        if self.otype in _match_f:
            _res = _match_f[self.otype](vfsobj, self.arg)
        elif self.otype in self.MATCH_EXTENDED:
            try:
                _res = self.MATCH_EXTENDED[self.otype](vfsobj, self.arg)
            except Exception as e:
                # Fix: this path called self.error(), which Expression
                # does not define, so the real failure was masked by an
                # AttributeError; raise FSRuleError like the branch below
                raise FSRuleError(_(u"Error in calling extended match "
                                    u"function: %s") % unicode(e))
        else:
            raise FSRuleError(_(u"Unable to find match function for token: %s")
                              % self.otype)

        if self.negative:
            return not _res
        else:
            return _res

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def __str__(self):
        return "<FSRule expression: %s, %s, %s>" % \
               (self.otype, str(self.arg), str(self.negative))

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def __repr__(self):
        return self.__str__()