Package cssutils :: Module util
[hide private]
[frames] | [no frames]

Source Code for Module cssutils.util

  1  """base classes for css and stylesheets packages 
  2  """ 
  3  __all__ = [] 
  4  __docformat__ = 'restructuredtext' 
  5  __author__ = '$LastChangedBy: cthedot $' 
  6  __date__ = '$LastChangedDate: 2007-12-29 14:33:59 +0100 (Sa, 29 Dez 2007) $' 
  7  __version__ = '$LastChangedRevision: 758 $' 
  8   
  9  import re 
 10  import types 
 11  import xml.dom 
 12  import cssutils 
 13  from tokenize2 import Tokenizer 
 14   
class Seq(object):
    """
    (EXPERIMENTAL)
    a list like sequence of (value, type) used in some cssutils classes
    as property ``seq``

    behaves almost like a list but keeps extra attribute "type" for
    each value in the list

    types are token types like e.g. "COMMENT" (all uppercase, value='/*...*/')
    or productions like e.g. "universal" (all lowercase, value='*')
    """
    def __init__(self):
        # parallel lists: values[i] always has its matching type in types[i]
        self.values = []
        self.types = []

    def __contains__(self, item):
        "``item in seq`` checks values only, never types"
        return item in self.values

    def __delitem__(self, index):
        # BUGFIX: delete from *both* parallel lists; the old code removed
        # only values[index] which left every following type misaligned
        del self.values[index]
        del self.types[index]

    def __getitem__(self, index):
        "returns the value (not the type) at index"
        return self.values[index]

    def __setitem__(self, index, value_type):
        "might be set with tuple (value, type) or a single value"
        if isinstance(value_type, tuple):
            val = value_type[0]
            typ = value_type[1]
        else:
            # a bare value resets the type to None
            val = value_type
            typ = None
        self.values[index] = val
        self.types[index] = typ

    def __iter__(self):
        "returns an iterator for values"
        return iter(self.values)

    def __len__(self):
        "same as len(list)"
        return len(self.values)

    def __repr__(self):
        "returns a repr same as a list of tuples of (value, type)"
        return u'[%s]' % u',\n '.join([u'(%r, %r)' % (value, self.types[i])
                                       for i, value in enumerate(self.values)])

    def __str__(self):
        "returns a concatenated string of all typed, non-COMMENT values"
        items = []
        for i, value in enumerate(self.values):
            if self.types[i]:
                if self.types[i] != 'COMMENT':
                    items.append(value)
        # BUGFIX: join the collected values themselves; the old
        # ``u''.join(str(items))`` joined the characters of the list's
        # repr and therefore returned the repr string, not the values
        return u''.join(items)

    def append(self, value, type=None):
        """
        same as list.append but appends to the parallel (value, type) lists
        """
        self.values.append(value) # str(value)??? does not work if value is e.g. comment
        self.types.append(type)

    # TODO: should this be the default and the list the special case???
    def _get_values_types(self):
        return ((self.values[i], self.types[i]) for i in range(0, len(self)))

    _items = property(_get_values_types,
                      doc="EXPERIMENTAL: returns an iterator for (value, type) tuples")
86 87
class ListSeq(object):
    """
    (EXPERIMENTAL)
    A base class used for list classes like css.SelectorList or
    stylesheets.MediaList

    adds list like behaviour running on inhering class' property ``seq``

    - item in x => bool
    - len(x) => integer
    - get, set and del x[i]
    - for item in x
    - append(item)

    some methods must be overwritten in inheriting class
    """
    def __init__(self):
        # a plain list is enough here, no need for ``Seq``
        self.seq = []

    def __len__(self):
        return len(self.seq)

    def __contains__(self, item):
        return item in self.seq

    def __getitem__(self, index):
        return self.seq[index]

    def __delitem__(self, index):
        del self.seq[index]

    def __iter__(self):
        # yield each item from the underlying sequence
        for item in self.seq:
            yield item

    def __setitem__(self, index, item):
        "must be overwritten"
        raise NotImplementedError

    def append(self, item):
        "must be overwritten"
        raise NotImplementedError
129 130
class Base(object):
    """
    Base class for most CSS and StyleSheets classes

    Contains helper methods for inheriting classes helping parsing

    ``_normalize`` is static as used be Preferences.
    """
    # one shared tokenizer instance for all subclasses (class-private name)
    __tokenizer2 = Tokenizer()
    _log = cssutils.log
    _prods = cssutils.tokenize2.CSSProductions

    # for more on shorthand properties see
    # http://www.dustindiaz.com/css-shorthand/
    # format: shorthand: [(propname, mandatorycheck?)*]
    _SHORTHANDPROPERTIES = {
        u'background': [],
        u'border': [],
        u'border-left': [],
        u'border-right': [],
        u'border-top': [],
        u'border-bottom': [],
        u'border-color': [],
        u'border-style': [],
        u'border-width': [],
        u'cue': [],
        u'font': [('font-weight', True),
                  ('font-size', True),
                  ('line-height', False),
                  ('font-family', True)],
        u'list-style': [],
        u'margin': [],
        u'outline': [],
        u'padding': [],
        u'pause': []
        }

    # bound ``sub`` methods of precompiled patterns, called like
    # ``Base.__escapes(replacement, string)``
    # simple escapes, all non unicodes
    __escapes = re.compile(ur'(\\[^0-9a-fA-F])').sub
    # all unicode (see cssproductions "unicode")
    __unicodes = re.compile(ur'\\[0-9a-fA-F]{1,6}[\t|\r|\n|\f|\x20]?').sub

    @staticmethod
    def _normalize(x):
        """
        normalizes x, namely:

        - remove any \ before non unicode sequences (0-9a-zA-Z) so for
          x=="c\olor\" return "color" (unicode escape sequences should have
          been resolved by the tokenizer already)
        - lowercase
        """
        if x:
            def removeescape(matchobj):
                # strip the leading backslash from the matched escape
                return matchobj.group(0)[1:]
            x = Base.__escapes(removeescape, x)
            return x.lower()
        else:
            # falsy values (None, u'') are returned unchanged
            return x

    def _checkReadonly(self):
        "raises xml.dom.NoModificationAllowedErr if rule/... is readonly"
        if hasattr(self, '_readonly') and self._readonly:
            raise xml.dom.NoModificationAllowedErr(
                u'%s is readonly.' % self.__class__)
            # NOTE(review): unreachable after the raise above
            return True
        return False

    def _newseq(self):
        # used by Selector but should be used by most classes
        return Seq()

    def _valuestr(self, t):
        """
        returns string value of t (t may be a string, a list of token tuples
        or a single tuple in format (type, value, line, col) or a
        tokenlist[old])
        """
        if not t:
            return u''
        elif isinstance(t, basestring):
            return t
        elif isinstance(t, list) and isinstance(t[0], tuple):
            # list of (type, value, line, col) tuples: join the values
            return u''.join([x[1] for x in t])
        elif isinstance(t, tuple): # needed?
            return self._tokenvalue(t)
        else: # old
            # legacy token objects with a ``value`` attribute
            return u''.join([x.value for x in t])

    def _tokenize2(self, textortokens, aslist=False, fullsheet=False):
        """
        returns tokens of textortokens which may already be tokens in which
        case simply returns input
        """
        if not textortokens:
            return None
        if types.GeneratorType == type(textortokens) and not aslist:
            # already tokenized
            return textortokens
        if isinstance(textortokens, basestring):
            # raw CSS text: tokenize it now (aslist) or lazily (generator)
            if aslist:
                return [t for t in self.__tokenizer2.tokenize(
                    textortokens, fullsheet=fullsheet)]
            else:
                return self.__tokenizer2.tokenize(
                    textortokens, fullsheet=fullsheet)
        elif isinstance(textortokens, tuple):
            # a single token (like a comment)
            return [textortokens]
        else:
            # already tokenized but return generator
            return (x for x in textortokens)

    def _nexttoken(self, tokenizer, default=None):
        "returns next token in generator tokenizer or the default value"
        try:
            # Python 2 generator protocol; AttributeError covers
            # tokenizer being None or not a generator
            return tokenizer.next()
        except (StopIteration, AttributeError):
            return default

    def _type(self, token):
        "type of Tokenizer token"
        # token is a (type, value, line, col) tuple; None-safe
        if not token:
            return None
        else:
            return token[0]

    def _tokenvalue(self, token, normalize=False):
        "value of Tokenizer token"
        # token is a (type, value, line, col) tuple; None-safe
        if not token:
            return None
        elif normalize:
            return Base._normalize(token[1])
        else:
            return token[1]

    def _tokensupto2(self,
                     tokenizer,
                     starttoken=None,
                     blockstartonly=False,
                     blockendonly=False,
                     mediaendonly=False,
                     semicolon=False,
                     propertynameendonly=False,
                     propertyvalueendonly=False,
                     propertypriorityendonly=False,
                     selectorattendonly=False,
                     funcendonly=False,
                     listseponly=False, # ,
                     keepEnd=True,
                     keepEOF=True):
        """
        returns tokens upto end of atrule and end index
        end is defined by parameters, might be ; } ) or other

        default looks for ending "}" and ";"
        """
        # the flag parameters are mutually exclusive; the first truthy one
        # (in the order checked below) wins
        ends = u';}'
        brace = bracket = parant = 0 # {}, [], ()

        if blockstartonly: # {
            ends = u'{'
            brace = -1 # set to 0 with first {
        elif blockendonly: # }
            ends = u'}'
        elif mediaendonly: # }
            ends = u'}'
            brace = 1 # rules } and mediarules }
        elif semicolon:
            ends = u';'
        elif propertynameendonly: # : and ; in case of an error
            ends = u':;'
        elif propertyvalueendonly: # ; or !important
            ends = (u';', u'!')
        elif propertypriorityendonly: # ;
            ends = u';'
        elif selectorattendonly: # ]
            ends = u']'
            if starttoken and self._tokenvalue(starttoken) == u'[':
                # starttoken already opened the attribute selector
                bracket = 1
        elif funcendonly: # )
            ends = u')'
            parant = 1
        elif listseponly: # ,
            ends = u','

        resulttokens = []

        # NEEDED?
        if starttoken:
            resulttokens.append(starttoken)

        if not tokenizer:
            return resulttokens
        else:
            for token in tokenizer:
                if self._type(token) == 'EOF':
                    if keepEOF and keepEnd:
                        resulttokens.append(token)
                    break
                val = self._tokenvalue(token)
                # track nesting so an end char inside nested {}, [] or ()
                # does not terminate the scan
                if u'{' == val: brace += 1
                elif u'}' == val: brace -= 1
                elif u'[' == val: bracket += 1
                elif u']' == val: bracket -= 1
                # function( or single (
                elif u'(' == val or \
                   Base._prods.FUNCTION == self._type(token): parant += 1
                elif u')' == val: parant -= 1
                # an end value only terminates at nesting level zero
                if val in ends and (brace == bracket == parant == 0):
                    if keepEnd:
                        resulttokens.append(token)
                    break
                else:
                    resulttokens.append(token)

        return resulttokens

    def _getProductions(self, productions):
        """
        each production should return the next expected token
        normaly a name like "uri" or "EOF"
        some have no expectation like S or COMMENT, so simply return
        the current value of self.__expected
        """
        def ATKEYWORD(expected, seq, token, tokenizer=None):
            "TODO: add default impl for unexpected @rule"
            return expected

        def COMMENT(expected, seq, token, tokenizer=None):
            "default implementation for COMMENT token"
            seq.append(cssutils.css.CSSComment([token]))
            return expected

        def S(expected, seq, token, tokenizer=None):
            "default implementation for S token"
            return expected

        def EOF(expected=None, seq=None, token=None, tokenizer=None):
            "default implementation for EOF token"
            return 'EOF'

        # defaults below may be overridden by the caller's productions
        p = {'COMMENT': COMMENT,
             'S': S,
             'ATKEYWORD': ATKEYWORD,
             'EOF': EOF # only available if fullsheet
             }
        p.update(productions)
        return p

    def _parse(self, expected, seq, tokenizer, productions, default=None):
        """
        puts parsed tokens in seq by calling a production with
        (seq, tokenizer, token)

        expected
            a name what token or value is expected next, e.g. 'uri'
        seq
            to add rules etc to
        tokenizer
            call tokenizer.next() to get next token
        productions
            callbacks {tokentype: callback}
        default
            default callback if tokentype not in productions

        returns (wellformed, expected) which the last prod might have set
        """
        wellformed = True

        if not tokenizer:
            return wellformed, expected

        prods = self._getProductions(productions)
        for token in tokenizer:
            typ, val, lin, col = token
            p = prods.get(typ, default)
            if p:
                # production returns the next expected token name
                expected = p(expected, seq, token, tokenizer)
            else:
                # no callback for this token type: log but keep parsing
                wellformed = False
                self._log.error(u'Unexpected token (%s, %s, %s, %s)' % token)

        return wellformed, expected
415 416
class Deprecated(object):
    """Decorator class marking a function or method as deprecated.

    Calling the decorated function emits a ``DeprecationWarning``.

    It accepts a single paramter ``msg`` which is shown with the warning.
    It should contain information which function or method to use instead.
    """
    def __init__(self, msg):
        self.msg = msg

    def __call__(self, func):
        def wrapper(*args, **kwargs):
            import warnings
            warnings.warn("Call to deprecated method %r. %s" %
                          (func.__name__, self.msg),
                          category=DeprecationWarning,
                          stacklevel=2)
            return func(*args, **kwargs)
        # make the wrapper look like the wrapped function
        for attr in ('__name__', '__doc__'):
            setattr(wrapper, attr, getattr(func, attr))
        wrapper.__dict__.update(func.__dict__)
        return wrapper
440