Package cssutils :: Module util
[hide private]
[frames] | [no frames]

Source Code for Module cssutils.util

  1  """base classes for css and stylesheets packages 
  2  """ 
  3  __all__ = [] 
  4  __docformat__ = 'restructuredtext' 
  5  __author__ = '$LastChangedBy: cthedot $' 
  6  __date__ = '$LastChangedDate: 2007-10-27 22:28:43 +0200 (Sa, 27 Okt 2007) $' 
  7  __version__ = '$LastChangedRevision: 592 $' 
  8   
  9  import types 
 10  import xml.dom 
 11  import cssutils 
 12  from tokenize2 import Tokenizer 
 13   
class Seq(object):
    """
    (EXPERIMENTAL)
    a list like sequence of (value, type) used in almost all cssutils classes

    behaves almost like a list but keeps extra attribute "type" for
    each value in the list

    types are tokens types like e.g. "COMMENT" (value='/*...*/', all uppercase)
    or productions like e.g. "universal" (value='*', all lowercase)
    """
    def __init__(self):
        # two parallel lists: values[i] has the type types[i]
        self.values = []
        self.types = []

    def __contains__(self, item):
        "membership is tested against the values only, types are ignored"
        return item in self.values

    def __getitem__(self, index):
        "returns the value (not the type) at index"
        return self.values[index]

    def __setitem__(self, index, value_type):
        """might be set with tuple (value, type) or a single value
        (in which case the stored type is reset to None)"""
        if isinstance(value_type, tuple):
            val = value_type[0]
            typ = value_type[1]
        else:
            val = value_type
            typ = None
        self.values[index] = val
        self.types[index] = typ

    def __iter__(self):
        "returns an iterator of values only "
        return iter(self.values)

    def __len__(self):
        "same as len(list)"
        return len(self.values)

    def __repr__(self):
        "returns a repr same as a list of tuples of (value, type)"
        return u'[%s]' % u',\n '.join([u'(%r, %r)' % (value, self.types[i])
                                       for i, value in enumerate(self.values)])

    def __str__(self):
        """returns a concatenated string of all values except COMMENTs

        COMMENT values are CSSComment objects (not strings, see the note in
        ``append``) and would break ``u''.join``, so they are skipped.
        BUGFIX: each remaining value is now included exactly once; the
        previous version could append a value twice.
        """
        items = [value for i, value in enumerate(self.values)
                 if self.types[i] != 'COMMENT']
        return u''.join(items)

    def append(self, value, type=None):
        """
        same as list.append but not a simple value but a SeqItem is appended
        """
        self.values.append(value) # str(value)??? does not work if value is e.g. comment
        self.types.append(type)
class Base(object):
    """
    Base class for most CSS and StyleSheets classes

    Contains helper methods for inheriting classes helping parsing

    ``_normalize`` is static as used by Preferences.
    """
    # shared tokenizer instance for all inheriting classes (name-mangled)
    __tokenizer2 = Tokenizer()
    _log = cssutils.log
    _prods = cssutils.tokenize2.CSSProductions

    # for more on shorthand properties see
    # http://www.dustindiaz.com/css-shorthand/
    # format: shorthand: [(propname, mandatorycheck?)*]
    _SHORTHANDPROPERTIES = {
        u'background': [],
        u'border': [],
        u'border-left': [],
        u'border-right': [],
        u'border-top': [],
        u'border-bottom': [],
        u'border-color': [],
        u'border-style': [],
        u'border-width': [],
        u'cue': [],
        u'font': [('font-weight', True),
                  ('font-size', True),
                  ('line-height', False),
                  ('font-family', True)],
        u'list-style': [],
        u'margin': [],
        u'outline': [],
        u'padding': [],
        u'pause': []
        }

    @staticmethod
    def _normalize(x):
        """
        normalizes x namely:

        - lowercase
        - removes any \ (TODO: check for escapes like \65)
          so for x=="c\olor\" return "color"

        Returns x unchanged if it is empty or None.
        """
        # TODO: more normalizing stuff
        if x:
            return x.replace(u'\\', u'').lower()
        else:
            return x

    def _checkReadonly(self):
        """raises xml.dom.NoModificationAllowedErr if rule/... is readonly

        Returns False if the object is modifiable (only ever returns on
        the non-readonly path; otherwise it raises).
        """
        if hasattr(self, '_readonly') and self._readonly:
            raise xml.dom.NoModificationAllowedErr(
                u'%s is readonly.' % self.__class__)
            # BUGFIX: removed unreachable ``return True`` after the raise
        return False

    def _newseq(self):
        # used by Selector but should be used by most classes?
        return Seq()

    def _valuestr(self, t):
        """
        returns string value of t (t may be a string, a list of token tuples
        or a single tuple in format (type, value, line, col) or a
        tokenlist[old])
        """
        if not t:
            return u''
        elif isinstance(t, basestring):
            return t
        elif isinstance(t, list) and isinstance(t[0], tuple):
            # list of (type, value, line, col) tuples: join the values
            return u''.join([x[1] for x in t])
        elif isinstance(t, tuple): # needed?
            return self._tokenvalue(t)
        else: # old
            return u''.join([x.value for x in t])

    def _tokenize2(self, textortokens, aslist=False, fullsheet=False):
        """
        returns tokens of textortokens which may already be tokens in which
        case simply returns input

        Returns None for empty/falsy input.
        """
        if not textortokens:
            return None
        if types.GeneratorType == type(textortokens) and not aslist:
            # already tokenized
            return textortokens
        if isinstance(textortokens, basestring):
            if aslist:
                return [t for t in self.__tokenizer2.tokenize(
                    textortokens, fullsheet=fullsheet)]
            else:
                return self.__tokenizer2.tokenize(
                    textortokens, fullsheet=fullsheet)
        elif isinstance(textortokens, tuple):
            # a single token (like a comment)
            return [textortokens]
        else:
            # already tokenized but return generator
            return (x for x in textortokens)

    def _nexttoken(self, tokenizer, default=None):
        "returns next token in generator tokenizer or the default value"
        try:
            return tokenizer.next()
        except (StopIteration, AttributeError):
            # AttributeError covers tokenizer=None (no .next())
            return default

    def _type(self, token):
        "type of Tokenizer token (token[0]) or None if no token"
        if not token:
            return None
        else:
            return token[0]

    def _tokenvalue(self, token, normalize=False):
        "value of Tokenizer token (token[1]) or None if no token"
        if not token:
            return None
        elif normalize:
            return Base._normalize(token[1])
        else:
            return token[1]

    def _tokensupto2(self,
                     tokenizer,
                     starttoken=None,
                     blockstartonly=False,
                     blockendonly=False,
                     mediaendonly=False,
                     semicolon=False,
                     propertynameendonly=False,
                     propertyvalueendonly=False,
                     propertypriorityendonly=False,
                     selectorattendonly=False,
                     funcendonly=False,
                     listseponly=False, # ,
                     keepEnd=True,
                     keepEOF=True):
        """
        returns tokens upto end of atrule and end index
        end is defined by parameters, might be ; } ) or other

        default looks for ending "}" and ";"
        """
        ends = u';}'
        brace = bracket = parant = 0 # {}, [], ()

        if blockstartonly: # {
            ends = u'{'
            brace = -1 # set to 0 with first {
        elif blockendonly: # }
            ends = u'}'
        elif mediaendonly: # }
            ends = u'}'
            brace = 1 # rules } and mediarules }
        elif semicolon:
            ends = u';'
        elif propertynameendonly: # : and ; in case of an error
            ends = u':;'
        elif propertyvalueendonly: # ; or !important
            ends = (u';', u'!')
        elif propertypriorityendonly: # ;
            ends = u';'
        elif selectorattendonly: # ]
            ends = u']'
            if starttoken and self._tokenvalue(starttoken) == u'[':
                bracket = 1
        elif funcendonly: # )
            ends = u')'
            parant = 1
        elif listseponly: # ,
            ends = u','

        resulttokens = []

        # NEEDED?
        if starttoken:
            resulttokens.append(starttoken)

        if not tokenizer:
            return resulttokens
        else:
            for token in tokenizer:
                if self._type(token) == 'EOF':
                    if keepEOF and keepEnd:
                        resulttokens.append(token)
                    break
                val = self._tokenvalue(token)
                # track nesting so an end char inside {} [] () is ignored
                if u'{' == val: brace += 1
                elif u'}' == val: brace -= 1
                elif u'[' == val: bracket += 1
                elif u']' == val: bracket -= 1
                # function( or single (
                elif u'(' == val or \
                     Base._prods.FUNCTION == self._type(token): parant += 1
                elif u')' == val: parant -= 1
                if val in ends and (brace == bracket == parant == 0):
                    if keepEnd:
                        resulttokens.append(token)
                    break
                else:
                    resulttokens.append(token)

        return resulttokens

    def _getProductions(self, productions):
        """
        each production should return the next expected token
        normaly a name like "uri" or "EOF"
        some have no expectation like S or COMMENT, so simply return
        the current value of self.__expected
        """
        def ATKEYWORD(expected, seq, token, tokenizer=None):
            "TODO: add default impl for unexpected @rule"
            return expected

        def COMMENT(expected, seq, token, tokenizer=None):
            "default implementation for COMMENT token"
            seq.append(cssutils.css.CSSComment([token]))
            return expected

        def S(expected, seq, token, tokenizer=None):
            "default implementation for S token"
            return expected

        def EOF(expected=None, seq=None, token=None, tokenizer=None):
            "default implementation for EOF token"
            return 'EOF'

        p = {'COMMENT': COMMENT,
             'S': S,
             'ATKEYWORD': ATKEYWORD,
             'EOF': EOF # only available if fullsheet
             }
        # caller-supplied productions override the defaults above
        p.update(productions)
        return p

    def _parse(self, expected, seq, tokenizer, productions, default=None):
        """
        puts parsed tokens in seq by calling a production with
        (seq, tokenizer, token)

        expected
            a name what token or value is expected next, e.g. 'uri'
        seq
            to add rules etc to
        tokenizer
            call tokenizer.next() to get next token
        productions
            callbacks {tokentype: callback}
        default
            default callback if tokentype not in productions

        returns (wellformed, expected) which the last prod might have set
        """
        wellformed = True

        if not tokenizer:
            return wellformed, expected

        prods = self._getProductions(productions)
        for token in tokenizer:
            typ, val, lin, col = token
            p = prods.get(typ, default)
            if p:
                expected = p(expected, seq, token, tokenizer)
            else:
                wellformed = False
                self._log.error(u'Unexpected token (%s, %s, %s, %s)' % token)

        return wellformed, expected
class Deprecated(object):
    """This is a decorator which can be used to mark functions
    as deprecated. It will result in a warning being emitted
    when the function is used.

    It accepts a single parameter ``msg`` which is shown with the warning.
    It should contain information which function or method to use instead.
    """
    def __init__(self, msg):
        self.msg = msg

    def __call__(self, func):
        import warnings
        import functools

        # functools.wraps copies __name__, __doc__ and __dict__ like the
        # previous manual version did, and additionally preserves
        # __module__ (and __wrapped__ on modern Pythons)
        @functools.wraps(func)
        def newFunc(*args, **kwargs):
            warnings.warn("Call to deprecated method %r. %s" %
                          (func.__name__, self.msg),
                          category=DeprecationWarning,
                          stacklevel=2)
            return func(*args, **kwargs)

        return newFunc