Source Code for Module dbf.tables

   1  "table definitions" 
   2  import os 
   3  import sys 
   4  import csv 
   5  import codecs 
   6  import unicodedata 
   7  import weakref 
   8  from array import array 
   9  from decimal import Decimal 
  10  from dbf import _io as io 
  11  from dbf.dates import Date, DateTime, Time 
  12  from dbf.exceptions import Bof, Eof, DbfError, DataOverflow, FieldMissing, NonUnicode 
  13   
  14  input_decoding = 'cp850'    # treat non-unicode data as ... 
  15  default_codepage = 'cp1252' # if no codepage specified on dbf creation, use this 
  16  return_ascii = True         # convert back to icky ascii, losing chars if no mapping 
  17   
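These three module-level settings are read when tables are created or opened, so they can be overridden beforehand. A minimal sketch (not part of the module source); the values shown are only illustrations:

    # override the module-wide defaults before opening any tables
    from dbf import tables

    tables.input_decoding = 'latin-1'     # treat incoming non-unicode data as latin-1
    tables.default_codepage = 'cp437'     # codepage stamped on newly created tables
    tables.return_ascii = False           # hand back unicode instead of ascii
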
  18  version_map = { 
  19          '\x02' : 'FoxBASE', 
  20          '\x03' : 'dBase III Plus', 
  21          '\x04' : 'dBase IV', 
  22          '\x05' : 'dBase V', 
  23          '\x30' : 'Visual FoxPro', 
  24          '\x31' : 'Visual FoxPro (auto increment field)', 
  25          '\x43' : 'dBase IV SQL', 
  26          '\x7b' : 'dBase IV w/memos', 
  27          '\x83' : 'dBase III Plus w/memos', 
  28          '\x8b' : 'dBase IV w/memos', 
  29          '\x8e' : 'dBase IV w/SQL table' } 
  30   
  31  code_pages = { 
  32          '\x01' : ('cp437', 'U.S. MS-DOS'), 
  33          '\x02' : ('cp850', 'International MS-DOS'), 
  34          '\x03' : ('cp1252', 'Windows ANSI'), 
  35          '\x04' : ('mac_roman', 'Standard Macintosh'), 
  36   
  37          '\x08' : ('cp865', 'Danish OEM'), 
  38          '\x09' : ('cp437', 'Dutch OEM'), 
  39          '\x0A' : ('cp850', 'Dutch OEM (secondary)'), 
  40          '\x0B' : ('cp437', 'Finnish OEM'), 
  41          '\x0D' : ('cp437', 'French OEM'), 
  42          '\x0E' : ('cp850', 'French OEM (secondary)'), 
  43          '\x0F' : ('cp437', 'German OEM'), 
  44          '\x10' : ('cp850', 'German OEM (secondary)'), 
  45          '\x11' : ('cp437', 'Italian OEM'), 
  46          '\x12' : ('cp850', 'Italian OEM (secondary)'), 
  47          '\x13' : ('cp932', 'Japanese Shift-JIS'), 
  48          '\x14' : ('cp850', 'Spanish OEM (secondary)'), 
  49          '\x15' : ('cp437', 'Swedish OEM'), 
  50          '\x16' : ('cp850', 'Swedish OEM (secondary)'), 
  51          '\x17' : ('cp865', 'Norwegian OEM'), 
  52          '\x18' : ('cp437', 'Spanish OEM'), 
  53          '\x19' : ('cp437', 'English OEM (Britain)'), 
  54          '\x1A' : ('cp850', 'English OEM (Britain) (secondary)'), 
  55          '\x1B' : ('cp437', 'English OEM (U.S.)'), 
  56          '\x1C' : ('cp863', 'French OEM (Canada)'), 
  57          '\x1D' : ('cp850', 'French OEM (secondary)'), 
  58          '\x1F' : ('cp852', 'Czech OEM'), 
  59          '\x22' : ('cp852', 'Hungarian OEM'), 
  60          '\x23' : ('cp852', 'Polish OEM'), 
  61          '\x24' : ('cp860', 'Portuguese OEM'),
  62          '\x25' : ('cp850', 'Portuguese OEM (secondary)'),
  63          '\x26' : ('cp866', 'Russian OEM'), 
  64          '\x37' : ('cp850', 'English OEM (U.S.) (secondary)'), 
  65          '\x40' : ('cp852', 'Romanian OEM'), 
  66          '\x4D' : ('cp936', 'Chinese GBK (PRC)'), 
  67          '\x4E' : ('cp949', 'Korean (ANSI/OEM)'), 
  68          '\x4F' : ('cp950', 'Chinese Big 5 (Taiwan)'), 
  69          '\x50' : ('cp874', 'Thai (ANSI/OEM)'), 
  70          '\x57' : ('cp1252', 'ANSI'), 
  71          '\x58' : ('cp1252', 'Western European ANSI'), 
  72          '\x59' : ('cp1252', 'Spanish ANSI'), 
  73   
  74          '\x64' : ('cp852', 'Eastern European MS-DOS'), 
  75          '\x65' : ('cp866', 'Russian MS-DOS'), 
  76          '\x66' : ('cp865', 'Nordic MS-DOS'), 
  77          '\x67' : ('cp861', 'Icelandic MS-DOS'), 
  78   
  79          '\x68' : (None, 'Kamenicky (Czech) MS-DOS'), 
  80          '\x69' : (None, 'Mazovia (Polish) MS-DOS'), 
  81   
  82          '\x6a' : ('cp737', 'Greek MS-DOS (437G)'), 
  83          '\x6b' : ('cp857', 'Turkish MS-DOS'), 
  84          '\x78' : ('cp950', 'Traditional Chinese (Hong Kong SAR, Taiwan) Windows'), 
  85          '\x79' : ('cp949', 'Korean Windows'), 
  86          '\x7a' : ('cp936', 'Chinese Simplified (PRC, Singapore) Windows'), 
  87          '\x7b' : ('cp932', 'Japanese Windows'), 
  88          '\x7c' : ('cp874', 'Thai Windows'), 
  89          '\x7d' : ('cp1255', 'Hebrew Windows'), 
  90          '\x7e' : ('cp1256', 'Arabic Windows'), 
  91          '\xc8' : ('cp1250', 'Eastern European Windows'), 
  92          '\xc9' : ('cp1251', 'Russian Windows'), 
  93          '\xca' : ('cp1254', 'Turkish Windows'), 
  94          '\xcb' : ('cp1253', 'Greek Windows'), 
  95          '\x96' : ('mac_cyrillic', 'Russian Macintosh'), 
  96          '\x97' : ('mac_latin2', 'Macintosh EE'), 
  97          '\x98' : ('mac_greek', 'Greek Macintosh') } 
  98   
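The codepage byte stored at offset 29 of a table header indexes into code_pages; a small lookup sketch:

    # map a header codepage byte to a Python codec name and description
    from dbf.tables import code_pages

    codec, description = code_pages['\x03']
    print codec, '-', description        # cp1252 - Windows ANSI

    # entries whose codec is None (Kamenicky, Mazovia) cannot be decoded
    unsupported = [desc for codec, desc in code_pages.values() if codec is None]
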
  99  class _DbfRecord(object):
 100      """Provides routines to extract and save data within the fields of a dbf record."""
 101      __slots__ = ['_recnum', '_layout', '_data', '__weakref__']
 102      def _retrieveFieldValue(yo, record_data, fielddef):
 103          """calls appropriate routine to fetch value stored in field from array
 104          @param record_data: the data portion of the record
 105          @type record_data: array of characters
 106          @param fielddef: description of the field definition
 107          @type fielddef: dictionary with keys 'type', 'start', 'length', 'end', 'decimals', and 'flags'
 108          @returns: python data stored in field"""
 109  
 110          field_type = fielddef['type']
 111          retrieve = yo._layout.fieldtypes[field_type]['Retrieve']
 112          datum = retrieve(record_data, fielddef, yo._layout.memo)
 113          if field_type in yo._layout.character_fields:
 114              datum = yo._layout.decoder(datum)[0]
 115              if yo._layout.return_ascii:
 116                  try:
 117                      datum = yo._layout.output_encoder(datum)[0]
 118                  except UnicodeEncodeError:
 119                      datum = unicodedata.normalize('NFD', datum).encode('ascii','ignore')
 120          return datum
 121      def _updateFieldValue(yo, fielddef, value):
 122          "calls appropriate routine to convert value to ascii bytes, and save it in record"
 123          field_type = fielddef['type']
 124          update = yo._layout.fieldtypes[field_type]['Update']
 125          if field_type in yo._layout.character_fields:
 126              if not isinstance(value, unicode):
 127                  if yo._layout.input_decoder is None:
 128                      raise NonUnicode("String not in unicode format, no default encoding specified")
 129                  value = yo._layout.input_decoder(value)[0]     # input ascii => unicode
 130              value = yo._layout.encoder(value)[0]               # unicode => table ascii
 131          bytes = array('c', update(value, fielddef, yo._layout.memo))
 132          size = fielddef['length']
 133          if len(bytes) > size:
 134              raise DataOverflow("tried to store %d bytes in %d byte field" % (len(bytes), size))
 135          blank = array('c', ' ' * size)
 136          start = fielddef['start']
 137          end = start + size
 138          blank[:len(bytes)] = bytes[:]
 139          yo._data[start:end] = blank[:]
 140          yo._updateDisk(yo._recnum * yo._layout.header.recordlength() + yo._layout.header.start(), yo._data.tostring())
 141      def _updateDisk(yo, location='', data=None):
 142          if not yo._layout.inmemory:
 143              if yo._recnum < 0:
 144                  raise DbfError("Attempted to update record that has been packed")
 145              if location == '':
 146                  location = yo._recnum * yo._layout.header.recordlength() + yo._layout.header.start()
 147              if data is None:
 148                  data = yo._data
 149              yo._layout.dfd.seek(location)
 150              yo._layout.dfd.write(data)
 151      def __call__(yo, *specs):
 152          results = []
 153          if not specs:
 154              specs = yo._layout.index
 155          specs = _normalize_tuples(tuples=specs, length=2, filler=[_nop])
 156          for field, func in specs:
 157              results.append(func(yo[field]))
 158          return tuple(results)
 159  
 160      def __contains__(yo, key):
 161          return key in yo._layout.fields
 162      def __iter__(yo):
 163          return (yo[field] for field in yo._layout.fields)
 164      def __getattr__(yo, name):
 165          if name[0:2] == '__' and name[-2:] == '__':
 166              raise AttributeError, 'Method %s is not implemented.' % name
 167          elif not name in yo._layout.fields:
 168              raise FieldMissing(name)
 169          try:
 170              fielddef = yo._layout[name]
 171              value = yo._retrieveFieldValue(yo._data[fielddef['start']:fielddef['end']], fielddef)
 172              return value
 173          except DbfError, error:
 174              error.message = "field --%s-- is %s -> %s" % (name, yo._layout.fieldtypes[fielddef['type']]['Type'], error.message)
 175              raise
 176      def __getitem__(yo, item):
 177          if type(item) == int:
 178              if not -yo._layout.header.fieldcount() <= item < yo._layout.header.fieldcount():
 179                  raise IndexError("Field offset %d is not in record" % item)
 180              return yo[yo._layout.fields[item]]
 181          elif type(item) == slice:
 182              sequence = []
 183              for index in yo._layout.fields[item]:
 184                  sequence.append(yo[index])
 185              return sequence
 186          elif type(item) == str:
 187              return yo.__getattr__(item)
 188          else:
 189              raise TypeError("%s is not a field name" % item)
 190      def __len__(yo):
 191          return yo._layout.header.fieldcount()
 192      def __new__(cls, recnum, layout, kamikaze='', _fromdisk=False):
 193          """record = ascii array of entire record; layout=record specification; memo = memo object for table"""
 194          record = object.__new__(cls)
 195          record._recnum = recnum
 196          record._layout = layout
 197          if layout.blankrecord is None and not _fromdisk:
 198              record._createBlankRecord()
 199          record._data = layout.blankrecord
 200          if recnum == -1:                    # not a disk-backed record
 201              return record
 202          elif type(kamikaze) == array:
 203              record._data = kamikaze[:]
 204          elif type(kamikaze) == str:
 205              record._data = array('c', kamikaze)
 206          else:
 207              record._data = kamikaze._data[:]
 208          datalen = len(record._data)
 209          if datalen < layout.header.recordlength():
 210              record._data.extend(layout.blankrecord[datalen:])
 211          elif datalen > layout.header.recordlength():
 212              record._data = record._data[:layout.header.recordlength()]
 213          if not _fromdisk and not layout.inmemory:
 214              record._updateDisk()
 215          return record
 216      def __setattr__(yo, name, value):
 217          if name in yo.__slots__:
 218              object.__setattr__(yo, name, value)
 219              return
 220          elif not name in yo._layout.fields:
 221              raise FieldMissing(name)
 222          fielddef = yo._layout[name]
 223          try:
 224              yo._updateFieldValue(fielddef, value)
 225          except DbfError, error:
 226              error.message = "field --%s-- is %s -> %s" % (name, yo._layout.fieldtypes[fielddef['type']]['Type'], error.message)
 227              error.data = name
 228              raise
 229          raise DbfError(message)
 230      def __setitem__(yo, name, value):
 231          if type(name) == str:
 232              yo.__setattr__(name, value)
 233          elif type(name) in (int, long):
 234              yo.__setattr__(yo._layout.fields[name], value)
 235          else:
 236              raise TypeError("%s is not a field name" % name)
 237      def __str__(yo):
 238          result = []
 239          for field in yo.field_names():
 240              result.append("%-10s: %s" % (field, yo[field]))
 241          return '\n'.join(result)
 242      def __repr__(yo):
 243          return yo._data.tostring()
 244      def _createBlankRecord(yo):
 245          "creates a blank record data chunk"
 246          layout = yo._layout
 247          ondisk = layout.ondisk
 248          layout.ondisk = False
 249          yo._data = array('c', ' ' * layout.header.recordlength())
 250          layout.memofields = []
 251          for field in layout.fields:
 252              yo._updateFieldValue(layout[field], layout.fieldtypes[layout[field]['type']]['Blank']())
 253              if layout[field]['type'] in layout.memotypes:
 254                  layout.memofields.append(field)
 255          layout.blankrecord = yo._data[:]
 256          layout.ondisk = ondisk
 257      def record_number(yo):
 258          "physical record number"
 259          return yo._recnum
 260      def has_been_deleted(yo):
 261          "marked for deletion?"
 262          return yo._data[0] == '*'
 263      def field_names(yo):
 264          "fields in table/record"
 265          return yo._layout.fields[:]
 266      def delete_record(yo):
 267          "marks record as deleted"
 268          yo._data[0] = '*'
 269          yo._updateDisk(data='*')
 270      def gather_fields(yo, dict, drop=False):
 271          "saves a dictionary into a records fields\nkeys with no matching field will raise a FieldMissing exception unless drop = True"
 272          for key in dict:
 273              if not key in yo.field_names():
 274                  if drop:
 275                      continue
 276                  raise FieldMissing(key)
 277              yo.__setattr__(key, dict[key])
 278      def reset_record(yo, keep_fields=None):
 279          "blanks record"
 280          if keep_fields is None:
 281              keep_fields = []
 282          keep = {}
 283          for field in keep_fields:
 284              keep[field] = yo[field]
 285          if yo._layout.blankrecord == None:
 286              yo._createBlankRecord()
 287          yo._data[:] = yo._layout.blankrecord[:]
 288          for field in keep_fields:
 289              yo[field] = keep[field]
 290          yo._updateDisk()
 291      def scatter_fields(yo, blank=False):
 292          "returns a dictionary of fieldnames and values which can be used with gather_fields(). if blank is True, values are empty."
 293          keys = yo._layout.fields
 294          if blank:
 295              values = [yo._layout.fieldtypes[yo._layout[key]['type']]['Blank']() for key in keys]
 296          else:
 297              values = [yo[field] for field in keys]
 298          return dict(zip(keys, values))
 299      def undelete_record(yo):
 300          "marks record as active"
 301          yo._data[0] = ' '
 302          yo._updateDisk(data=' ')
 303  class _DbfMemo(object):
 304      """Provides access to memo fields as dictionaries
 305         must override _init, _get_memo, and _put_memo to
 306         store memo contents to disk"""
 307      def _init(yo):
 308          "initialize disk file usage"
 309      def _get_memo(yo, block):
 310          "retrieve memo contents from disk"
 311      def _put_memo(yo, data):
 312          "store memo contents to disk"
 313      def __init__(yo, meta):
 314          ""
 315          yo.meta = meta
 316          yo.memory = {}
 317          yo.nextmemo = 1
 318          yo._init()
 319          yo.meta.newmemofile = False
 320      def get_memo(yo, block, field):
 321          "gets the memo in block"
 322          if yo.meta.ignorememos or not block:
 323              return ''
 324          if yo.meta.ondisk:
 325              return yo._get_memo(block)
 326          else:
 327              return yo.memory[block]
 328      def put_memo(yo, data):
 329          "stores data in memo file, returns block number"
 330          if yo.meta.ignorememos or data == '':
 331              return 0
 332          if yo.meta.inmemory:
 333              thismemo = yo.nextmemo
 334              yo.nextmemo += 1
 335              yo.memory[thismemo] = data
 336          else:
 337              thismemo = yo._put_memo(data)
 338          return thismemo
 339  class _Db3Memo(_DbfMemo):
 340      def _init(yo):
 341          "dBase III specific"
 342          yo.meta.memo_size= 512
 343          yo.record_header_length = 2
 344          if yo.meta.ondisk and not yo.meta.ignorememos:
 345              if yo.meta.newmemofile:
 346                  yo.meta.mfd = open(yo.meta.memoname, 'w+b')
 347                  yo.meta.mfd.write(io.packLongInt(1) + '\x00' * 508)
 348              else:
 349                  try:
 350                      yo.meta.mfd = open(yo.meta.memoname, 'r+b')
 351                      yo.meta.mfd.seek(0)
 352                      yo.nextmemo = io.unpackLongInt(yo.meta.mfd.read(4))
 353                  except:
 354                      raise DbfError("memo file appears to be corrupt")
 355      def _get_memo(yo, block):
 356          block = int(block)
 357          yo.meta.mfd.seek(block * yo.meta.memo_size)
 358          eom = -1
 359          data = ''
 360          while eom == -1:
 361              newdata = yo.meta.mfd.read(yo.meta.memo_size)
 362              if not newdata:
 363                  return data
 364              data += newdata
 365              eom = data.find('\x1a\x1a')
 366          return data[:eom].rstrip()
 367      def _put_memo(yo, data):
 368          length = len(data) + yo.record_header_length    # room for two ^Z at end of memo
 369          blocks = length // yo.meta.memo_size
 370          if length % yo.meta.memo_size:
 371              blocks += 1
 372          thismemo = yo.nextmemo
 373          yo.nextmemo = thismemo + blocks
 374          yo.meta.mfd.seek(0)
 375          yo.meta.mfd.write(io.packLongInt(yo.nextmemo))
 376          yo.meta.mfd.seek(thismemo * yo.meta.memo_size)
 377          yo.meta.mfd.write(data)
 378          yo.meta.mfd.write('\x1a\x1a')
 379          if len(yo._get_memo(thismemo)) != len(data):
 380              raise DbfError("unknown error: memo not saved")
 381          return thismemo
 382  class _VfpMemo(_DbfMemo):
 383      def _init(yo):
 384          "Visual Foxpro 6 specific"
 385          if yo.meta.ondisk and not yo.meta.ignorememos:
 386              yo.record_header_length = 8
 387              if yo.meta.newmemofile:
 388                  if yo.meta.memo_size == 0:
 389                      yo.meta.memo_size = 1
 390                  elif 1 < yo.meta.memo_size < 33:
 391                      yo.meta.memo_size *= 512
 392                  yo.meta.mfd = open(yo.meta.memoname, 'w+b')
 393                  nextmemo = 512 // yo.meta.memo_size
 394                  if nextmemo * yo.meta.memo_size < 512:
 395                      nextmemo += 1
 396                  yo.nextmemo = nextmemo
 397                  yo.meta.mfd.write(io.packLongInt(nextmemo, bigendian=True) + '\x00\x00' + \
 398                          io.packShortInt(yo.meta.memo_size, bigendian=True) + '\x00' * 504)
 399              else:
 400                  try:
 401                      yo.meta.mfd = open(yo.meta.memoname, 'r+b')
 402                      yo.meta.mfd.seek(0)
 403                      header = yo.meta.mfd.read(512)
 404                      yo.nextmemo = io.unpackLongInt(header[:4], bigendian=True)
 405                      yo.meta.memo_size = io.unpackShortInt(header[6:8], bigendian=True)
 406                  except:
 407                      raise DbfError("memo file appears to be corrupt")
 408      def _get_memo(yo, block):
 409          yo.meta.mfd.seek(block * yo.meta.memo_size)
 410          header = yo.meta.mfd.read(8)
 411          length = io.unpackLongInt(header[4:], bigendian=True)
 412          return yo.meta.mfd.read(length)
 413      def _put_memo(yo, data):
 414          yo.meta.mfd.seek(0)
 415          thismemo = io.unpackLongInt(yo.meta.mfd.read(4), bigendian=True)
 416          yo.meta.mfd.seek(0)
 417          length = len(data) + yo.record_header_length    # room for two ^Z at end of memo
 418          blocks = length // yo.meta.memo_size
 419          if length % yo.meta.memo_size:
 420              blocks += 1
 421          yo.meta.mfd.write(io.packLongInt(thismemo+blocks, bigendian=True))
 422          yo.meta.mfd.seek(thismemo*yo.meta.memo_size)
 423          yo.meta.mfd.write('\x00\x00\x00\x01' + io.packLongInt(len(data), bigendian=True) + data)
 424          return thismemo
 425  class DbfTable(object):
 426      """Provides a framework for dbf style tables."""
 427      _version = 'basic memory table'
 428      _versionabbv = 'dbf'
 429      _fieldtypes = {
 430              'D' : { 'Type':'Date', 'Init':io.addDate, 'Blank':Date.today, 'Retrieve':io.retrieveDate, 'Update':io.updateDate, },
 431              'L' : { 'Type':'Logical', 'Init':io.addLogical, 'Blank':bool, 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, },
 432              'M' : { 'Type':'Memo', 'Init':io.addMemo, 'Blank':str, 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, } }
 433      _memoext = ''
 434      _memotypes = tuple('M', )
 435      _memoClass = _DbfMemo
 436      _yesMemoMask = ''
 437      _noMemoMask = ''
 438      _fixed_fields = ('M','D','L')       # always same length in table
 439      _variable_fields = tuple()          # variable length in table
 440      _character_fields = tuple('M', )    # field representing character data
 441      _decimal_fields = tuple()           # text-based numeric fields
 442      _numeric_fields = tuple()           # fields representing a number
 443      _dbfTableHeader = array('c', '\x00' * 32)
 444      _dbfTableHeader[0] = '\x00'         # table type - none
 445      _dbfTableHeader[8:10] = array('c', io.packShortInt(33))
 446      _dbfTableHeader[10] = '\x01'        # record length -- one for delete flag
 447      _dbfTableHeader[29] = '\x00'        # code page -- none, using plain ascii
 448      _dbfTableHeader = _dbfTableHeader.tostring()
 449      _dbfTableHeaderExtra = ''
 450      _supported_tables = []
 451      _read_only = False
 452      _meta_only = False
 453      _use_deleted = True
454 - class _MetaData(dict):
455 blankrecord = None 456 fields = None 457 filename = None 458 dfd = None 459 memoname = None 460 newmemofile = False 461 memo = None 462 mfd = None 463 ignorememos = False 464 memofields = None 465 index = [] # never mutated 466 index_reversed = False 467 orderresults = None 468 current = -1
469 - class _TableHeader(object):
470 - def __init__(yo, data):
471 if len(data) != 32: 472 raise DbfError('table header should be 32 bytes, but is %d bytes' % len(data)) 473 yo._data = array('c', data + '\x0d')
474 - def codepage(yo, cp=None):
475 "get/set code page of table" 476 if cp is None: 477 return yo._data[29] 478 else: 479 if cp not in code_pages: 480 for code_page in sorted(code_pages.keys()): 481 sd, ld = code_pages[code_page] 482 if cp == sd or cp == ld: 483 if sd is None: 484 raise DbfError("Unsupported codepage: %s" % ld) 485 cp = code_page 486 break 487 else: 488 raise DbfError("Unsupported codepage: %s" % cp) 489 yo._data[29] = cp 490 return cp
491 - def data(yo, bytes=None):
492 "get/set entire structure" 493 if bytes is None: 494 date = io.packDate(Date.today()) 495 yo._data[1:4] = array('c', date) 496 return yo._data.tostring() 497 else: 498 if len(bytes) < 32: 499 raise DbfError("length for data of %d is less than 32" % len(bytes)) 500 yo._data[:] = array('c', bytes)
501 - def extra(yo, data=None):
502 "get/set any extra dbf info (located after headers, before data records)" 503 fieldblock = yo._data[32:] 504 for i in range(len(fieldblock)//32+1): 505 cr = i * 32 506 if fieldblock[cr] == '\x0d': 507 break 508 else: 509 raise DbfError("corrupt field structure") 510 cr += 33 # skip past CR 511 if data is None: 512 return yo._data[cr:].tostring() 513 else: 514 yo._data[cr:] = array('c', data) # extra 515 yo._data[8:10] = array('c', io.packShortInt(len(yo._data))) # start
516 - def fieldcount(yo):
517 "number of fields (read-only)" 518 fieldblock = yo._data[32:] 519 for i in range(len(fieldblock)//32+1): 520 cr = i * 32 521 if fieldblock[cr] == '\x0d': 522 break 523 else: 524 raise DbfError("corrupt field structure") 525 return len(fieldblock[:cr]) // 32
526 - def fields(yo, block=None):
527 "get/set field block structure" 528 fieldblock = yo._data[32:] 529 for i in range(len(fieldblock)//32+1): 530 cr = i * 32 531 if fieldblock[cr] == '\x0d': 532 break 533 else: 534 raise DbfError("corrupt field structure") 535 if block is None: 536 return fieldblock[:cr].tostring() 537 else: 538 cr += 32 # convert to indexing main structure 539 fieldlen = len(block) 540 if fieldlen % 32 != 0: 541 raise DbfError("fields structure corrupt: %d is not a multiple of 32" % fieldlen) 542 yo._data[32:cr] = array('c', block) # fields 543 yo._data[8:10] = array('c', io.packShortInt(len(yo._data))) # start 544 fieldlen = fieldlen // 32 545 recordlen = 1 # deleted flag 546 for i in range(fieldlen): 547 recordlen += ord(block[i*32+16]) 548 yo._data[10:12] = array('c', io.packShortInt(recordlen))
549 - def recordcount(yo, count=None):
550 "get/set number of records (maximum 16,777,215)" 551 if count is None: 552 return io.unpackLongInt(yo._data[4:8].tostring()) 553 else: 554 yo._data[4:8] = array('c', io.packLongInt(count))
555 - def recordlength(yo):
556 "length of a record (read_only) (max of 65,535)" 557 return io.unpackShortInt(yo._data[10:12].tostring())
558 - def start(yo, pos=None):
559 "starting position of first record in file (must be within first 64K)" 560 if pos is None: 561 return io.unpackShortInt(yo._data[8:10].tostring()) 562 else: 563 yo._data[8:10] = array('c', io.packShortInt(pos))
564 - def update(yo):
565 "date of last table modification (read-only)" 566 return io.unpackDate(yo._data[1:4].tostring())
567 - def version(yo, ver=None):
568 "dbf version" 569 if ver is None: 570 return yo._data[0] 571 else: 572 yo._data[0] = ver
573 - class _Table(object):
574 "implements the weakref table for records"
575 - def __init__(yo, count, meta):
576 yo._meta = meta 577 yo._weakref_list = [weakref.ref(lambda x: None)] * count
578 - def __getitem__(yo, index):
579 maybe = yo._weakref_list[index]() 580 if maybe is None: 581 if index < 0: 582 index += yo._meta.header.recordcount() 583 size = yo._meta.header.recordlength() 584 location = index * size + yo._meta.header.start() 585 yo._meta.dfd.seek(location) 586 bytes = yo._meta.dfd.read(size) 587 maybe = _DbfRecord(recnum=index, layout=yo._meta, kamikaze=bytes, _fromdisk=True) 588 yo._weakref_list[index] = weakref.ref(maybe) 589 return maybe
590 - def append(yo, record):
591 yo._weakref_list.append(weakref.ref(record))
592 - class DbfIterator(object):
593 "returns records using current index"
594 - def __init__(yo, table):
595 yo._table = table 596 yo._index = -1 597 yo._more_records = True
598 - def __iter__(yo):
599 return yo
600 - def next(yo):
601 while yo._more_records: 602 yo._index += 1 603 if yo._index >= len(yo._table): 604 yo._more_records = False 605 continue 606 record = yo._table[yo._index] 607 if not yo._table.use_deleted() and record.has_been_deleted(): 608 continue 609 return record 610 else: 611 raise StopIteration
612 - def _buildHeaderFields(yo):
613 "constructs fieldblock for disk table" 614 fieldblock = array('c', '') 615 memo = False 616 yo._meta.header.version(chr(ord(yo._meta.header.version()) & ord(yo._noMemoMask))) 617 for field in yo._meta.fields: 618 if yo._meta.fields.count(field) > 1: 619 raise DbfError("corrupted field structure (noticed in _buildHeaderFields)") 620 fielddef = array('c', '\x00' * 32) 621 fielddef[:11] = array('c', io.packStr(field)) 622 fielddef[11] = yo._meta[field]['type'] 623 fielddef[12:16] = array('c', io.packLongInt(yo._meta[field]['start'])) 624 fielddef[16] = chr(yo._meta[field]['length']) 625 fielddef[17] = chr(yo._meta[field]['decimals']) 626 fielddef[18] = chr(yo._meta[field]['flags']) 627 fieldblock.extend(fielddef) 628 if yo._meta[field]['type'] in yo._meta.memotypes: 629 memo = True 630 yo._meta.header.fields(fieldblock.tostring()) 631 if memo: 632 yo._meta.header.version(chr(ord(yo._meta.header.version()) | ord(yo._yesMemoMask))) 633 if yo._meta.memo is None: 634 yo._meta.memo = yo._memoClass(yo._meta)
635 - def _checkMemoIntegrity(yo):
636 "dBase III specific" 637 if yo._meta.header.version() == '\x83': 638 try: 639 yo._meta.memo = yo._memoClass(yo._meta) 640 except: 641 yo._meta.dfd.close() 642 yo._meta.dfd = None 643 raise 644 if not yo._meta.ignorememos: 645 for field in yo._meta.fields: 646 if yo._meta[field]['type'] in yo._memotypes: 647 if yo._meta.header.version() != '\x83': 648 yo._meta.dfd.close() 649 yo._meta.dfd = None 650 raise DbfError("Table structure corrupt: memo fields exist, header declares no memos") 651 elif not os.path.exists(yo._meta.memoname): 652 yo._meta.dfd.close() 653 yo._meta.dfd = None 654 raise DbfError("Table structure corrupt: memo fields exist without memo file") 655 break
656 - def _initializeFields(yo):
657 "builds the FieldList of names, types, and descriptions from the disk file" 658 offset = 1 659 fieldsdef = yo._meta.header.fields() 660 if len(fieldsdef) % 32 != 0: 661 raise DbfError("field definition block corrupt: %d bytes in size" % len(fieldsdef)) 662 if len(fieldsdef) // 32 != yo.field_count(): 663 raise DbfError("Header shows %d fields, but field definition block has %d fields" % (yo.field_count(), len(fieldsdef)//32)) 664 for i in range(yo.field_count()): 665 fieldblock = fieldsdef[i*32:(i+1)*32] 666 name = io.unpackStr(fieldblock[:11]) 667 type = fieldblock[11] 668 if not type in yo._meta.fieldtypes: 669 raise DbfError("Unknown field type: %s" % type) 670 start = offset 671 length = ord(fieldblock[16]) 672 offset += length 673 end = start + length 674 decimals = ord(fieldblock[17]) 675 flags = ord(fieldblock[18]) 676 yo._meta.fields.append(name) 677 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
678 - def _fieldLayout(yo, i):
679 "Returns field information Name Type(Length[,Decimals])" 680 name = yo._meta.fields[i] 681 type = yo._meta[name]['type'] 682 length = yo._meta[name]['length'] 683 decimals = yo._meta[name]['decimals'] 684 if type in yo._decimal_fields: 685 description = "%s %s(%d,%d)" % (name, type, length, decimals) 686 elif type in yo._fixed_fields: 687 description = "%s %s" % (name, type) 688 else: 689 description = "%s %s(%d)" % (name, type, length) 690 return description
691 - def _loadtable(yo):
692 "loads the records from disk to memory" 693 if yo._meta_only: 694 raise DbfError("%s has been closed, records are unavailable" % yo.filename()) 695 dfd = yo._meta.dfd 696 header = yo._meta.header 697 dfd.seek(header.start()) 698 allrecords = dfd.read() # kludge to get around mysterious errno 0 problems 699 dfd.seek(0) 700 length = header.recordlength() 701 for i in range(header.recordcount()): 702 record_data = allrecords[length*i:length*i+length] 703 yo._table.append(_DbfRecord(i, yo._meta, allrecords[length*i:length*i+length], _fromdisk=True)) 704 yo._index.append(i) 705 dfd.seek(0)
706 - def _list_fields(yo, specs, sep=','):
707 if specs is None: 708 specs = yo.field_names() 709 elif isinstance(specs, str): 710 specs = specs.split(sep) 711 else: 712 specs = list(specs) 713 specs = [s.strip() for s in specs] 714 return specs
715 - def _updateDisk(yo, headeronly=False):
716 "synchronizes the disk file with current data" 717 if yo._meta.inmemory: 718 return 719 fd = yo._meta.dfd 720 fd.seek(0) 721 fd.write(yo._meta.header.data()) 722 if not headeronly: 723 for record in yo._table: 724 record._updateDisk() 725 fd.flush() 726 fd.truncate(yo._meta.header.start() + yo._meta.header.recordcount() * yo._meta.header.recordlength())
727 - def __contains__(yo, key):
728 return key in yo.field_names()
729 - def __enter__(yo):
730 return yo
731 - def __exit__(yo, *exc_info):
732 yo.close()
733 - def __getattr__(yo, name):
734 if name in ('_index','_table'): 735 if yo._meta.ondisk: 736 yo._table = yo._Table(len(yo), yo._meta) 737 yo._index = range(len(yo)) 738 else: 739 yo._table = [] 740 yo._index = [] 741 yo._loadtable() 742 return object.__getattribute__(yo, name)
743 - def __getitem__(yo, value):
744 if type(value) == int: 745 if not -yo._meta.header.recordcount() <= value < yo._meta.header.recordcount(): 746 raise IndexError("Record %d is not in table." % value) 747 return yo._table[yo._index[value]] 748 elif type(value) == slice: 749 sequence = [] 750 for index in yo._index[value]: 751 record = yo._table[index] 752 if yo.use_deleted() is True or not record.has_been_deleted(): 753 sequence.append(record) 754 return DbfList(yo, sequence, desc='%s --> %s' % (yo.filename(), value)) 755 else: 756 raise TypeError('type <%s> not valid for indexing' % type(value))
 757      def __init__(yo, filename=':memory:', field_specs=None, memo_size=128, ignore_memos=False,
 758                  read_only=False, keep_memos=False, meta_only=False, codepage=None):
 759          """open/create dbf file
 760          filename should include path if needed
 761          field_specs can be either a ;-delimited string or a list of strings
 762          memo_size is always 512 for db3 memos
 763          ignore_memos is useful if the memo file is missing or corrupt
 764          read_only will load records into memory, then close the disk file
 765          keep_memos will also load any memo fields into memory
 766          meta_only will ignore all records, keeping only basic table information
 767          codepage will override whatever is set in the table itself"""
 768          if filename == ':memory:':
 769              if field_specs is None:
 770                  raise DbfError("field list must be specified for in-memory tables")
 771          elif type(yo) is DbfTable:
 772              raise DbfError("only memory tables supported")
 773          yo._meta = meta = yo._MetaData()
 774          meta.filename = filename
 775          meta.fields = []
 776          meta.fieldtypes = yo._fieldtypes
 777          meta.fixed_fields = yo._fixed_fields
 778          meta.variable_fields = yo._variable_fields
 779          meta.character_fields = yo._character_fields
 780          meta.decimal_fields = yo._decimal_fields
 781          meta.numeric_fields = yo._numeric_fields
 782          meta.memotypes = yo._memotypes
 783          meta.ignorememos = ignore_memos
 784          meta.memo_size = memo_size
 785          meta.input_decoder = codecs.getdecoder(input_decoding)      # from ascii to unicode
 786          meta.output_encoder = codecs.getencoder(input_decoding)     # and back to ascii
 787          meta.return_ascii = return_ascii
 788          meta.header = header = yo._TableHeader(yo._dbfTableHeader)
 789          header.extra(yo._dbfTableHeaderExtra)
 790          header.data()           #force update of date
 791          yo.codepage(codepage or default_codepage)
 792          if filename == ':memory:':
 793              yo._index = []
 794              yo._table = []
 795              meta.ondisk = False
 796              meta.inmemory = True
 797              meta.memoname = ':memory:'
 798          else:
 799              base, ext = os.path.splitext(filename)
 800              if ext == '':
 801                  meta.filename = base + '.dbf'
 802              meta.memoname = base + yo._memoext
 803              meta.ondisk = True
 804              meta.inmemory = False
 805          if field_specs:
 806              if meta.ondisk:
 807                  meta.dfd = open(meta.filename, 'w+b')
 808                  meta.newmemofile = True
 809              yo.add_fields(field_specs)
 810              return
 811          dfd = meta.dfd = open(meta.filename, 'r+b')
 812          dfd.seek(0)
 813          meta.header = header = yo._TableHeader(dfd.read(32))
 814          if not header.version() in yo._supported_tables:
 815              dfd.close()
 816              dfd = None
 817              raise TypeError("Unsupported dbf type: %s [%x]" % (version_map.get(meta.header.version, 'Unknown: %s' % meta.header.version), ord(meta.header.version)))
 818          yo.codepage(meta.header.codepage())
 819          fieldblock = dfd.read(header.start() - 32)
 820          for i in range(len(fieldblock)//32+1):
 821              fieldend = i * 32
 822              if fieldblock[fieldend] == '\x0d':
 823                  break
 824          else:
 825              raise DbfError("corrupt field structure in header")
 826          if len(fieldblock[:fieldend]) % 32 != 0:
 827              raise DbfError("corrupt field structure in header")
 828          header.fields(fieldblock[:fieldend])
 829          header.extra(fieldblock[fieldend+1:])     # skip trailing \r
 830          yo._initializeFields()
 831          yo._checkMemoIntegrity()
 832          meta.current = -1
 833          dfd.seek(0)
 834          if meta_only:
 835              yo.close(keep_table=False, keep_memos=False)
 836          elif read_only:
 837              yo.close(keep_table=True, keep_memos=keep_memos)
838 - def __iter__(yo):
839 return yo.DbfIterator(yo)
840 - def __len__(yo):
841 return yo._meta.header.recordcount()
842 - def __nonzero__(yo):
843 return yo._meta.header.recordcount() != 0
844 - def __repr__(yo):
845 if yo._read_only: 846 return __name__ + ".Table('%s', read_only=True)" % yo._meta.filename 847 elif yo._meta_only: 848 return __name__ + ".Table('%s', meta_only=True)" % yo._meta.filename 849 else: 850 return __name__ + ".Table('%s')" % yo._meta.filename
851 - def __str__(yo):
852 if yo._read_only: 853 status = "read-only" 854 elif yo._meta_only: 855 status = "meta-only" 856 else: 857 status = "read/write" 858 str = """ 859 Table: %s 860 Type: %s 861 Codepage: %s 862 Status: %s 863 Last updated: %s 864 Record count: %d 865 Field count: %d 866 Record length: %d 867 """ % (yo.filename(), version_map.get(yo._meta.header.version(), 'unknown - ' + hex(ord(yo._meta.header.version()))), 868 yo.codepage(), status, yo.last_update(), len(yo), yo.field_count(), yo.record_length()) 869 str += "\n --Fields--\n" 870 for i in range(len(yo._meta.fields)): 871 str += " " + yo._fieldLayout(i) + "\n" 872 return str
873 - def field_count(yo):
874 "the number of fields in the table" 875 return yo._meta.header.fieldcount()
876 - def field_names(yo):
877 "a list of the fields in the table" 878 return yo._meta.fields[:]
879 - def filename(yo):
880 "table's file name, including path (if specified on open)" 881 return yo._meta.filename
882 - def last_update(yo):
883 "date of last update" 884 return yo._meta.header.update()
885 - def memoname(yo):
886 "table's memo name (if path included in filename on open)" 887 return yo._meta.memoname
888 - def record_length(yo):
889 "number of bytes in a record" 890 return yo._meta.header.recordlength()
891 - def record_number(yo):
892 "index number of the current record" 893 return yo._meta.current
894 - def supported_tables(yo):
895 "allowable table types" 896 return yo._supported_tables
897 - def use_deleted(yo, new_setting=None):
898 if new_setting is None: 899 return yo._use_deleted 900 else: 901 yo._use_deleted = new_setting
902 - def version(yo):
903 "returns the dbf type of the table" 904 return yo._version
 905      def add_fields(yo, field_specs):
 906          """adds field(s) to the table layout; format is Name Type(Length,Decimals)[; Name Type(Length,Decimals)[...]]
 907          backup table is created with _backup appended to name
 908          then modifies current structure"""
 909          all_records = [record for record in yo]
 910          if yo:
 911              yo.create_backup()
 912          yo._meta.blankrecord = None
 913          meta = yo._meta
 914          offset = meta.header.recordlength()
 915          fields = yo._list_fields(field_specs, sep=';')
 916          for field in fields:
 917              try:
 918                  name, format = field.split()
 919                  if name[0] == '_' or name[0].isdigit() or not name.replace('_','').isalnum():
 920                      raise DbfError("Field names cannot start with _ or digits, and can only contain the _, letters, and digits")
 921                  name = name.lower()
 922                  if name in meta.fields:
 923                      raise DbfError("Field '%s' already exists" % name)
 924                  field_type = format[0].upper()
 925                  if len(name) > 10:
 926                      raise DbfError("Maximum field name length is 10. '%s' is %d characters long." % (name, len(name)))
 927                  if not field_type in meta.fieldtypes.keys():
 928                      raise DbfError("Unknown field type: %s" % field_type)
 929                  length, decimals = yo._meta.fieldtypes[field_type]['Init'](format)
 930              except ValueError:
 931                  raise DbfError("invalid field specifier: %s" % field)
 932              start = offset
 933              end = offset + length
 934              offset = end
 935              meta.fields.append(name)
 936              meta[name] = {'type':field_type, 'start':start, 'length':length, 'end':end, 'decimals':decimals, 'flags':0}
 937              if meta[name]['type'] in yo._memotypes and meta.memo is None:
 938                  meta.memo = yo._memoClass(meta)
 939              for record in yo:
 940                  record[name] = meta.fieldtypes[field_type]['Blank']()
 941          yo._buildHeaderFields()
 942          yo._updateDisk()
 943      def append(yo, kamikaze='', drop=False, multiple=1):
 944          "adds <multiple> blank records, and fills fields with dict/tuple values if present"
 945          if not yo.field_count():
 946              raise DbfError("No fields defined, cannot append")
 947          dictdata = False
 948          tupledata = False
 949          if not isinstance(kamikaze, _DbfRecord):
 950              if isinstance(kamikaze, dict):
 951                  dictdata = kamikaze
 952                  kamikaze = ''
 953              elif isinstance(kamikaze, tuple):
 954                  tupledata = kamikaze
 955                  kamikaze = ''
 956          newrecord = _DbfRecord(recnum=yo._meta.header.recordcount(), layout=yo._meta, kamikaze=kamikaze)
 957          yo._table.append(newrecord)
 958          yo._index.append(yo._meta.header.recordcount())
 959          yo._meta.header.recordcount(yo._meta.header.recordcount() + 1)
 960          if dictdata:
 961              newrecord.gather_fields(dictdata, drop)
 962          elif tupledata:
 963              for index, item in enumerate(tupledata):
 964                  newrecord[index] = item
 965          elif kamikaze == str:
 966              for field in yo._meta.memofields:
 967                  newrecord[field] = ''
 968          elif kamikaze:
 969              for field in yo._meta.memofields:
 970                  newrecord[field] = kamikaze[field]
 971          multiple -= 1
 972          if multiple:
 973              data = newrecord._data
 974              single = yo._meta.header.recordcount()
 975              total = single + multiple
 976              while single < total:
 977                  multi_record = _DbfRecord(single, yo._meta, kamikaze=data)
 978                  yo._table.append(multi_record)
 979                  yo._index.append(single)
 980                  for field in yo._meta.memofields:
 981                      multi_record[field] = newrecord[field]
 982                  single += 1
 983              yo._meta.header.recordcount(total)      # += multiple
 984              yo._meta.current = yo._meta.header.recordcount() - 1
 985              newrecord = multi_record
 986          yo._updateDisk(headeronly=True)
 987          return newrecord
988 - def bof(yo):
989 "moves record pointer to previous usable record; returns True if no more usable records" 990 while yo._meta.current > 0: 991 yo._meta.current -= 1 992 if yo.use_deleted() or not yo.current().has_been_deleted(): 993 break 994 else: 995 yo._meta.current = -1 996 return True 997 return False
998 - def bottom(yo, get_record=False):
999 """sets record pointer to bottom of table 1000 if get_record, seeks to and returns last (non-deleted) record 1001 DbfError if table is empty 1002 Bof if all records deleted and use_deleted() is False""" 1003 yo._meta.current = yo._meta.header.recordcount() 1004 if get_record: 1005 try: 1006 return yo.prev() 1007 except Bof: 1008 yo._meta.current = yo._meta.header.recordcount() 1009 raise Eof()
1010 - def close(yo, keep_table=False, keep_memos=False):
1011 """closes disk files 1012 ensures table data is available if keep_table 1013 ensures memo data is available if keep_memos""" 1014 if keep_table: 1015 yo._table # force read of table if not already in memory 1016 else: 1017 if '_index' in dir(yo): 1018 del yo._table 1019 del yo._index 1020 yo._meta.inmemory = True 1021 if yo._meta.ondisk: 1022 yo._meta.dfd.close() 1023 yo._meta.dfd = None 1024 if '_index' in dir(yo): 1025 yo._read_only = True 1026 else: 1027 yo._meta_only = True 1028 if yo._meta.mfd is not None: 1029 if not keep_memos: 1030 yo._meta.ignorememos = True 1031 else: 1032 memo_fields = [] 1033 for field in yo.field_names(): 1034 if yo.is_memotype(field): 1035 memo_fields.append(field) 1036 for record in yo: 1037 for field in memo_fields: 1038 record[field] = record[field] 1039 yo._meta.mfd.close() 1040 yo._meta.mfd = None 1041 yo._meta.ondisk = False
1042 - def codepage(yo, cp=None):
1043 result = yo._meta.header.codepage(cp) 1044 if cp is None: 1045 return "%s (%s)" % code_pages[result] 1046 else: 1047 yo._meta.decoder = codecs.getdecoder(code_pages[result][0]) 1048 yo._meta.encoder = codecs.getencoder(code_pages[result][0])
1049 - def create_backup(yo, new_name=None, fields=None):
1050 "creates a backup table -- ignored if memory table" 1051 if new_name is None: 1052 new_name = os.path.splitext(yo.filename())[0] + '_backup' 1053 if yo.filename().startswith(':memory:'): 1054 return 1055 fields = yo._list_fields(fields) 1056 bkup_field_specs = yo.structure(fields) 1057 bkup_table = yo.__class__(filename=new_name, field_specs=bkup_field_specs) 1058 for record in yo: 1059 bkup_table.append(record.scatter_fields(), drop=True) 1060 bkup_table.close()
1061 - def current(yo, index=False):
1062 "returns current logical record, or its index" 1063 if yo._meta.current < 0: 1064 raise Bof() 1065 elif yo._meta.current >= yo._meta.header.recordcount(): 1066 raise Eof() 1067 if index: 1068 return yo._meta.current 1069 return yo._table[yo._index[yo._meta.current]]
1070 - def delete_fields(yo, doomed):
1071 """removes field(s) from the table 1072 creates backup files with _backup appended to the file name, 1073 then modifies current structure""" 1074 doomed = yo._list_fields(doomed) 1075 for victim in doomed: 1076 if victim not in yo._meta.fields: 1077 raise DbfError("field %s not in table -- delete aborted" % victim) 1078 all_records = [record for record in yo] 1079 yo.create_backup() 1080 for victim in doomed: 1081 yo._meta.fields.pop(yo._meta.fields.index(victim)) 1082 start = yo._meta[victim]['start'] 1083 end = yo._meta[victim]['end'] 1084 for record in yo: 1085 record._data = record._data[:start] + record._data[end:] 1086 for field in yo._meta.fields: 1087 if yo._meta[field]['start'] == end: 1088 end = yo._meta[field]['end'] 1089 yo._meta[field]['start'] = start 1090 yo._meta[field]['end'] = start + yo._meta[field]['length'] 1091 start = yo._meta[field]['end'] 1092 yo._buildHeaderFields() 1093 yo._updateDisk()
1094 - def eof(yo):
1095 "moves record pointer to next usable record; returns True if no more usable records" 1096 while yo._meta.current < yo._meta.header.recordcount() - 1: 1097 yo._meta.current += 1 1098 if yo.use_deleted() or not yo.current().has_been_deleted(): 1099 break 1100 else: 1101 yo._meta.current = yo._meta.header.recordcount() 1102 return True 1103 return False
1104      def export(yo, records=None, filename=None, field_specs=None, format='csv', header=True):
1105          """writes the table using CSV or tab-delimited format, using the filename
1106          given if specified, otherwise the table name"""
1107          if filename is None:
1108              filename = yo.filename()
1109          field_specs = yo._list_fields(field_specs)
1110          if records is None:
1111              records = yo
1112          format = format.lower()
1113          if format not in ('csv', 'tab'):
1114              raise DbfError("export format: csv or tab, not %s" % format)
1115          base, ext = os.path.splitext(filename)
1116          if ext.lower() in ('', '.dbf'):
1117              filename = base + "." + format
1118          fd = open(filename, 'wb')
1119          try:
1120              if format == 'csv':
1121                  csvfile = csv.writer(fd, dialect='dbf')
1122                  if header:
1123                      csvfile.writerow(field_specs)
1124                  for record in records:
1125                      fields = []
1126                      for fieldname in field_specs:
1127                          fields.append(record[fieldname])
1128                      csvfile.writerow(fields)
1129              else:
1130                  if header:
1131                      fd.write('\t'.join(field_specs) + '\n')
1132                  for record in records:
1133                      fields = []
1134                      for fieldname in field_specs:
1135                          fields.append(str(record[fieldname]))
1136                      fd.write('\t'.join(fields) + '\n')
1137          finally:
1138              fd.close()
1139              fd = None
1140          return len(records)
1141 - def get_record(yo, recno):
1142 "returns record at physical_index[recno]" 1143 return yo._table[recno]
1144 - def goto(yo, criteria):
1145 """changes the record pointer to the first matching (non-deleted) record 1146 criteria should be either a tuple of tuple(value, field, func) triples, 1147 or an integer to go to""" 1148 if isinstance(criteria, int): 1149 if not -yo._meta.header.recordcount() <= criteria < yo._meta.header.recordcount(): 1150 raise IndexError("Record %d does not exist" % criteria) 1151 if criteria < 0: 1152 criteria += yo._meta.header.recordcount() 1153 yo._meta.current = criteria 1154 return yo.current() 1155 criteria = _normalize_tuples(tuples=criteria, length=3, filler=[_nop]) 1156 specs = tuple([(field, func) for value, field, func in criteria]) 1157 match = tuple([value for value, field, func in criteria]) 1158 current = yo.current(index=True) 1159 matchlen = len(match) 1160 while not yo.Eof(): 1161 record = yo.current() 1162 results = record(*specs) 1163 if results == match: 1164 return record 1165 return yo.goto(current)
1166 - def index(yo, sort=None, reverse=False):
1167 "orders the table using the sort provided; removes index if no sort provided" 1168 if sort is None: 1169 results = [] 1170 for field, func in yo._meta.index: 1171 results.append("%s(%s)" % (func.__name__, field)) 1172 return ', '.join(results + ['reverse=%s' % yo._meta.index_reversed]) 1173 yo._meta.index_reversed = reverse 1174 if sort == 'ORIGINAL': 1175 yo._index = range(yo._meta.header.recordcount()) 1176 yo._meta.index = [] 1177 if reverse: 1178 yo._index.reverse() 1179 return 1180 new_sort = _normalize_tuples(tuples=sort, length=2, filler=[_nop]) 1181 yo._meta.index = tuple(new_sort) 1182 yo._meta.orderresults = [''] * len(yo) 1183 for record in yo: 1184 yo._meta.orderresults[record.record_number()] = record() 1185 yo._index.sort(key=lambda i: yo._meta.orderresults[i], reverse=reverse)
1186 - def is_memotype(yo, name):
1187 "returns True if name is a memo type field" 1188 return yo._meta[name]['type'] in yo._memotypes
1189 - def new(yo, filename, _field_specs=None):
1190 "returns a new table of the same type" 1191 if _field_specs is None: 1192 _field_specs = yo.structure() 1193 if filename != ':memory:': 1194 path, name = os.path.split(filename) 1195 if path == "": 1196 filename = os.path.join(os.path.split(yo.filename)[0], filename) 1197 elif name == "": 1198 filename = os.path.join(path, os.path.split(yo.filename)[1]) 1199 return yo.__class__(filename, _field_specs)
1200 - def next(yo):
1201 "set record pointer to next (non-deleted) record, and return it" 1202 if yo.eof(): 1203 raise Eof() 1204 return yo.current()
1205 - def pack(yo, _pack=True):
1206 "physically removes all deleted records" 1207 newtable = [] 1208 newindex = [] 1209 i = 0 1210 for record in yo._table: 1211 if record.has_been_deleted() and _pack: 1212 record._recnum = -1 1213 else: 1214 record._recnum = i 1215 newtable.append(record) 1216 newindex.append(i) 1217 i += 1 1218 yo._table = newtable 1219 yo._index = newindex 1220 yo._meta.header.recordcount(i) 1221 yo._current = -1 1222 yo._meta.index = '' 1223 yo._updateDisk()
1224 - def prev(yo):
1225 "set record pointer to previous (non-deleted) record, and return it" 1226 if yo.bof(): 1227 raise Bof 1228 return yo.current()
1229 - def query(yo, sql=None, python=None):
1230 "uses exec to perform python queries on the table" 1231 if python is None: 1232 raise DbfError("query: python parameter must be specified") 1233 possible = DbfList(desc="%s --> %s" % (yo.filename(), python)) 1234 query_result = {} 1235 select = 'query_result["keep"] = %s' % python 1236 g = {} 1237 for record in yo: 1238 query_result['keep'] = False 1239 g['query_result'] = query_result 1240 exec select in g, record 1241 if query_result['keep']: 1242 possible.append(yo, record) 1243 return possible
1244 - def rename_field(yo, oldname, newname):
1245 "renames an existing field" 1246 if not oldname in yo._meta.fields: 1247 raise DbfError("field --%s-- does not exist -- cannot rename it." % oldname) 1248 if newname[0] == '_' or newname[0].isdigit() or not newname.replace('_','').isalnum(): 1249 raise DbfError("field names cannot start with _ or digits, and can only contain the _, letters, and digits") 1250 newname = newname.lower() 1251 if newname in yo._meta.fields: 1252 raise DbfError("field --%s-- already exists" % newname) 1253 if len(newname) > 10: 1254 raise DbfError("maximum field name length is 10. '%s' is %d characters long." % (newname, len(newname))) 1255 yo._meta[newname] = yo._meta[oldname] 1256 yo._meta.fields[yo._meta.fields.index(oldname)] = newname 1257 yo._buildHeaderFields() 1258 yo._updateDisk(headeronly=True)
1259 - def search(yo, match, fuzzy=None, indices=False):
1260 """searches using a binary algorythm 1261 looking for records that match the criteria in match, which is a tuple 1262 with a data item per ordered field. table must be sorted. if index, 1263 returns a list of records' indices from the current sort order. 1264 """ 1265 if yo._meta.index is None: 1266 raise DbfError('table must be indexed to use Search') 1267 matchlen = len(match) 1268 if fuzzy: 1269 matchlen -= 1 1270 fuzzy_match = match[-1] 1271 fuzzy_field = yo._meta.index[matchlen][0] 1272 match = match[:-1] 1273 records = DbfList(desc="%s --> search: index=%s, match=%s, fuzzy=%s(%s))" % (yo.filename(), yo.index(), match, fuzzy.__name__, fuzzy_match)) 1274 else: 1275 records = DbfList(desc="%s --> search: index=%s, match=%s)" % (yo.filename(), yo.index(), match)) 1276 if indices: 1277 records = [] 1278 if not isinstance(match, tuple): 1279 match = tuple(match) 1280 segment = len(yo) 1281 current = 0 1282 toosoon = True 1283 notFound = True 1284 while notFound: 1285 segment = segment // 2 1286 if toosoon: 1287 current += segment 1288 else: 1289 current -= segment 1290 if current % 2: 1291 segment += 1 1292 if current == len(yo) or segment == 0: 1293 break 1294 value = yo._meta.orderresults[yo[current].record_number()][:matchlen] 1295 if value < match: 1296 toosoon = True 1297 elif value > match: 1298 toosoon = False 1299 else: 1300 notFound = False 1301 break 1302 if current == 0: 1303 break 1304 if notFound: 1305 return records 1306 while current > 0: 1307 current -= 1 1308 value = yo._meta.orderresults[yo[current].record_number()][:matchlen] 1309 if value != match: 1310 current += 1 1311 break 1312 while True: 1313 value = yo._meta.orderresults[yo[current].record_number()][:matchlen] 1314 if value != match: 1315 break 1316 if yo.use_deleted() or not yo[current].has_been_deleted(): 1317 if indices: 1318 records.append(current) 1319 else: 1320 records.append(yo, yo[current]) 1321 current += 1 1322 if current == len(yo): 1323 break 1324 if fuzzy: 1325 if indices: 1326 records = [rec for rec in records if fuzzy(yo[rec][fuzzy_field]) == fuzzy_match] 1327 else: 1328 records[:] = [rec for rec in records if fuzzy(rec[fuzzy_field]) == fuzzy_match] 1329 return records
1330 - def size(yo, field):
1331 "returns size of field as a tuple of (length, decimals)" 1332 if field in yo: 1333 return (yo._meta[field]['length'], yo._meta[field]['decimals']) 1334 raise DbfError("%s is not a field in %s" % (field, yo.filename()))
1335 - def structure(yo, fields=None):
1336 """return list of fields suitable for creating same table layout 1337 @param fields: list of fields or None for all fields""" 1338 field_specs = [] 1339 fields = yo._list_fields(fields) 1340 try: 1341 for name in fields: 1342 field_specs.append(yo._fieldLayout(yo.field_names().index(name))) 1343 except ValueError: 1344 raise DbfError("field --%s-- does not exist" % name) 1345 return field_specs
1346 - def top(yo, get_record=False):
1347 """sets record pointer to top of table; if get_record, seeks to and returns first (non-deleted) record 1348 DbfError if table is empty 1349 Eof if all records are deleted and use_deleted() is False""" 1350 yo._meta.current = -1 1351 if get_record: 1352 try: 1353 return yo.next() 1354 except Eof: 1355 yo._meta.current = -1 1356 raise Bof()
1357 - def type(yo, field):
1358 "returns type of field" 1359 if field in yo: 1360 return yo._meta[field]['type'] 1361 raise DbfError("%s is not a field in %s" % (field, yo.filename()))
1362 - def zap(yo, areyousure=False):
1363 """removes all records from table -- this cannot be undone! 1364 areyousure must be True, else error is raised""" 1365 if areyousure: 1366 yo._table = [] 1367 yo._index = [] 1368 yo._meta.header.recordcount(0) 1369 yo._current = -1 1370 yo._meta.index = '' 1371 yo._updateDisk() 1372 else: 1373 raise DbfError("You must say you are sure to wipe the table")
1374  # these assignments are for backward compatibility, and will go away
1375 -class Db3Table(DbfTable):
1376 """Provides an interface for working with dBase III tables.""" 1377 _version = 'dBase III Plus' 1378 _versionabbv = 'db3' 1379 _fieldtypes = { 1380 'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter}, 1381 'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate}, 1382 'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical}, 1383 'M' : {'Type':'Memo', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo}, 1384 'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addNumeric} } 1385 _memoext = '.dbt' 1386 _memotypes = ('M',) 1387 _memoClass = _Db3Memo 1388 _yesMemoMask = '\x80' 1389 _noMemoMask = '\x7f' 1390 _fixed_fields = ('D','L','M') 1391 _variable_fields = ('C','N') 1392 _character_fields = ('C','M') 1393 _decimal_fields = ('N',) 1394 _numeric_fields = ('N',) 1395 _dbfTableHeader = array('c', '\x00' * 32) 1396 _dbfTableHeader[0] = '\x03' # version - dBase III w/o memo's 1397 _dbfTableHeader[8:10] = array('c', io.packShortInt(33)) 1398 _dbfTableHeader[10] = '\x01' # record length -- one for delete flag 1399 _dbfTableHeader[29] = '\x03' # code page -- 437 US-MS DOS 1400 _dbfTableHeader = _dbfTableHeader.tostring() 1401 _dbfTableHeaderExtra = '' 1402 _supported_tables = ['\x03', '\x83'] 1403 _read_only = False 1404 _meta_only = False 1405 _use_deleted = True
1406 - def _checkMemoIntegrity(yo):
1407 "dBase III specific" 1408 if yo._meta.header.version() == '\x83': 1409 try: 1410 yo._meta.memo = yo._memoClass(yo._meta) 1411 except: 1412 yo._meta.dfd.close() 1413 yo._meta.dfd = None 1414 raise 1415 if not yo._meta.ignorememos: 1416 for field in yo._meta.fields: 1417 if yo._meta[field]['type'] in yo._memotypes: 1418 if yo._meta.header.version() != '\x83': 1419 yo._meta.dfd.close() 1420 yo._meta.dfd = None 1421 raise DbfError("Table structure corrupt: memo fields exist, header declares no memos") 1422 elif not os.path.exists(yo._meta.memoname): 1423 yo._meta.dfd.close() 1424 yo._meta.dfd = None 1425 raise DbfError("Table structure corrupt: memo fields exist without memo file") 1426 break
1427 - def _initializeFields(yo):
1428 "builds the FieldList of names, types, and descriptions" 1429 offset = 1 1430 fieldsdef = yo._meta.header.fields() 1431 if len(fieldsdef) % 32 != 0: 1432 raise DbfError("field definition block corrupt: %d bytes in size" % len(fieldsdef)) 1433 if len(fieldsdef) // 32 != yo.field_count(): 1434 raise DbfError("Header shows %d fields, but field definition block has %d fields" % (yo.field_count(), len(fieldsdef)//32)) 1435 for i in range(yo.field_count()): 1436 fieldblock = fieldsdef[i*32:(i+1)*32] 1437 name = io.unpackStr(fieldblock[:11]) 1438 type = fieldblock[11] 1439 if not type in yo._meta.fieldtypes: 1440 raise DbfError("Unknown field type: %s" % type) 1441 start = offset 1442 length = ord(fieldblock[16]) 1443 offset += length 1444 end = start + length 1445 decimals = ord(fieldblock[17]) 1446 flags = ord(fieldblock[18]) 1447 yo._meta.fields.append(name) 1448 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
1449 -class VfpTable(DbfTable):
1450 version = 'Provides an interface for working with Visual FoxPro 6 tables' 1451 _versionabbv = 'vfp' 1452 _fieldtypes = { 1453 'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter}, 1454 'Y' : {'Type':'Currency', 'Retrieve':io.retrieveCurrency, 'Update':io.updateCurrency, 'Blank':Decimal(), 'Init':io.addVfpCurrency}, 1455 'B' : {'Type':'Double', 'Retrieve':io.retrieveDouble, 'Update':io.updateDouble, 'Blank':float, 'Init':io.addVfpDouble}, 1456 'F' : {'Type':'Float', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':float, 'Init':io.addVfpNumeric}, 1457 'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addVfpNumeric}, 1458 'I' : {'Type':'Integer', 'Retrieve':io.retrieveInteger, 'Update':io.updateInteger, 'Blank':int, 'Init':io.addVfpInteger}, 1459 'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical}, 1460 'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate}, 1461 'T' : {'Type':'DateTime', 'Retrieve':io.retrieveVfpDateTime, 'Update':io.updateVfpDateTime, 'Blank':DateTime.now, 'Init':io.addVfpDateTime}, 1462 'M' : {'Type':'Memo', 'Retrieve':io.retrieveVfpMemo, 'Update':io.updateVfpMemo, 'Blank':str, 'Init':io.addVfpMemo}, 1463 'G' : {'Type':'General', 'Retrieve':io.retrieveVfpMemo, 'Update':io.updateVfpMemo, 'Blank':str, 'Init':io.addVfpMemo}, 1464 'P' : {'Type':'Picture', 'Retrieve':io.retrieveVfpMemo, 'Update':io.updateVfpMemo, 'Blank':str, 'Init':io.addVfpMemo}, 1465 '0' : {'Type':'_NullFlags', 'Retrieve':io.unsupportedType, 'Update':io.unsupportedType, 'Blank':int, 'Init':None} } 1466 _memoext = '.fpt' 1467 _memotypes = ('G','M','P') 1468 _memoClass = _VfpMemo 1469 _yesMemoMask = '\x30' # 0011 0000 1470 _noMemoMask = '\x30' # 0011 0000 1471 _fixed_fields = ('B','D','G','I','L','M','P','T','Y') 1472 _variable_fields = ('C','F','N') 1473 _character_fields = ('C','M') # field representing character data 1474 _decimal_fields = ('F','N') 1475 _numeric_fields = ('B','F','I','N','Y') 1476 _supported_tables = ('\x30',) 1477 _dbfTableHeader = array('c', '\x00' * 32) 1478 _dbfTableHeader[0] = '\x30' # version - Foxpro 6 0011 0000 1479 _dbfTableHeader[8:10] = array('c', io.packShortInt(33+263)) 1480 _dbfTableHeader[10] = '\x01' # record length -- one for delete flag 1481 _dbfTableHeader[29] = '\x03' # code page -- 437 US-MS DOS 1482 _dbfTableHeader = _dbfTableHeader.tostring() 1483 _dbfTableHeaderExtra = '\x00' * 263 1484 _use_deleted = True
1485      def _checkMemoIntegrity(yo):
1486          if os.path.exists(yo._meta.memoname):
1487              try:
1488                  yo._meta.memo = yo._memoClass(yo._meta)
1489              except:
1490                  yo._meta.dfd.close()
1491                  yo._meta.dfd = None
1492                  raise
1493          if not yo._meta.ignorememos:
1494              for field in yo._meta.fields:
1495                  if yo._meta[field]['type'] in yo._memotypes:
1496                      if not os.path.exists(yo._meta.memoname):
1497                          yo._meta.dfd.close()
1498                          yo._meta.dfd = None
1499                          raise DbfError("Table structure corrupt: memo fields exist without memo file")
1500                      break
1501      def _initializeFields(yo):
1502          "builds the FieldList of names, types, and descriptions"
1503          offset = 1
1504          fieldsdef = yo._meta.header.fields()
1505          for i in range(yo.field_count()):
1506              fieldblock = fieldsdef[i*32:(i+1)*32]
1507              name = io.unpackStr(fieldblock[:11])
1508              type = fieldblock[11]
1509              if not type in yo._meta.fieldtypes:
1510                  raise DbfError("Unknown field type: %s" % type)
1511              elif type == '0':
1512                  return      # ignore nullflags
1513              start = io.unpackLongInt(fieldblock[12:16])
1514              length = ord(fieldblock[16])
1515              offset += length
1516              end = start + length
1517              decimals = ord(fieldblock[17])
1518              flags = ord(fieldblock[18])
1519              yo._meta.fields.append(name)
1520              yo._meta[name] = {'type':type, 'start':start, 'length':length, 'end':end, 'decimals':decimals, 'flags':flags}
1521  class DbfList(object):
1522      "list of Dbf records, with set-like behavior"
1523      _desc = ''
1524      def __init__(yo, table=None, new_records=None, desc=None):
1525          yo._list = []
1526          yo._set = set()
1527          yo._current = -1
1528          if isinstance(new_records, DbfList):
1529              yo._list = new_records._list
1530              for item in yo._list:
1531                  yo._set.add(item)
1532              yo._current = 0
1533          elif new_records is not None:
1534              for record in new_records:
1535                  item = (table, record.record_number())
1536                  if item not in yo._set:
1537                      yo._set.add(item)
1538                      yo._list.append(item)
1539              yo._current = 0
1540          if desc is not None:
1541              yo._desc = desc
1542      def __add__(yo, other):
1543          if isinstance(other, DbfList):
1544              result = DbfList()
1545              result._set = set(yo._set)
1546              result._list[:] = yo._list[:]
1547              for item in other._list:
1548                  if item not in result._set:
1549                      result._set.add(item)
1550                      result._list.append(item)
1551              result._current = 0 if result else -1
1552              return result
1553          return NotImplemented
1554      def __delitem__(yo, key):
1555          if isinstance(key, int):
1556              loc = yo._current - len(yo._list) + 1   # +1 = eof, 0 = at last record, -any = somewhere else
1557              item = yo._list.pop(key)
1558              yo._set.remove(item)
1559              if loc > 0:
1560                  yo._current = len(yo._list)
1561              elif loc == 0 or yo._current >= len(yo._list):
1562                  yo._current = len(yo._list) - 1
1563          elif isinstance(key, slice):
1564              loc = yo._current - len(yo._list) + 1   # +1 = eof, 0 = at last record, -any = somewhere else
1565              yo._set.difference_update(yo._list[key])
1566              yo._list.__delitem__(key)
1568              if loc > 0:
1569                  yo._current = len(yo._list)
1570              elif loc == 0 or yo._current >= len(yo._list):
1571                  yo._current = len(yo._list) - 1
1572          else:
1573              raise TypeError
1574      def __getitem__(yo, key):
1575          if isinstance(key, int):
1576              count = len(yo._list)
1577              if not -count <= key < count:
1578                  raise IndexError("Record %d is not in list." % key)
1579              return yo._get_record(*yo._list[key])
1580          elif isinstance(key, slice):
1581              result = DbfList()
1582              result._list[:] = yo._list[key]
1583              result._set.update(result._list)
1584              result._current = 0 if result else -1
1585              return result
1586          else:
1587              raise TypeError
1588      def __iter__(yo):
1589          return (table.get_record(recno) for table, recno in yo._list)
1590      def __len__(yo):
1591          return len(yo._list)
1592      def __nonzero__(yo):
1593          return len(yo) > 0
1594      def __radd__(yo, other):
1595          return yo.__add__(other)
1596      def __repr__(yo):
1597          if yo._desc:
1598              return "DbfList(%s - %d records)" % (yo._desc, len(yo._list))
1599          else:
1600              return "DbfList(%d records)" % len(yo._list)
1601      def __rsub__(yo, other):
1602          if isinstance(other, DbfList):
1603              result = DbfList()
1604              result._list[:] = other._list[:]
1605              result._set = set(other._set)
1606              lost = set()
1607              for item in yo._list:
1608                  if item in result._set:
1609                      result._set.remove(item)
1610                      lost.add(item)
1611              result._list = [item for item in result._list if item not in lost]
1612              result._current = 0 if result else -1
1613              return result
1614          return NotImplemented
1615      def __sub__(yo, other):
1616          if isinstance(other, DbfList):
1617              result = DbfList()
1618              result._list[:] = yo._list[:]
1619              result._set = set(yo._set)
1620              lost = set()
1621              for item in other._list:
1622                  if item in result._set:
1623                      result._set.remove(item)
1624                      lost.add(item)
1625              result._list = [item for item in result._list if item not in lost]
1626              result._current = 0 if result else -1
1627              return result
1628          return NotImplemented
1629      def _maybe_add(yo, table=None, record=None):
1630          if record is None:
1631              item = table
1632          else:
1633              item = table, record.record_number()
1634          if item not in yo._set:
1635              yo._set.add(item)
1636              yo._list.append(item)
1637      def _get_record(yo, table=None, rec_no=None):
1638          if table is rec_no is None:
1639              table, rec_no = yo._list[yo._current]
1640          return table.get_record(rec_no)
1641      def append(yo, table, new_record):
1642          yo._maybe_add(table, new_record)
1643          yo._current = len(yo._list) - 1
1644      def bottom(yo):
1645          if yo._list:
1646              yo._current = len(yo._list) - 1
1647              return yo._get_record()
1648          raise DbfError("DbfList is empty")
1649      def current(yo):
1650          if yo._current < 0:
1651              raise Bof()
1652          elif yo._current == len(yo._list):
1653              raise Eof()
1654          return yo._get_record()
1655      def extend(yo, table=None, new_records=None):
1656          if isinstance(new_records, DbfList):
1657              for item in new_records._list:
1658                  yo._maybe_add(item)
1659          else:
1660              for record in new_records:
1661                  yo._maybe_add(table, record)
1662          yo._current = len(yo._list) - 1
1663      def goto(yo, index_number):
1664          if yo._list:
1665              if 0 <= index_number < len(yo._list):
1666                  yo._current = index_number
1667                  return yo._get_record()
1668              raise DbfError("index %d not in DbfList of %d records" % (index_number, len(yo._list)))
1669          raise DbfError("DbfList is empty")
1670      def insert(yo, i, table, record):
1671          item = table, record.record_number()
1672          if item not in yo._set:
1673              yo._set.add(item)
1674              yo._list.insert(i, item)
1675      def next(yo):
1676          if yo._current < len(yo._list):
1677              yo._current += 1
1678              if yo._current < len(yo._list):
1679                  return yo._get_record()
1680          raise Eof()
1681      def pop(yo, index=None):
1682          loc = yo._current - len(yo._list) + 1
1683          if index is None:
1684              table, recno = yo._list.pop()
1685              yo._set.remove((table, recno))
1686          else:
1687              table, recno = yo._list.pop(index)
1688              yo._set.remove((table, recno))
1689          if loc > 0:
1690              yo._current = len(yo._list)
1691          elif loc == 0 or yo._current >= len(yo._list):
1692              yo._current = len(yo._list) - 1
1693          return yo._get_record(table, recno)
1694      def prev(yo):
1695          if yo._current >= 0:
1696              yo._current -= 1
1697              if yo._current > -1:
1698                  return yo._get_record()
1699          raise Bof()
1700      def reverse(yo):
1701          return yo._list.reverse()
1702      def top(yo):
1703          if yo._list:
1704              yo._current = 0
1705              return yo._get_record()
1706          raise DbfError("DbfList is empty")
1707      def sort(yo, key=None, reverse=False):
1708          return yo._list.sort(key=key, reverse=reverse)
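
A minimal usage sketch for DbfList, built only from the methods defined above. It is not part of the module; 'orders' stands in for an already-opened DbfTable, and 'some_records' / 'other_records' for lists of its records (each record exposing record_number(), as the code assumes):

    # Illustrative only -- DbfList stores (table, record_number) pairs, not record copies.
    picked = DbfList(table=orders, new_records=some_records, desc="hand-picked orders")
    picked.append(orders, some_records[0])     # duplicates are silently ignored (set-like)
    print repr(picked)                         # -> "DbfList(hand-picked orders - <n> records)"
    first = picked.top()                       # position at, and return, the first record
    merged = picked + DbfList(table=orders, new_records=other_records)   # union via __add__
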
1709  class DbfCsv(csv.Dialect):
1710      "csv format for exporting tables"
1711      delimiter = ','
1712      doublequote = True
1713      escapechar = None
1714      lineterminator = '\r\n'
1715      quotechar = '"'
1716      skipinitialspace = True
1717      quoting = csv.QUOTE_NONNUMERIC
1718  csv.register_dialect('dbf', DbfCsv)
1719  
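Once registered, the dialect can be used by name anywhere the standard csv module accepts one. A small sketch (the file name and row values are placeholders, not from the module):

    # Writes two rows with the 'dbf' dialect registered above; QUOTE_NONNUMERIC
    # quotes the text cells and leaves the numbers bare.
    import csv

    with open('export.csv', 'wb') as fh:        # Python 2: csv prefers binary mode
        writer = csv.writer(fh, dialect='dbf')
        writer.writerow(['name', 'qty', 'price'])
        writer.writerow(['widget', 3, 1.25])
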
1720  def _nop(value):
1721      "returns parameter unchanged"
1722      return value
1723  def _normalize_tuples(tuples, length, filler):
1724      "ensures each tuple is the same length, using filler[-missing] for the gaps"
1725      final = []
1726      for t in tuples:
1727          if len(t) < length:
1728              final.append( tuple([item for item in t] + filler[len(t)-length:]) )
1729          else:
1730              final.append(t)
1731      return tuple(final)
1732  def _codepage_lookup(cp):
1733      if cp not in code_pages:
1734          for code_page in sorted(code_pages.keys()):
1735              sd, ld = code_pages[code_page]
1736              if cp == sd or cp == ld:
1737                  if sd is None:
1738                      raise DbfError("Unsupported codepage: %s" % ld)
1739                  cp = code_page
1740                  break
1741          else:
1742              raise DbfError("Unsupported codepage: %s" % cp)
1743      sd, ld = code_pages[cp]
1744      return cp, sd, ld
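The lookup accepts a raw LDID byte, a Python codec name, or the long description, and always hands back the normalized (LDID, codec, description) triple. A few calls traced against the code_pages map the function consults ('klingon' is an obviously made-up value to show the failure path):

    _codepage_lookup('\x01')           # -> ('\x01', 'cp437', 'U.S. MS-DOS')
    _codepage_lookup('cp437')          # -> ('\x01', 'cp437', 'U.S. MS-DOS')   first match in sorted LDID order
    _codepage_lookup('U.S. MS-DOS')    # -> ('\x01', 'cp437', 'U.S. MS-DOS')
    _codepage_lookup('klingon')        # raises DbfError("Unsupported codepage: klingon")
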
1745  def ascii(new_setting=None):
1746      "get/set return_ascii setting"
1747      global return_ascii
1748      if new_setting is None:
1749          return return_ascii
1750      else:
1751          return_ascii = new_setting
1752  def codepage(cp=None):
1753      "get/set default codepage for any new tables"
1754      global default_codepage
1755      cp, sd, ld = _codepage_lookup(cp or default_codepage)
1756      default_codepage = sd
1757      return "%s (LDID: 0x%02x - %s)" % (sd, ord(cp), ld)
1758  def encoding(cp=None):
1759      "get/set default encoding for non-unicode strings passed into a table"
1760      global input_decoding
1761      cp, sd, ld = _codepage_lookup(cp or input_decoding)
1762      input_decoding = sd
1763      return "%s (LDID: 0x%02x - %s)" % (sd, ord(cp), ld)
1764  class _Db4Table(DbfTable):
1765      version = 'dBase IV w/memos (non-functional)'
1766      _versionabbv = 'db4'
1767      _fieldtypes = {
1768              'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter},
1769              'Y' : {'Type':'Currency', 'Retrieve':io.retrieveCurrency, 'Update':io.updateCurrency, 'Blank':Decimal(), 'Init':io.addVfpCurrency},
1770              'B' : {'Type':'Double', 'Retrieve':io.retrieveDouble, 'Update':io.updateDouble, 'Blank':float, 'Init':io.addVfpDouble},
1771              'F' : {'Type':'Float', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':float, 'Init':io.addVfpNumeric},
1772              'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addVfpNumeric},
1773              'I' : {'Type':'Integer', 'Retrieve':io.retrieveInteger, 'Update':io.updateInteger, 'Blank':int, 'Init':io.addVfpInteger},
1774              'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical},
1775              'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate},
1776              'T' : {'Type':'DateTime', 'Retrieve':io.retrieveVfpDateTime, 'Update':io.updateVfpDateTime, 'Blank':DateTime.now, 'Init':io.addVfpDateTime},
1777              'M' : {'Type':'Memo', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo},
1778              'G' : {'Type':'General', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo},
1779              'P' : {'Type':'Picture', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo},
1780              '0' : {'Type':'_NullFlags', 'Retrieve':io.unsupportedType, 'Update':io.unsupportedType, 'Blank':int, 'Init':None} }
1781      _memoext = '.dbt'
1782      _memotypes = ('G','M','P')
1783      _memoClass = _VfpMemo
1784      _yesMemoMask = '\x8b'               # 1000 1011
1785      _noMemoMask = '\x04'                # 0000 0100
1786      _fixed_fields = ('B','D','G','I','L','M','P','T','Y')
1787      _variable_fields = ('C','F','N')
1788      _character_fields = ('C','M')       # fields representing character data
1789      _decimal_fields = ('F','N')
1790      _numeric_fields = ('B','F','I','N','Y')
1791      _supported_tables = ('\x04', '\x8b')
1792      _dbfTableHeader = ['\x00'] * 32
1793      _dbfTableHeader[0] = '\x8b'         # version -- dBase IV w/memos (0x8b, 1000 1011)
1794      _dbfTableHeader[10] = '\x01'        # record length -- one for delete flag
1795      _dbfTableHeader[29] = '\x03'        # code page -- cp1252 Windows ANSI
1796      _dbfTableHeader = ''.join(_dbfTableHeader)
1797      _dbfTableHeaderExtra = ''
1798      _use_deleted = True
1799      def _checkMemoIntegrity(yo):
1800          "dBase IV specific"
1801          if yo._meta.header.version == '\x8b':
1802              try:
1803                  yo._meta.memo = yo._memoClass(yo._meta)
1804              except:
1805                  yo._meta.dfd.close()
1806                  yo._meta.dfd = None
1807                  raise
1808          if not yo._meta.ignorememos:
1809              for field in yo._meta.fields:
1810                  if yo._meta[field]['type'] in yo._memotypes:
1811                      if yo._meta.header.version != '\x8b':
1812                          yo._meta.dfd.close()
1813                          yo._meta.dfd = None
1814                          raise DbfError("Table structure corrupt: memo fields exist, header declares no memos")
1815                      elif not os.path.exists(yo._meta.memoname):
1816                          yo._meta.dfd.close()
1817                          yo._meta.dfd = None
1818                          raise DbfError("Table structure corrupt: memo fields exist without memo file")
1819                      break
1820