Module dbf

Source Code for Module dbf

   1  """ 
   2  ========= 
   3  Copyright 
   4  ========= 
   5      - Copyright: 2008-2011 Ad-Mail, Inc -- All rights reserved. 
   6      - Author: Ethan Furman 
   7      - Contact: ethanf@admailinc.com 
   8      - Organization: Ad-Mail, Inc. 
   9      - Version: 0.88.022 as of 12 Jul 2011 
  10   
  11  Redistribution and use in source and binary forms, with or without 
  12  modification, are permitted provided that the following conditions are met: 
  13      - Redistributions of source code must retain the above copyright 
  14        notice, this list of conditions and the following disclaimer. 
  15      - Redistributions in binary form must reproduce the above copyright 
  16        notice, this list of conditions and the following disclaimer in the 
  17        documentation and/or other materials provided with the distribution. 
  18      - Neither the name of Ad-Mail, Inc nor the 
  19        names of its contributors may be used to endorse or promote products 
  20        derived from this software without specific prior written permission. 
  21   
  22  THIS SOFTWARE IS PROVIDED BY Ad-Mail, Inc ''AS IS'' AND ANY 
  23  EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 
  24  WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 
  25  DISCLAIMED. IN NO EVENT SHALL Ad-Mail, Inc BE LIABLE FOR ANY 
  26  DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 
  27  (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 
  28  LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 
  29  ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 
  30  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 
  31  SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
  32   
  33  ------- 
  34  Summary 
  35  ------- 
  36   
  37  Python package for reading/writing dBase III and VFP 6 tables and memos 
  38   
   39  Goals:  provide a Pythonic programming style for working with dbf tables 
   40      - table = dbf.Table('table name' [, fielddesc[, fielddesc[, ....]]]) 
  41          - fielddesc examples:  name C(30); age N(3,0); wisdom M; marriage D 
  42      - record = [ table.current() | table[int] | table.append() | table.[next|prev|top|bottom|goto]() ] 
  43      - record.field | record['field'] accesses the field 
  44   
  45  NOTE:  Of the VFP data types, auto-increment and null settings are not implemented. 
  46   
  47  Example: 
  48   
  49      Create a test table: 
  50      table = dbf.Table('temptable', 'name C(30); age N(3,0); birth D') 
  51   
  52      Populate it: 
  53      for datum in ( 
  54              ('John Doe', 31, dbf.Date(1979, 9,13)), 
  55              ('Ethan Furman', 102, dbf.Date(1909, 4, 1)), 
  56              ('Jane Smith', 57, dbf.Date(1954, 7, 2)), 
  57              ): 
  58          table.append(datum) 
  59   
  60      Export to csv: 
  61      table.export(filename='filename', header=False) 
  62   
  63      Iterate over it: 
  64      for record in table: 
   65          print "%s was born on %s, so s/he is %d years of age" % (record.name, record.birth, record.age) 
  66   
  67      Create a new table from a csv file: 
  68      table = dbf.from_csv('filename.csv') # this has field names of f0, f1, f2, etc 
  69      or 
  70      table = dbf.from_csv('filename.csv', field_names="name age birth".split()) 
  71   
  72      Sort it: 
  73      name_index = table.create_index(lambda rec: rec.name) 
  74      for record in name_index: 
  75          print record.name 
  76   
  77      Primitive SQL (work in progress): 
  78      records = table.sql("select * where name[0] == 'J'") 
  79      for rec in records: 
  80          print rec 
  81          print 
  82   
  83   
  84  """ 
  85  __docformat__ = 'epytext' 
  86  version = (0, 88, 22) 
  87   
  88  __all__ = ( 
  89          'Table', 'List', 'Date', 'DateTime', 'Time', 
  90          'DbfError', 'DataOverflow', 'FieldMissing', 'NonUnicode', 
  91          'DbfWarning', 'Eof', 'Bof', 'DoNotIndex', 
  92          ) 
  93   
  94   
  95  import codecs 
  96  import csv 
  97  import datetime 
  98  import locale 
  99  import os 
 100  import struct 
 101  import sys 
 102  import time 
 103  import unicodedata 
 104  import weakref 
 105   
 106  from array import array 
 107  from bisect import bisect_left, bisect_right 
 108  from decimal import Decimal 
 109  from math import floor 
 110  from shutil import copyfileobj 
 111   
 112  __metaclass__ = type 
 113   
 114  input_decoding = locale.getdefaultlocale()[1]    # treat non-unicode data as ... 
 115  default_codepage = input_decoding  # if no codepage specified on dbf creation, use this 
 116  return_ascii = False         # if True -- convert back to icky ascii, losing chars if no mapping 
 117  temp_dir = os.environ.get("DBF_TEMP") or os.environ.get("TEMP") or "" 
 118   
 119  default_type = 'db3'    # default format if none specified 
 120  sql_user_functions = {}      # user-defined sql functions 
 121   
 122  # 2.6+ property for 2.5- 
 123  if sys.version_info[:2] < (2, 6): 
  124      # define our own property type 
  125      class property(): 
  126          "Emulate PyProperty_Type() in Objects/descrobject.c" 
  127   
  128          def __init__(self, fget=None, fset=None, fdel=None, doc=None): 
  129              self.fget = fget 
  130              self.fset = fset 
  131              self.fdel = fdel 
  132              self.__doc__ = doc or fget.__doc__ 
  133          def __call__(self, func): 
  134              self.fget = func 
  135              if not self.__doc__: 
  136                  self.__doc__ = func.__doc__ 
  137          def __get__(self, obj, objtype=None): 
  138              if obj is None: 
  139                  return self 
  140              if self.fget is None: 
  141                  raise AttributeError("unreadable attribute") 
  142              return self.fget(obj) 
  143          def __set__(self, obj, value): 
  144              if self.fset is None: 
  145                  raise AttributeError("can't set attribute") 
  146              self.fset(obj, value) 
  147          def __delete__(self, obj): 
  148              if self.fdel is None: 
  149                  raise AttributeError("can't delete attribute") 
  150              self.fdel(obj) 
  151          def setter(self, func): 
  152              self.fset = func 
  153              return self 
  154          def deleter(self, func): 
  155              self.fdel = func 
  156              return self 
  157   
  158  # warnings and errors 
  159   
  160  class DbfError(Exception): 
  161      "Fatal errors elicit this response." 
  162      pass 
  163  class DataOverflow(DbfError): 
  164      "Data too large for field" 
  165      def __init__(yo, message, data=None): 
  166          super(DataOverflow, yo).__init__(message) 
  167          yo.data = data 
  168  class FieldMissing(KeyError, DbfError): 
  169      "Field does not exist in table" 
  170      def __init__(yo, fieldname): 
  171          super(FieldMissing, yo).__init__('%s: no such field in table' % fieldname) 
  172          yo.data = fieldname 
  173  class NonUnicode(DbfError): 
  174      "Data for table not in unicode" 
  175      def __init__(yo, message=None): 
  176          super(NonUnicode, yo).__init__(message) 
  177  class DbfWarning(Exception): 
  178      "Normal operations elicit this response" 
  179  class Eof(DbfWarning, StopIteration): 
  180      "End of file reached" 
  181      message = 'End of file reached' 
  182      def __init__(yo): 
  183          super(Eof, yo).__init__(yo.message) 
  184  class Bof(DbfWarning, StopIteration): 
  185      "Beginning of file reached" 
  186      message = 'Beginning of file reached' 
  187      def __init__(yo): 
  188          super(Bof, yo).__init__(yo.message) 
  189  class DoNotIndex(DbfWarning): 
  190      "Returned by indexing functions to suppress a record from becoming part of the index" 
  191      message = 'Not indexing record' 
  192      def __init__(yo): 
  193          super(DoNotIndex, yo).__init__(yo.message) 
194 # wrappers around datetime and logical objects to allow null values
195 196 -class Date():
197 "adds null capable datetime.date constructs" 198 __slots__ = ['_date']
199 - def __new__(cls, year=None, month=0, day=0):
200 """date should be either a datetime.date or date/month/day should all be appropriate integers""" 201 nd = object.__new__(cls) 202 nd._date = False 203 if type(year) == datetime.date: 204 nd._date = year 205 elif type(year) == Date: 206 nd._date = year._date 207 elif year == 'no date': 208 pass # date object is already False 209 elif year is not None: 210 nd._date = datetime.date(year, month, day) 211 return nd
212 - def __add__(yo, other):
213 if yo and type(other) == datetime.timedelta: 214 return Date(yo._date + other) 215 else: 216 return NotImplemented
217 - def __eq__(yo, other):
218 if yo: 219 if type(other) == datetime.date: 220 return yo._date == other 221 elif type(other) == Date: 222 if other: 223 return yo._date == other._date 224 return False 225 else: 226 if type(other) == datetime.date: 227 return False 228 elif type(other) == Date: 229 if other: 230 return False 231 return True 232 return NotImplemented
233 - def __getattr__(yo, name):
234 if yo: 235 attribute = yo._date.__getattribute__(name) 236 return attribute 237 else: 238 raise AttributeError('null Date object has no attribute %s' % name)
239 - def __ge__(yo, other):
240 if yo: 241 if type(other) == datetime.date: 242 return yo._date >= other 243 elif type(other) == Date: 244 if other: 245 return yo._date >= other._date 246 return False 247 else: 248 if type(other) == datetime.date: 249 return False 250 elif type(other) == Date: 251 if other: 252 return False 253 return True 254 return NotImplemented
255 - def __gt__(yo, other):
256 if yo: 257 if type(other) == datetime.date: 258 return yo._date > other 259 elif type(other) == Date: 260 if other: 261 return yo._date > other._date 262 return True 263 else: 264 if type(other) == datetime.date: 265 return False 266 elif type(other) == Date: 267 if other: 268 return False 269 return False 270 return NotImplemented
271 - def __hash__(yo):
272 return yo._date.__hash__()
273 - def __le__(yo, other):
274 if yo: 275 if type(other) == datetime.date: 276 return yo._date <= other 277 elif type(other) == Date: 278 if other: 279 return yo._date <= other._date 280 return False 281 else: 282 if type(other) == datetime.date: 283 return True 284 elif type(other) == Date: 285 if other: 286 return True 287 return True 288 return NotImplemented
289 - def __lt__(yo, other):
290 if yo: 291 if type(other) == datetime.date: 292 return yo._date < other 293 elif type(other) == Date: 294 if other: 295 return yo._date < other._date 296 return False 297 else: 298 if type(other) == datetime.date: 299 return True 300 elif type(other) == Date: 301 if other: 302 return True 303 return False 304 return NotImplemented
305 - def __ne__(yo, other):
306 if yo: 307 if type(other) == datetime.date: 308 return yo._date != other 309 elif type(other) == Date: 310 if other: 311 return yo._date != other._date 312 return True 313 else: 314 if type(other) == datetime.date: 315 return True 316 elif type(other) == Date: 317 if other: 318 return True 319 return False 320 return NotImplemented
321 - def __nonzero__(yo):
322 if yo._date: 323 return True 324 return False
325 __radd__ = __add__
326 - def __rsub__(yo, other):
327 if yo and type(other) == datetime.date: 328 return other - yo._date 329 elif yo and type(other) == Date: 330 return other._date - yo._date 331 elif yo and type(other) == datetime.timedelta: 332 return Date(other - yo._date) 333 else: 334 return NotImplemented
335 - def __repr__(yo):
336 if yo: 337 return "Date(%d, %d, %d)" % yo.timetuple()[:3] 338 else: 339 return "Date()"
340 - def __str__(yo):
341 if yo: 342 return yo.isoformat() 343 return "no date"
344 - def __sub__(yo, other):
345 if yo and type(other) == datetime.date: 346 return yo._date - other 347 elif yo and type(other) == Date: 348 return yo._date - other._date 349 elif yo and type(other) == datetime.timedelta: 350 return Date(yo._date - other) 351 else: 352 return NotImplemented
353 - def date(yo):
354 if yo: 355 return yo._date 356 return None
357 @classmethod
358 - def fromordinal(cls, number):
359 if number: 360 return cls(datetime.date.fromordinal(number)) 361 return cls()
362 @classmethod
363 - def fromtimestamp(cls, timestamp):
364 return cls(datetime.date.fromtimestamp(timestamp))
365 @classmethod
366 - def fromymd(cls, yyyymmdd):
367 if yyyymmdd in ('', ' ','no date'): 368 return cls() 369 return cls(datetime.date(int(yyyymmdd[:4]), int(yyyymmdd[4:6]), int(yyyymmdd[6:])))
370 - def strftime(yo, format):
371 if yo: 372 return yo._date.strftime(format) 373 return '<no date>'
374 @classmethod
375 - def today(cls):
376 return cls(datetime.date.today())
377 - def ymd(yo):
378 if yo: 379 return "%04d%02d%02d" % yo.timetuple()[:3] 380 else: 381 return ' '
382 Date.max = Date(datetime.date.max) 383 Date.min = Date(datetime.date.min)
384 -class DateTime():
385 "adds null capable datetime.datetime constructs" 386 __slots__ = ['_datetime']
387 - def __new__(cls, year=None, month=0, day=0, hour=0, minute=0, second=0, microsec=0):
388 """year may be a datetime.datetime""" 389 ndt = object.__new__(cls) 390 ndt._datetime = False 391 if type(year) == datetime.datetime: 392 ndt._datetime = year 393 elif type(year) == DateTime: 394 ndt._datetime = year._datetime 395 elif year is not None: 396 ndt._datetime = datetime.datetime(year, month, day, hour, minute, second, microsec) 397 return ndt
398 - def __add__(yo, other):
399 if yo and type(other) == datetime.timedelta: 400 return DateTime(yo._datetime + other) 401 else: 402 return NotImplemented
403 - def __eq__(yo, other):
404 if yo: 405 if type(other) == datetime.datetime: 406 return yo._datetime == other 407 elif type(other) == DateTime: 408 if other: 409 return yo._datetime == other._datetime 410 return False 411 else: 412 if type(other) == datetime.datetime: 413 return False 414 elif type(other) == DateTime: 415 if other: 416 return False 417 return True 418 return NotImplemented
419 - def __getattr__(yo, name):
420 if yo: 421 attribute = yo._datetime.__getattribute__(name) 422 return attribute 423 else: 424 raise AttributeError('null DateTime object has no attribute %s' % name)
425 - def __ge__(yo, other):
426 if yo: 427 if type(other) == datetime.datetime: 428 return yo._datetime >= other 429 elif type(other) == DateTime: 430 if other: 431 return yo._datetime >= other._datetime 432 return False 433 else: 434 if type(other) == datetime.datetime: 435 return False 436 elif type(other) == DateTime: 437 if other: 438 return False 439 return True 440 return NotImplemented
441 - def __gt__(yo, other):
442 if yo: 443 if type(other) == datetime.datetime: 444 return yo._datetime > other 445 elif type(other) == DateTime: 446 if other: 447 return yo._datetime > other._datetime 448 return True 449 else: 450 if type(other) == datetime.datetime: 451 return False 452 elif type(other) == DateTime: 453 if other: 454 return False 455 return False 456 return NotImplemented
457 - def __hash__(yo):
458 return yo._datetime.__hash__()
459 - def __le__(yo, other):
460 if yo: 461 if type(other) == datetime.datetime: 462 return yo._datetime <= other 463 elif type(other) == DateTime: 464 if other: 465 return yo._datetime <= other._datetime 466 return False 467 else: 468 if type(other) == datetime.datetime: 469 return True 470 elif type(other) == DateTime: 471 if other: 472 return True 473 return True 474 return NotImplemented
475 - def __lt__(yo, other):
476 if yo: 477 if type(other) == datetime.datetime: 478 return yo._datetime < other 479 elif type(other) == DateTime: 480 if other: 481 return yo._datetime < other._datetime 482 return False 483 else: 484 if type(other) == datetime.datetime: 485 return True 486 elif type(other) == DateTime: 487 if other: 488 return True 489 return False 490 return NotImplemented
491 - def __ne__(yo, other):
492 if yo: 493 if type(other) == datetime.datetime: 494 return yo._datetime != other 495 elif type(other) == DateTime: 496 if other: 497 return yo._datetime != other._datetime 498 return True 499 else: 500 if type(other) == datetime.datetime: 501 return True 502 elif type(other) == DateTime: 503 if other: 504 return True 505 return False 506 return NotImplemented
507 - def __nonzero__(yo):
508 if yo._datetime is not False: 509 return True 510 return False
511 __radd__ = __add__
512 - def __rsub__(yo, other):
513 if yo and type(other) == datetime.datetime: 514 return other - yo._datetime 515 elif yo and type(other) == DateTime: 516 return other._datetime - yo._datetime 517 elif yo and type(other) == datetime.timedelta: 518 return DateTime(other - yo._datetime) 519 else: 520 return NotImplemented
521 - def __repr__(yo):
522 if yo: 523 return "DateTime(%d, %d, %d, %d, %d, %d, %d, %d, %d)" % yo._datetime.timetuple()[:] 524 else: 525 return "DateTime()"
526 - def __str__(yo):
527 if yo: 528 return yo.isoformat() 529 return "no datetime"
530 - def __sub__(yo, other):
531 if yo and type(other) == datetime.datetime: 532 return yo._datetime - other 533 elif yo and type(other) == DateTime: 534 return yo._datetime - other._datetime 535 elif yo and type(other) == datetime.timedelta: 536 return DateTime(yo._datetime - other) 537 else: 538 return NotImplemented
539 @classmethod
540 - def combine(cls, date, time):
541 if Date(date) and Time(time): 542 return cls(date.year, date.month, date.day, time.hour, time.minute, time.second, time.microsecond) 543 return cls()
544 - def date(yo):
545 if yo: 546 return Date(yo.year, yo.month, yo.day) 547 return Date()
548 - def datetime(yo):
549 if yo: 550 return yo._datetime 551 return None
552 @classmethod
553 - def fromordinal(cls, number):
554 if number: 555 return cls(datetime.datetime.fromordinal(number)) 556 else: 557 return cls()
558 @classmethod
559 - def fromtimestamp(cls, timestamp):
560 return DateTime(datetime.datetime.fromtimestamp(timestamp))
561 @classmethod
562 - def now(cls):
563 return cls(datetime.datetime.now())
564 - def time(yo):
565 if yo: 566 return Time(yo.hour, yo.minute, yo.second, yo.microsecond) 567 return Time()
568 @classmethod
569 - def utcnow(cls):
570 return cls(datetime.datetime.utcnow())
571 @classmethod
572 - def today(cls):
573 return cls(datetime.datetime.today())
574 DateTime.max = DateTime(datetime.datetime.max) 575 DateTime.min = DateTime(datetime.datetime.min)
576 -class Time():
577 "adds null capable datetime.time constructs" 578 __slots__ = ['_time']
579 - def __new__(cls, hour=None, minute=0, second=0, microsec=0):
580 """hour may be a datetime.time""" 581 nt = object.__new__(cls) 582 nt._time = False 583 if type(hour) == datetime.time: 584 nt._time = hour 585 elif type(hour) == Time: 586 nt._time = hour._time 587 elif hour is not None: 588 nt._time = datetime.time(hour, minute, second, microsec) 589 return nt
590 - def __add__(yo, other):
591 if yo and type(other) == datetime.timedelta: 592 return Time(yo._time + other) 593 else: 594 return NotImplemented
595 - def __eq__(yo, other):
596 if yo: 597 if type(other) == datetime.time: 598 return yo._time == other 599 elif type(other) == Time: 600 if other: 601 return yo._time == other._time 602 return False 603 else: 604 if type(other) == datetime.time: 605 return False 606 elif type(other) == Time: 607 if other: 608 return False 609 return True 610 return NotImplemented
611 - def __getattr__(yo, name):
612 if yo: 613 attribute = yo._time.__getattribute__(name) 614 return attribute 615 else: 616 raise AttributeError('null Time object has no attribute %s' % name)
617 - def __ge__(yo, other):
618 if yo: 619 if type(other) == datetime.time: 620 return yo._time >= other 621 elif type(other) == Time: 622 if other: 623 return yo._time >= other._time 624 return False 625 else: 626 if type(other) == datetime.time: 627 return False 628 elif type(other) == Time: 629 if other: 630 return False 631 return True 632 return NotImplemented
  633      def __gt__(yo, other): 
  634          if yo: 
  635              if type(other) == datetime.time: 
  636                  return yo._time > other 
  637              elif type(other) == Time: 
  638                  if other: 
  639                      return yo._time > other._time 
  640                  return True 
  641          else: 
  642              if type(other) == datetime.time: 
  643                  return False 
  644              elif type(other) == Time: 
  645                  if other: 
  646                      return False 
  647                  return False 
  648          return NotImplemented 
  649      def __hash__(yo): 
  650          return yo._time.__hash__() 
651 - def __le__(yo, other):
652 if yo: 653 if type(other) == datetime.time: 654 return yo._time <= other 655 elif type(other) == Time: 656 if other: 657 return yo._time <= other._time 658 return False 659 else: 660 if type(other) == datetime.time: 661 return True 662 elif type(other) == Time: 663 if other: 664 return True 665 return True 666 return NotImplemented
667 - def __lt__(yo, other):
668 if yo: 669 if type(other) == datetime.time: 670 return yo._time < other 671 elif type(other) == Time: 672 if other: 673 return yo._time < other._time 674 return False 675 else: 676 if type(other) == datetime.time: 677 return True 678 elif type(other) == Time: 679 if other: 680 return True 681 return False 682 return NotImplemented
683 - def __ne__(yo, other):
684 if yo: 685 if type(other) == datetime.time: 686 return yo._time != other 687 elif type(other) == Time: 688 if other: 689 return yo._time != other._time 690 return True 691 else: 692 if type(other) == datetime.time: 693 return True 694 elif type(other) == Time: 695 if other: 696 return True 697 return False 698 return NotImplemented
699 - def __nonzero__(yo):
700 if yo._time is not False: 701 return True 702 return False
703 __radd__ = __add__
  704      def __rsub__(yo, other): 
  705          if yo and type(other) == datetime.time: 
  706              return other - yo._time 
  707          elif yo and type(other) == Time: 
  708              return other._time - yo._time 
  709          elif yo and type(other) == datetime.timedelta: 
  710              return Time(other - yo._time) 
  711          else: 
  712              return NotImplemented 
713 - def __repr__(yo):
714 if yo: 715 return "Time(%d, %d, %d, %d)" % (yo.hour, yo.minute, yo.second, yo.microsecond) 716 else: 717 return "Time()"
718 - def __str__(yo):
719 if yo: 720 return yo.isoformat() 721 return "no time"
722 - def __sub__(yo, other):
723 if yo and type(other) == datetime.time: 724 return yo._time - other 725 elif yo and type(other) == Time: 726 return yo._time - other._time 727 elif yo and type(other) == datetime.timedelta: 728 return Time(yo._time - other) 729 else: 730 return NotImplemented
731 Time.max = Time(datetime.time.max) 732 Time.min = Time(datetime.time.min)
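# Behavior sketch for the null-capable Date/DateTime/Time wrappers above
# (values are illustrative only):
#
#     d = Date(1979, 9, 13)      # wraps a real datetime.date
#     n = Date()                 # the null "no date" value
#     bool(d), bool(n)           # True, False
#     str(n)                     # 'no date'
#     d > n                      # True -- a real date compares greater than a null date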
733 734 -class Logical():
735 "return type for Logical fields; implements boolean algebra" 736 _need_init = True
737 - def A(x, y):
738 "OR (disjunction): x | y => True iff at least one of x, y is True" 739 if not isinstance(y, (x.__class__, bool, type(None))): 740 return NotImplemented 741 if x.value is None or y == None: 742 return x.unknown 743 elif x.value is True or y == True: 744 return x.true 745 return x.false
746 - def _C_material(x, y):
747 "IMP (material implication) x >> y => False iff x == True and y == False" 748 if not isinstance(y, (x.__class__, bool, type(None))): 749 return NotImplemented 750 if x.value is None or y == None: 751 return x.unknown 752 elif y == False and x.value is True: 753 return x.false 754 return x.true
755 - def _C_material_reversed(y, x):
756 "IMP (material implication) x >> y => False iff x = True and y = False" 757 if not isinstance(x, (y.__class__, bool, type(None))): 758 return NotImplemented 759 if x == None or y.value is None: 760 return y.unknown 761 elif x == True and y.value is False: 762 return y.false 763 return y.true
764 - def _C_relevant(x, y):
765 "IMP (relevant implication) x >> y => True iff both x, y are True, False iff x == True and y == False, Unknown if x is False" 766 if not isinstance(y, (x.__class__, bool, type(None))): 767 return NotImplemented 768 if x.value is True and y == True: 769 return x.true 770 if x.value is True and y == False: 771 return x.false 772 return x.unknown
773 - def _C_relevant_reversed(y, x):
774 "IMP (relevant implication) x >> y => True iff both x, y are True, False iff x == True and y == False, Unknown if y is False" 775 if not isinstance(x, (y.__class__, bool, type(None))): 776 return NotImplemented 777 if x == True and y.value is True: 778 return y.true 779 if x == True and y.value is False: 780 return y.false 781 return y.unknown
782 - def D(x, y):
783 "NAND (negative AND) x.D(y): False iff x and y are both True" 784 if not isinstance(y, (x.__class__, bool, type(None))): 785 return NotImplemented 786 if x.value is None or y == None: 787 return x.unknown 788 elif x.value is True and y == True: 789 return x.false 790 return x.true
791 - def E(x, y):
792 "EQV (equivalence) x.E(y): True iff x and y are the same" 793 if not isinstance(y, (x.__class__, bool, type(None))): 794 return NotImplemented 795 if x.value is None or y == None: 796 return x.unknown 797 elif y == True: 798 return (x.false, x.true)[x] 799 elif y == False: 800 return (x.true, x.false)[x]
801 - def J(x, y):
802 "XOR (parity) x ^ y: True iff only one of x,y is True" 803 if not isinstance(y, (x.__class__, bool, type(None))): 804 return NotImplemented 805 if x.value is None or y == None: 806 return x.unknown 807 elif y == True: 808 return (x.true, x.false)[x] 809 elif y == False: 810 return (x.false, x.true)[x]
811 - def K(x, y):
812 "AND (conjunction) x & y: True iff both x, y are True" 813 if not isinstance(y, (x.__class__, bool, type(None))): 814 return NotImplemented 815 if x.value is None or y == None: 816 return x.unknown 817 elif y == True: 818 return (x.false, x.true)[x] 819 elif y == False: 820 return x.false
821 - def N(x):
822 "NEG (negation) -x: True iff x = False" 823 if x is x.true: 824 return x.false 825 elif x is x.false: 826 return x.true 827 else: 828 return x.unknown
829 @classmethod
830 - def set_implication(cls, method):
831 "sets IMP to material or relevant" 832 if not isinstance(method, (str, unicode)) or method.lower() not in ('material','relevant'): 833 raise ValueError("method should be 'material' (for strict boolean) or 'relevant', not %r'" % method) 834 if method.lower() == 'material': 835 cls.C = cls._C_material 836 cls.__rshift__ = cls._C_material 837 cls.__rrshift__ = cls._C_material_reversed 838 elif method.lower() == 'relevant': 839 cls.C = cls._C_relevant 840 cls.__rshift__ = cls._C_relevant 841 cls.__rrshift__ = cls._C_relevant_reversed
842 - def __new__(cls, value=None):
843 if value is None: 844 return cls.unknown 845 elif isinstance(value, (str, unicode)): 846 if value.lower() in ('t','true','y','yes','on'): 847 return cls.true 848 elif value.lower() in ('f','false','n','no','off'): 849 return cls.false 850 elif value.lower() in ('?','unknown','null','none',' '): 851 return cls.unknown 852 else: 853 raise ValueError('unknown value for Logical: %s' % value) 854 else: 855 return (cls.false, cls.true)[bool(value)]
856 - def __eq__(x, y):
857 if isinstance(y, (bool, type(None))): 858 return x.__class__(x.value == y) 859 if isinstance(y, x.__class__): 860 return x.__class__(x.value == y.value) 861 return NotImplemented
862 - def __hash__(x):
863 return hash(x.value)
864 - def __index__(x):
865 if x.value is False: 866 return 0 867 if x.value is True: 868 return 1 869 if x.value is None: 870 return 2
871 - def __ne__(x, y):
872 if isinstance(y, (bool, type(None))): 873 return x.__class__(x.value != y) 874 if isinstance(y, x.__class__): 875 return x.__class__(x.value != y.value) 876 return NotImplemented
877 - def __nonzero__(x):
878 return x.value == True
879 - def __repr__(x):
880 return "Logical(%r)" % x.string
881 - def __str__(x):
882 return x.string
883 __add__ = A 884 __and__ = K 885 __mul__ = K 886 __neg__ = N 887 __or__ = A 888 __radd__ = A 889 __rand__ = K 890 __rshift__ = None 891 __rmul__ = K 892 __ror__ = A 893 __rrshift__ = None 894 __rxor__ = J 895 __xor__ = J
896 if hasattr(Logical, '_need_init'): 897 Logical.true = true = object.__new__(Logical) 898 true.value = True 899 true.string = 'T' 900 Logical.false = false = object.__new__(Logical) 901 false.value = False 902 false.string = 'F' 903 Logical.unknown = unknown = object.__new__(Logical) 904 unknown.value = None 905 unknown.string = '?' 906 Logical.set_implication('material') 907 del Logical._need_init
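# Quick sketch of the three-valued Logical type above (illustrative only):
#
#     t = Logical(True)
#     u = Logical(None)          # the 'unknown' singleton
#     t & u                      # Logical('?') -- AND with unknown is unknown
#     -t                         # Logical('F') -- negation
#     bool(u)                    # False -- only a known True is truthy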
908 909 # Internal classes 910 -class _DbfRecord():
911 """Provides routines to extract and save data within the fields of a dbf record.""" 912 __slots__ = ['_recnum', '_layout', '_data', '_dirty', '__weakref__']
913 - def _retrieveFieldValue(yo, record_data, fielddef):
914 """calls appropriate routine to fetch value stored in field from array 915 @param record_data: the data portion of the record 916 @type record_data: array of characters 917 @param fielddef: description of the field definition 918 @type fielddef: dictionary with keys 'type', 'start', 'length', 'end', 'decimals', and 'flags' 919 @returns: python data stored in field""" 920 921 field_type = fielddef['type'] 922 classtype = yo._layout.fieldtypes[field_type]['Class'] 923 retrieve = yo._layout.fieldtypes[field_type]['Retrieve'] 924 if classtype is not None: 925 datum = retrieve(record_data, fielddef, yo._layout.memo, classtype) 926 else: 927 datum = retrieve(record_data, fielddef, yo._layout.memo) 928 if field_type in yo._layout.character_fields: 929 datum = yo._layout.decoder(datum)[0] 930 if yo._layout.return_ascii: 931 try: 932 datum = yo._layout.output_encoder(datum)[0] 933 except UnicodeEncodeError: 934 datum = unicodedata.normalize('NFD', datum).encode('ascii','ignore') 935 return datum
936 - def _updateFieldValue(yo, fielddef, value):
937 "calls appropriate routine to convert value to ascii bytes, and save it in record" 938 field_type = fielddef['type'] 939 update = yo._layout.fieldtypes[field_type]['Update'] 940 if field_type in yo._layout.character_fields: 941 if not isinstance(value, unicode): 942 if yo._layout.input_decoder is None: 943 raise NonUnicode("String not in unicode format, no default encoding specified") 944 value = yo._layout.input_decoder(value)[0] # input ascii => unicode 945 value = yo._layout.encoder(value)[0] # unicode => table ascii 946 bytes = array('c', update(value, fielddef, yo._layout.memo)) 947 size = fielddef['length'] 948 if len(bytes) > size: 949 raise DataOverflow("tried to store %d bytes in %d byte field" % (len(bytes), size)) 950 blank = array('c', ' ' * size) 951 start = fielddef['start'] 952 end = start + size 953 blank[:len(bytes)] = bytes[:] 954 yo._data[start:end] = blank[:] 955 yo._dirty = True
956 - def _update_disk(yo, location='', data=None):
957 if not yo._layout.inmemory: 958 if yo._recnum < 0: 959 raise DbfError("Attempted to update record that has been packed") 960 if location == '': 961 location = yo._recnum * yo._layout.header.record_length + yo._layout.header.start 962 if data is None: 963 data = yo._data 964 yo._layout.dfd.seek(location) 965 yo._layout.dfd.write(data) 966 yo._dirty = False 967 for index in yo.record_table._indexen: 968 index(yo)
969 - def __contains__(yo, key):
970 return key in yo._layout.fields or key in ['record_number','delete_flag']
971 - def __iter__(yo):
972 return (yo[field] for field in yo._layout.fields)
973 - def __getattr__(yo, name):
974 if name[0:2] == '__' and name[-2:] == '__': 975 raise AttributeError, 'Method %s is not implemented.' % name 976 elif name == 'record_number': 977 return yo._recnum 978 elif name == 'delete_flag': 979 return yo._data[0] != ' ' 980 elif not name in yo._layout.fields: 981 raise FieldMissing(name) 982 try: 983 fielddef = yo._layout[name] 984 value = yo._retrieveFieldValue(yo._data[fielddef['start']:fielddef['end']], fielddef) 985 return value 986 except DbfError, error: 987 error.message = "field --%s-- is %s -> %s" % (name, yo._layout.fieldtypes[fielddef['type']]['Type'], error.message) 988 raise
989 - def __getitem__(yo, item):
990 if type(item) in (int, long): 991 if not -yo._layout.header.field_count <= item < yo._layout.header.field_count: 992 raise IndexError("Field offset %d is not in record" % item) 993 return yo[yo._layout.fields[item]] 994 elif type(item) == slice: 995 sequence = [] 996 for index in yo._layout.fields[item]: 997 sequence.append(yo[index]) 998 return sequence 999 elif type(item) == str: 1000 return yo.__getattr__(item) 1001 else: 1002 raise TypeError("%s is not a field name" % item)
1003 - def __len__(yo):
1004 return yo._layout.header.field_count
1005 - def __new__(cls, recnum, layout, kamikaze='', _fromdisk=False):
1006 """record = ascii array of entire record; layout=record specification; memo = memo object for table""" 1007 record = object.__new__(cls) 1008 record._dirty = False 1009 record._recnum = recnum 1010 record._layout = layout 1011 if layout.blankrecord is None and not _fromdisk: 1012 record._createBlankRecord() 1013 record._data = layout.blankrecord 1014 if recnum == -1: # not a disk-backed record 1015 return record 1016 elif type(kamikaze) == array: 1017 record._data = kamikaze[:] 1018 elif type(kamikaze) == str: 1019 record._data = array('c', kamikaze) 1020 else: 1021 record._data = kamikaze._data[:] 1022 datalen = len(record._data) 1023 if datalen < layout.header.record_length: 1024 record._data.extend(layout.blankrecord[datalen:]) 1025 elif datalen > layout.header.record_length: 1026 record._data = record._data[:layout.header.record_length] 1027 if not _fromdisk and not layout.inmemory: 1028 record._update_disk() 1029 return record
1030 - def __setattr__(yo, name, value):
1031 if name in yo.__slots__: 1032 object.__setattr__(yo, name, value) 1033 return 1034 elif not name in yo._layout.fields: 1035 raise FieldMissing(name) 1036 fielddef = yo._layout[name] 1037 try: 1038 yo._updateFieldValue(fielddef, value) 1039 except DbfError, error: 1040 error.message = "field --%s-- is %s -> %s" % (name, yo._layout.fieldtypes[fielddef['type']]['Type'], error.message) 1041 error.data = name 1042 raise
1043 - def __setitem__(yo, name, value):
1044 if type(name) == str: 1045 yo.__setattr__(name, value) 1046 elif type(name) in (int, long): 1047 yo.__setattr__(yo._layout.fields[name], value) 1048 elif type(name) == slice: 1049 sequence = [] 1050 for field in yo._layout.fields[name]: 1051 sequence.append(field) 1052 if len(sequence) != len(value): 1053 raise DbfError("length of slices not equal") 1054 for field, val in zip(sequence, value): 1055 yo[field] = val 1056 else: 1057 raise TypeError("%s is not a field name" % name)
1058 - def __str__(yo):
1059 result = [] 1060 for seq, field in enumerate(yo.field_names): 1061 result.append("%3d - %-10s: %s" % (seq, field, yo[field])) 1062 return '\n'.join(result)
1063 - def __repr__(yo):
1064 return yo._data.tostring()
1065 - def _createBlankRecord(yo):
1066 "creates a blank record data chunk" 1067 layout = yo._layout 1068 ondisk = layout.ondisk 1069 layout.ondisk = False 1070 yo._data = array('c', ' ' * layout.header.record_length) 1071 layout.memofields = [] 1072 for field in layout.fields: 1073 yo._updateFieldValue(layout[field], layout.fieldtypes[layout[field]['type']]['Blank']()) 1074 if layout[field]['type'] in layout.memotypes: 1075 layout.memofields.append(field) 1076 layout.blankrecord = yo._data[:] 1077 layout.ondisk = ondisk
1078 - def delete_record(yo):
1079 "marks record as deleted" 1080 yo._data[0] = '*' 1081 yo._dirty = True 1082 return yo
1083 @property
1084 - def field_names(yo):
1085 "fields in table/record" 1086 return yo._layout.fields[:]
1087 - def gather_fields(yo, dictionary, drop=False): # dict, drop_missing=False):
1088 "saves a dictionary into a record's fields\nkeys with no matching field will raise a FieldMissing exception unless drop_missing = True" 1089 old_data = yo._data[:] 1090 try: 1091 for key in dictionary: 1092 if not key in yo.field_names: 1093 if drop: 1094 continue 1095 raise FieldMissing(key) 1096 yo.__setattr__(key, dictionary[key]) 1097 except: 1098 yo._data[:] = old_data 1099 raise 1100 return yo
1101 @property
1102 - def has_been_deleted(yo):
1103 "marked for deletion?" 1104 return yo._data[0] == '*'
 1105      def read_record(yo): 
 1106          "refresh record data from disk" 
 1107          size = yo._layout.header.record_length 
 1108          location = yo._recnum * size + yo._layout.header.start 
 1109          yo._layout.dfd.seek(location) 
 1110          yo._data[:] = yo._layout.dfd.read(size) 
 1111          yo._dirty = False 
 1112          return yo 
1113 @property
1114 - def record_number(yo):
1115 "physical record number" 1116 return yo._recnum
1117 @property
1118 - def record_table(yo):
1119 table = yo._layout.table() 1120 if table is None: 1121 raise DbfError("table is no longer available") 1122 return table
1123 - def check_index(yo):
1124 for dbfindex in yo._layout.table()._indexen: 1125 dbfindex(yo)
1126 - def reset_record(yo, keep_fields=None):
1127 "blanks record" 1128 if keep_fields is None: 1129 keep_fields = [] 1130 keep = {} 1131 for field in keep_fields: 1132 keep[field] = yo[field] 1133 if yo._layout.blankrecord == None: 1134 yo._createBlankRecord() 1135 yo._data[:] = yo._layout.blankrecord[:] 1136 for field in keep_fields: 1137 yo[field] = keep[field] 1138 yo._dirty = True 1139 return yo
1140 - def scatter_fields(yo, blank=False):
1141 "returns a dictionary of fieldnames and values which can be used with gather_fields(). if blank is True, values are empty." 1142 keys = yo._layout.fields 1143 if blank: 1144 values = [yo._layout.fieldtypes[yo._layout[key]['type']]['Blank']() for key in keys] 1145 else: 1146 values = [yo[field] for field in keys] 1147 return dict(zip(keys, values))
1148 - def undelete_record(yo):
1149 "marks record as active" 1150 yo._data[0] = ' ' 1151 yo._dirty = True 1152 return yo
1153 - def write_record(yo, **kwargs):
1154 "write record data to disk" 1155 if kwargs: 1156 yo.gather_fields(kwargs) 1157 if yo._dirty: 1158 yo._update_disk() 1159 return 1 1160 return 0
1161 -class _DbfMemo():
1162 """Provides access to memo fields as dictionaries 1163 must override _init, _get_memo, and _put_memo to 1164 store memo contents to disk"""
1165 - def _init(yo):
1166 "initialize disk file usage"
1167 - def _get_memo(yo, block):
1168 "retrieve memo contents from disk"
1169 - def _put_memo(yo, data):
1170 "store memo contents to disk"
1171 - def __init__(yo, meta):
1172 "" 1173 yo.meta = meta 1174 yo.memory = {} 1175 yo.nextmemo = 1 1176 yo._init() 1177 yo.meta.newmemofile = False
1178 - def get_memo(yo, block, field):
1179 "gets the memo in block" 1180 if yo.meta.ignorememos or not block: 1181 return '' 1182 if yo.meta.ondisk: 1183 return yo._get_memo(block) 1184 else: 1185 return yo.memory[block]
1186 - def put_memo(yo, data):
1187 "stores data in memo file, returns block number" 1188 if yo.meta.ignorememos or data == '': 1189 return 0 1190 if yo.meta.inmemory: 1191 thismemo = yo.nextmemo 1192 yo.nextmemo += 1 1193 yo.memory[thismemo] = data 1194 else: 1195 thismemo = yo._put_memo(data) 1196 return thismemo
1197 -class _Db3Memo(_DbfMemo):
1198 - def _init(yo):
1199 "dBase III specific" 1200 yo.meta.memo_size= 512 1201 yo.record_header_length = 2 1202 if yo.meta.ondisk and not yo.meta.ignorememos: 1203 if yo.meta.newmemofile: 1204 yo.meta.mfd = open(yo.meta.memoname, 'w+b') 1205 yo.meta.mfd.write(packLongInt(1) + '\x00' * 508) 1206 else: 1207 try: 1208 yo.meta.mfd = open(yo.meta.memoname, 'r+b') 1209 yo.meta.mfd.seek(0) 1210 yo.nextmemo = unpackLongInt(yo.meta.mfd.read(4)) 1211 except: 1212 raise DbfError("memo file appears to be corrupt")
1213 - def _get_memo(yo, block):
1214 block = int(block) 1215 yo.meta.mfd.seek(block * yo.meta.memo_size) 1216 eom = -1 1217 data = '' 1218 while eom == -1: 1219 newdata = yo.meta.mfd.read(yo.meta.memo_size) 1220 if not newdata: 1221 return data 1222 data += newdata 1223 eom = data.find('\x1a\x1a') 1224 return data[:eom].rstrip()
1225 - def _put_memo(yo, data):
1226 data = data.rstrip() 1227 length = len(data) + yo.record_header_length # room for two ^Z at end of memo 1228 blocks = length // yo.meta.memo_size 1229 if length % yo.meta.memo_size: 1230 blocks += 1 1231 thismemo = yo.nextmemo 1232 yo.nextmemo = thismemo + blocks 1233 yo.meta.mfd.seek(0) 1234 yo.meta.mfd.write(packLongInt(yo.nextmemo)) 1235 yo.meta.mfd.seek(thismemo * yo.meta.memo_size) 1236 yo.meta.mfd.write(data) 1237 yo.meta.mfd.write('\x1a\x1a') 1238 double_check = yo._get_memo(thismemo) 1239 if len(double_check) != len(data): 1240 uhoh = open('dbf_memo_dump.err','wb') 1241 uhoh.write('thismemo: %d' % thismemo) 1242 uhoh.write('nextmemo: %d' % yo.nextmemo) 1243 uhoh.write('saved: %d bytes' % len(data)) 1244 uhoh.write(data) 1245 uhoh.write('retrieved: %d bytes' % len(double_check)) 1246 uhoh.write(double_check) 1247 uhoh.close() 1248 raise DbfError("unknown error: memo not saved") 1249 return thismemo
1250 -class _VfpMemo(_DbfMemo):
1251 - def _init(yo):
1252 "Visual Foxpro 6 specific" 1253 if yo.meta.ondisk and not yo.meta.ignorememos: 1254 yo.record_header_length = 8 1255 if yo.meta.newmemofile: 1256 if yo.meta.memo_size == 0: 1257 yo.meta.memo_size = 1 1258 elif 1 < yo.meta.memo_size < 33: 1259 yo.meta.memo_size *= 512 1260 yo.meta.mfd = open(yo.meta.memoname, 'w+b') 1261 nextmemo = 512 // yo.meta.memo_size 1262 if nextmemo * yo.meta.memo_size < 512: 1263 nextmemo += 1 1264 yo.nextmemo = nextmemo 1265 yo.meta.mfd.write(packLongInt(nextmemo, bigendian=True) + '\x00\x00' + \ 1266 packShortInt(yo.meta.memo_size, bigendian=True) + '\x00' * 504) 1267 else: 1268 try: 1269 yo.meta.mfd = open(yo.meta.memoname, 'r+b') 1270 yo.meta.mfd.seek(0) 1271 header = yo.meta.mfd.read(512) 1272 yo.nextmemo = unpackLongInt(header[:4], bigendian=True) 1273 yo.meta.memo_size = unpackShortInt(header[6:8], bigendian=True) 1274 except: 1275 raise DbfError("memo file appears to be corrupt")
1276 - def _get_memo(yo, block):
1277 yo.meta.mfd.seek(block * yo.meta.memo_size) 1278 header = yo.meta.mfd.read(8) 1279 length = unpackLongInt(header[4:], bigendian=True) 1280 return yo.meta.mfd.read(length)
1281 - def _put_memo(yo, data):
1282 data = data.rstrip() # no trailing whitespace 1283 yo.meta.mfd.seek(0) 1284 thismemo = unpackLongInt(yo.meta.mfd.read(4), bigendian=True) 1285 yo.meta.mfd.seek(0) 1286 length = len(data) + yo.record_header_length # room for two ^Z at end of memo 1287 blocks = length // yo.meta.memo_size 1288 if length % yo.meta.memo_size: 1289 blocks += 1 1290 yo.meta.mfd.write(packLongInt(thismemo+blocks, bigendian=True)) 1291 yo.meta.mfd.seek(thismemo*yo.meta.memo_size) 1292 yo.meta.mfd.write('\x00\x00\x00\x01' + packLongInt(len(data), bigendian=True) + data) 1293 return thismemo
 1294  class DbfCsv(csv.Dialect): 
 1295      "csv format for exporting tables" 
 1296      delimiter = ',' 
 1297      doublequote = True 
 1298      escapechar = None 
 1299      lineterminator = '\n' 
 1300      quotechar = '"' 
 1301      skipinitialspace = True 
 1302      quoting = csv.QUOTE_NONNUMERIC 
 1303  csv.register_dialect('dbf', DbfCsv) 
 1304   
 1305  # Routines for saving, retrieving, and creating fields 
 1306   
 1307  VFPTIME = 1721425 
 1308   
 1309  def packShortInt(value, bigendian=False): 
 1310      "Returns a two-byte integer from the value, or raises DbfError" 
 1311      # 256 / 65,536 
 1312      if value > 65535: 
 1313          raise DataOverflow("Maximum Integer size exceeded. Possible: 65535. Attempted: %d" % value) 
 1314      if bigendian: 
 1315          return struct.pack('>H', value) 
 1316      else: 
 1317          return struct.pack('<H', value) 
 1318  def packLongInt(value, bigendian=False): 
 1319      "Returns a four-byte integer from the value, or raises DbfError" 
 1320      # 256 / 65,536 / 16,777,216 
 1321      if value > 4294967295: 
 1322          raise DataOverflow("Maximum Integer size exceeded. Possible: 4294967295. Attempted: %d" % value) 
 1323      if bigendian: 
 1324          return struct.pack('>L', value) 
 1325      else: 
 1326          return struct.pack('<L', value) 
1327 -def packDate(date):
1328 "Returns a group of three bytes, in integer form, of the date" 1329 return "%c%c%c" % (date.year-1900, date.month, date.day)
1330 -def packStr(string):
1331 "Returns an 11 byte, upper-cased, null padded string suitable for field names; raises DbfError if the string is bigger than 10 bytes" 1332 if len(string) > 10: 1333 raise DbfError("Maximum string size is ten characters -- %s has %d characters" % (string, len(string))) 1334 return struct.pack('11s', string.upper())
1335 -def unpackShortInt(bytes, bigendian=False):
1336 "Returns the value in the two-byte integer passed in" 1337 if bigendian: 1338 return struct.unpack('>H', bytes)[0] 1339 else: 1340 return struct.unpack('<H', bytes)[0]
1341 -def unpackLongInt(bytes, bigendian=False):
1342 "Returns the value in the four-byte integer passed in" 1343 if bigendian: 1344 return int(struct.unpack('>L', bytes)[0]) 1345 else: 1346 return int(struct.unpack('<L', bytes)[0])
1347 -def unpackDate(bytestr):
1348 "Returns a Date() of the packed three-byte date passed in" 1349 year, month, day = struct.unpack('<BBB', bytestr) 1350 year += 1900 1351 return Date(year, month, day)
1352 -def unpackStr(chars):
1353 "Returns a normal, lower-cased string from a null-padded byte string" 1354 field = struct.unpack('%ds' % len(chars), chars)[0] 1355 name = [] 1356 for ch in field: 1357 if ch == '\x00': 1358 break 1359 name.append(ch.lower()) 1360 return ''.join(name)
1361 -def convertToBool(value):
1362 """Returns boolean true or false; normal rules apply to non-string values; string values 1363 must be 'y','t', 'yes', or 'true' (case insensitive) to be True""" 1364 if type(value) == str: 1365 return bool(value.lower() in ['t', 'y', 'true', 'yes']) 1366 else: 1367 return bool(value)
1368 -def unsupportedType(something, field, memo=None, typ=None):
1369 "called if a data type is not supported for that style of table" 1370 raise DbfError('field type is not supported.')
1371 -def retrieveCharacter(bytes, fielddef={}, memo=None, typ=None):
1372 "Returns the string in bytes with trailing white space removed" 1373 return typ(bytes.tostring().rstrip())
 1374  def updateCharacter(string, fielddef, memo=None): 
 1375      "returns the string, truncating if string is longer than its field" 
 1376      string = str(string) 
 1377      return string.rstrip() 
1378 -def retrieveCurrency(bytes, fielddef={}, memo=None, typ=None):
1379 value = struct.unpack('<q', bytes)[0] 1380 return typ(("%de-4" % value).strip())
1381 -def updateCurrency(value, fielddef={}, memo=None):
1382 currency = int(value * 10000) 1383 if not -9223372036854775808 < currency < 9223372036854775808: 1384 raise DataOverflow("value %s is out of bounds" % value) 1385 return struct.pack('<q', currency)
1386 -def retrieveDate(bytes, fielddef={}, memo=None):
1387 "Returns the ascii coded date as a Date object" 1388 return Date.fromymd(bytes.tostring())
1389 -def updateDate(moment, fielddef={}, memo=None):
1390 "returns the Date or datetime.date object ascii-encoded (yyyymmdd)" 1391 if moment: 1392 return "%04d%02d%02d" % moment.timetuple()[:3] 1393 return ' '
1394 -def retrieveDouble(bytes, fielddef={}, memo=None, typ=None):
1395 return float(struct.unpack('<d', bytes)[0])
1396 -def updateDouble(value, fielddef={}, memo=None):
1397 return struct.pack('<d', float(value))
1398 -def retrieveInteger(bytes, fielddef={}, memo=None, typ=None):
1399 "Returns the binary number stored in bytes in little-endian format" 1400 if typ is None or typ == 'default': 1401 return struct.unpack('<i', bytes)[0] 1402 else: 1403 return typ(struct.unpack('<i', bytes)[0])
1404 -def updateInteger(value, fielddef={}, memo=None):
1405 "returns value in little-endian binary format" 1406 try: 1407 value = int(value) 1408 except Exception: 1409 raise DbfError("incompatible type: %s(%s)" % (type(value), value)) 1410 if not -2147483648 < value < 2147483647: 1411 raise DataOverflow("Integer size exceeded. Possible: -2,147,483,648..+2,147,483,647. Attempted: %d" % value) 1412 return struct.pack('<i', int(value))
1413 -def retrieveLogical(bytes, fielddef={}, memo=None):
1414 "Returns True if bytes is 't', 'T', 'y', or 'Y', None if '?', and False otherwise" 1415 bytes = bytes.tostring() 1416 if bytes == '?': 1417 return None 1418 return bytes in ['t','T','y','Y']
 1419  def updateLogical(logical, fielddef={}, memo=None): 
 1420      "Returns 'T' if logical is True, 'F' otherwise" 
 1421      if type(logical) != bool: 
 1422          logical = convertToBool(logical) 
 1423      if type(logical) != bool: 
 1424          raise DbfError('Value %s is not logical.' % logical) 
 1425      return logical and 'T' or 'F' 
1426 -def retrieveMemo(bytes, fielddef, memo, typ):
1427 "Returns the block of data from a memo file" 1428 stringval = bytes.tostring() 1429 if stringval.strip(): 1430 block = int(stringval.strip()) 1431 else: 1432 block = 0 1433 return memo.get_memo(block, fielddef)
1434 -def updateMemo(string, fielddef, memo):
1435 "Writes string as a memo, returns the block number it was saved into" 1436 block = memo.put_memo(string) 1437 if block == 0: 1438 block = '' 1439 return "%*s" % (fielddef['length'], block)
1440 -def retrieveNumeric(bytes, fielddef, memo=None, typ=None):
1441 "Returns the number stored in bytes as integer if field spec for decimals is 0, float otherwise" 1442 string = bytes.tostring() 1443 if string[0:1] == '*': # value too big to store (Visual FoxPro idiocy) 1444 return None 1445 if not string.strip(): 1446 string = '0' 1447 if typ == 'default': 1448 if fielddef['decimals'] == 0: 1449 return int(string) 1450 else: 1451 return float(string) 1452 else: 1453 return typ(string.strip())
1454 -def updateNumeric(value, fielddef, memo=None):
1455 "returns value as ascii representation, rounding decimal portion as necessary" 1456 try: 1457 value = float(value) 1458 except Exception: 1459 raise DbfError("incompatible type: %s(%s)" % (type(value), value)) 1460 decimalsize = fielddef['decimals'] 1461 if decimalsize: 1462 decimalsize += 1 1463 maxintegersize = fielddef['length']-decimalsize 1464 integersize = len("%.0f" % floor(value)) 1465 if integersize > maxintegersize: 1466 raise DataOverflow('Integer portion too big') 1467 return "%*.*f" % (fielddef['length'], fielddef['decimals'], value)
 1468  def retrieveVfpDateTime(bytes, fielddef={}, memo=None): 
 1469      """returns the date/time stored in bytes; dates <= 01/01/1981 00:00:00 
 1470      may not be accurate; BC dates are nulled.""" 
 1471      # two four-byte integers store the date and time. 
 1472      # milliseconds are discarded from time 
 1473      time = retrieveInteger(bytes[4:]) 
 1474      microseconds = (time % 1000) * 1000 
 1475      time = time // 1000                      # int(round(time, -3)) // 1000 discard milliseconds 
 1476      hours = time // 3600 
 1477      mins = time % 3600 // 60 
 1478      secs = time % 3600 % 60 
 1479      time = Time(hours, mins, secs, microseconds) 
 1480      possible = retrieveInteger(bytes[:4]) 
 1481      possible -= VFPTIME 
 1482      possible = max(0, possible) 
 1483      date = Date.fromordinal(possible) 
 1484      return DateTime.combine(date, time) 
1485 -def updateVfpDateTime(moment, fielddef={}, memo=None):
1486 """sets the date/time stored in moment 1487 moment must have fields year, month, day, hour, minute, second, microsecond""" 1488 bytes = [0] * 8 1489 hour = moment.hour 1490 minute = moment.minute 1491 second = moment.second 1492 millisecond = moment.microsecond // 1000 # convert from millionths to thousandths 1493 time = ((hour * 3600) + (minute * 60) + second) * 1000 + millisecond 1494 bytes[4:] = updateInteger(time) 1495 bytes[:4] = updateInteger(moment.toordinal() + VFPTIME) 1496 return ''.join(bytes)
1497 -def retrieveVfpMemo(bytes, fielddef, memo, typ=None):
1498 "Returns the block of data from a memo file" 1499 block = struct.unpack('<i', bytes)[0] 1500 return memo.get_memo(block, fielddef)
1501 -def updateVfpMemo(string, fielddef, memo):
1502 "Writes string as a memo, returns the block number it was saved into" 1503 block = memo.put_memo(string) 1504 return struct.pack('<i', block)
1505 -def addCharacter(format):
1506 if format[1] != '(' or format[-1] != ')': 1507 raise DbfError("Format for Character field creation is C(n), not %s" % format) 1508 length = int(format[2:-1]) 1509 if not 0 < length < 255: 1510 raise ValueError 1511 decimals = 0 1512 return length, decimals
1513 -def addDate(format):
1514 length = 8 1515 decimals = 0 1516 return length, decimals
1517 -def addLogical(format):
1518 length = 1 1519 decimals = 0 1520 return length, decimals
1521 -def addMemo(format):
1522 length = 10 1523 decimals = 0 1524 return length, decimals
1525 -def addNumeric(format):
1526 if format[1] != '(' or format[-1] != ')': 1527 raise DbfError("Format for Numeric field creation is N(n,n), not %s" % format) 1528 length, decimals = format[2:-1].split(',') 1529 length = int(length) 1530 decimals = int(decimals) 1531 if not 0 < length < 18: 1532 raise ValueError 1533 if decimals and not 0 < decimals <= length - 2: 1534 raise ValueError 1535 return length, decimals
1536 -def addVfpCurrency(format):
1537 length = 8 1538 decimals = 0 1539 return length, decimals
1540 -def addVfpDateTime(format):
1541 length = 8 1542 decimals = 8 1543 return length, decimals
1544 -def addVfpDouble(format):
1545 length = 8 1546 decimals = 0 1547 return length, decimals
1548 -def addVfpInteger(format):
1549 length = 4 1550 decimals = 0 1551 return length, decimals
1552 -def addVfpMemo(format):
1553 length = 4 1554 decimals = 0 1555 return length, decimals
1556 -def addVfpNumeric(format):
1557 if format[1] != '(' or format[-1] != ')': 1558 raise DbfError("Format for Numeric field creation is N(n,n), not %s" % format) 1559 length, decimals = format[2:-1].split(',') 1560 length = int(length) 1561 decimals = int(decimals) 1562 if not 0 < length < 21: 1563 raise ValueError 1564 if decimals and not 0 < decimals <= length - 2: 1565 raise ValueError 1566 return length, decimals
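# The add* helpers above parse the type portion of a field description and
# return a (length, decimals) pair; a brief sketch (illustrative only):
#
#     addCharacter('C(30)')              # (30, 0)
#     addNumeric('N(10,2)')              # (10, 2)
#     addMemo('M')                       # (10, 0) -- the field holds a 10-byte memo block reference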
1567
1568 # Public classes 1569 -class DbfTable():
1570 """Provides a framework for dbf style tables.""" 1571 _version = 'basic memory table' 1572 _versionabbv = 'dbf' 1573 _fieldtypes = { 1574 'D' : { 'Type':'Date', 'Init':addDate, 'Blank':Date.today, 'Retrieve':retrieveDate, 'Update':updateDate, 'Class':None}, 1575 'L' : { 'Type':'Logical', 'Init':addLogical, 'Blank':bool, 'Retrieve':retrieveLogical, 'Update':updateLogical, 'Class':None}, 1576 'M' : { 'Type':'Memo', 'Init':addMemo, 'Blank':str, 'Retrieve':retrieveMemo, 'Update':updateMemo, 'Class':None} } 1577 _memoext = '' 1578 _memotypes = tuple('M', ) 1579 _memoClass = _DbfMemo 1580 _yesMemoMask = '' 1581 _noMemoMask = '' 1582 _fixed_fields = ('M','D','L') # always same length in table 1583 _variable_fields = tuple() # variable length in table 1584 _character_fields = tuple('M', ) # field representing character data 1585 _decimal_fields = tuple() # text-based numeric fields 1586 _numeric_fields = tuple() # fields representing a number 1587 _currency_fields = tuple() 1588 _dbfTableHeader = array('c', '\x00' * 32) 1589 _dbfTableHeader[0] = '\x00' # table type - none 1590 _dbfTableHeader[8:10] = array('c', packShortInt(33)) 1591 _dbfTableHeader[10] = '\x01' # record length -- one for delete flag 1592 _dbfTableHeader[29] = '\x00' # code page -- none, using plain ascii 1593 _dbfTableHeader = _dbfTableHeader.tostring() 1594 _dbfTableHeaderExtra = '' 1595 _supported_tables = [] 1596 _read_only = False 1597 _meta_only = False 1598 _use_deleted = True 1599 backup = False
1600 - class _DbfLists():
1601 "implements the weakref structure for DbfLists"
1602 - def __init__(yo):
1603 yo._lists = set()
1604 - def __iter__(yo):
1605 yo._lists = set([s for s in yo._lists if s() is not None]) 1606 return (s() for s in yo._lists if s() is not None)
1607 - def __len__(yo):
1608 yo._lists = set([s for s in yo._lists if s() is not None]) 1609 return len(yo._lists)
1610 - def add(yo, new_list):
1611 yo._lists.add(weakref.ref(new_list)) 1612 yo._lists = set([s for s in yo._lists if s() is not None])
 1613      class _Indexen(): 
 1614          "implements the weakref structure for separate indexes" 
1615 - def __init__(yo):
1616 yo._indexen = set()
1617 - def __iter__(yo):
1618 yo._indexen = set([s for s in yo._indexen if s() is not None]) 1619 return (s() for s in yo._indexen if s() is not None)
1620 - def __len__(yo):
1621 yo._indexen = set([s for s in yo._indexen if s() is not None]) 1622 return len(yo._indexen)
1623 - def add(yo, new_list):
1624 yo._indexen.add(weakref.ref(new_list)) 1625 yo._indexen = set([s for s in yo._indexen if s() is not None])
1626 - class _MetaData(dict):
1627 blankrecord = None 1628 fields = None 1629 filename = None 1630 dfd = None 1631 memoname = None 1632 newmemofile = False 1633 memo = None 1634 mfd = None 1635 ignorememos = False 1636 memofields = None 1637 current = -1
1638 - class _TableHeader():
1639 - def __init__(yo, data):
1640 if len(data) != 32: 1641 raise DbfError('table header should be 32 bytes, but is %d bytes' % len(data)) 1642 yo._data = array('c', data + '\x0d')
1643 - def codepage(yo, cp=None):
1644 "get/set code page of table" 1645 if cp is None: 1646 return yo._data[29] 1647 else: 1648 cp, sd, ld = _codepage_lookup(cp) 1649 yo._data[29] = cp 1650 return cp
1651 @property
1652 - def data(yo):
1653 "main data structure" 1654 date = packDate(Date.today()) 1655 yo._data[1:4] = array('c', date) 1656 return yo._data.tostring()
1657 @data.setter
1658 - def data(yo, bytes):
1659 if len(bytes) < 32: 1660 raise DbfError("length for data of %d is less than 32" % len(bytes)) 1661 yo._data[:] = array('c', bytes)
1662 @property
1663 - def extra(yo):
1664 "extra dbf info (located after headers, before data records)" 1665 fieldblock = yo._data[32:] 1666 for i in range(len(fieldblock)//32+1): 1667 cr = i * 32 1668 if fieldblock[cr] == '\x0d': 1669 break 1670 else: 1671 raise DbfError("corrupt field structure") 1672 cr += 33 # skip past CR 1673 return yo._data[cr:].tostring()
1674 @extra.setter
1675 - def extra(yo, data):
1676 fieldblock = yo._data[32:] 1677 for i in range(len(fieldblock)//32+1): 1678 cr = i * 32 1679 if fieldblock[cr] == '\x0d': 1680 break 1681 else: 1682 raise DbfError("corrupt field structure") 1683 cr += 33 # skip past CR 1684 yo._data[cr:] = array('c', data) # extra 1685 yo._data[8:10] = array('c', packShortInt(len(yo._data))) # start
1686 @property
1687 - def field_count(yo):
1688 "number of fields (read-only)" 1689 fieldblock = yo._data[32:] 1690 for i in range(len(fieldblock)//32+1): 1691 cr = i * 32 1692 if fieldblock[cr] == '\x0d': 1693 break 1694 else: 1695 raise DbfError("corrupt field structure") 1696 return len(fieldblock[:cr]) // 32
1697 @property
1698 - def fields(yo):
1699 "field block structure" 1700 fieldblock = yo._data[32:] 1701 for i in range(len(fieldblock)//32+1): 1702 cr = i * 32 1703 if fieldblock[cr] == '\x0d': 1704 break 1705 else: 1706 raise DbfError("corrupt field structure") 1707 return fieldblock[:cr].tostring()
1708 @fields.setter
1709 - def fields(yo, block):
1710 fieldblock = yo._data[32:] 1711 for i in range(len(fieldblock)//32+1): 1712 cr = i * 32 1713 if fieldblock[cr] == '\x0d': 1714 break 1715 else: 1716 raise DbfError("corrupt field structure") 1717 cr += 32 # convert to indexing main structure 1718 fieldlen = len(block) 1719 if fieldlen % 32 != 0: 1720 raise DbfError("fields structure corrupt: %d is not a multiple of 32" % fieldlen) 1721 yo._data[32:cr] = array('c', block) # fields 1722 yo._data[8:10] = array('c', packShortInt(len(yo._data))) # start 1723 fieldlen = fieldlen // 32 1724 recordlen = 1 # deleted flag 1725 for i in range(fieldlen): 1726 recordlen += ord(block[i*32+16]) 1727 yo._data[10:12] = array('c', packShortInt(recordlen))
1728 @property
1729 - def record_count(yo):
1730 "number of records (maximum 16,777,215)" 1731 return unpackLongInt(yo._data[4:8].tostring())
1732 @record_count.setter
1733 - def record_count(yo, count):
1734 yo._data[4:8] = array('c', packLongInt(count))
1735 @property
1736 - def record_length(yo):
1737 "length of a record (read_only) (max of 65,535)" 1738 return unpackShortInt(yo._data[10:12].tostring())
1739 @property
1740 - def start(yo):
1741 "starting position of first record in file (must be within first 64K)" 1742 return unpackShortInt(yo._data[8:10].tostring())
1743 @start.setter
1744 - def start(yo, pos):
1745 yo._data[8:10] = array('c', packShortInt(pos))
1746 @property
1747 - def update(yo):
1748 "date of last table modification (read-only)" 1749 return unpackDate(yo._data[1:4].tostring())
1750 @property
1751 - def version(yo):
1752 "dbf version" 1753 return yo._data[0]
1754 @version.setter
1755 - def version(yo, ver):
1756 yo._data[0] = ver
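# Illustrative sketch, not part of the module source: the same 32-byte header
# fields that _TableHeader manages, read directly with struct.  Offsets match the
# accessors above (version at 0, update date at 1-3, record count at 4-7, start at
# 8-9, record length at 10-11, code page at 29); integers are little-endian.  The
# helper name is made up for this sketch.
def _peek_header_sketch(filename):
    import struct
    data = open(filename, 'rb').read(32)
    version = ord(data[0])                        # table type, e.g. 0x03, 0x83, 0x30
    yy, mm, dd = [ord(c) for c in data[1:4]]      # last update; year commonly stored as years since 1900
    count, = struct.unpack('<L', data[4:8])       # number of records
    start, = struct.unpack('<H', data[8:10])      # offset of the first record
    length, = struct.unpack('<H', data[10:12])    # bytes per record, including the delete flag
    codepage = ord(data[29])                      # code page byte
    return version, (1900 + yy, mm, dd), count, start, length, codepage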
1757 - class _Table():
1758 "implements the weakref table for records"
1759 - def __init__(yo, count, meta):
1760 yo._meta = meta 1761 yo._weakref_list = [weakref.ref(lambda x: None)] * count
1762 - def __getitem__(yo, index):
1763 maybe = yo._weakref_list[index]() 1764 if maybe is None: 1765 if index < 0: 1766 index += yo._meta.header.record_count 1767 size = yo._meta.header.record_length 1768 location = index * size + yo._meta.header.start 1769 yo._meta.dfd.seek(location) 1770 if yo._meta.dfd.tell() != location: 1771 raise ValueError("unable to seek to offset %d in file" % location) 1772 bytes = yo._meta.dfd.read(size) 1773 if not bytes: 1774 raise ValueError("unable to read record data from %s at location %d" % (yo._meta.filename, location)) 1775 maybe = _DbfRecord(recnum=index, layout=yo._meta, kamikaze=bytes, _fromdisk=True) 1776 yo._weakref_list[index] = weakref.ref(maybe) 1777 return maybe
1778 - def append(yo, record):
1779 yo._weakref_list.append(weakref.ref(record))
1780 - def clear(yo):
1781 yo._weakref_list[:] = []
1782 - def pop(yo):
1783 return yo._weakref_list.pop()
1784 - class DbfIterator():
1785 "returns records using current index"
1786 - def __init__(yo, table):
1787 yo._table = table 1788 yo._index = -1 1789 yo._more_records = True
1790 - def __iter__(yo):
1791 return yo
1792 - def next(yo):
1793 while yo._more_records: 1794 yo._index += 1 1795 if yo._index >= len(yo._table): 1796 yo._more_records = False 1797 continue 1798 record = yo._table[yo._index] 1799 if not yo._table.use_deleted and record.has_been_deleted: 1800 continue 1801 return record 1802 else: 1803 raise StopIteration
1804 - def _buildHeaderFields(yo):
1805 "constructs fieldblock for disk table" 1806 fieldblock = array('c', '') 1807 memo = False 1808 yo._meta.header.version = chr(ord(yo._meta.header.version) & ord(yo._noMemoMask)) 1809 for field in yo._meta.fields: 1810 if yo._meta.fields.count(field) > 1: 1811 raise DbfError("corrupted field structure (noticed in _buildHeaderFields)") 1812 fielddef = array('c', '\x00' * 32) 1813 fielddef[:11] = array('c', packStr(field)) 1814 fielddef[11] = yo._meta[field]['type'] 1815 fielddef[12:16] = array('c', packLongInt(yo._meta[field]['start'])) 1816 fielddef[16] = chr(yo._meta[field]['length']) 1817 fielddef[17] = chr(yo._meta[field]['decimals']) 1818 fielddef[18] = chr(yo._meta[field]['flags']) 1819 fieldblock.extend(fielddef) 1820 if yo._meta[field]['type'] in yo._meta.memotypes: 1821 memo = True 1822 yo._meta.header.fields = fieldblock.tostring() 1823 if memo: 1824 yo._meta.header.version = chr(ord(yo._meta.header.version) | ord(yo._yesMemoMask)) 1825 if yo._meta.memo is None: 1826 yo._meta.memo = yo._memoClass(yo._meta)
1827 - def _checkMemoIntegrity(yo):
1828 "dBase III specific" 1829 if yo._meta.header.version == '\x83': 1830 try: 1831 yo._meta.memo = yo._memoClass(yo._meta) 1832 except: 1833 yo._meta.dfd.close() 1834 yo._meta.dfd = None 1835 raise 1836 if not yo._meta.ignorememos: 1837 for field in yo._meta.fields: 1838 if yo._meta[field]['type'] in yo._memotypes: 1839 if yo._meta.header.version != '\x83': 1840 yo._meta.dfd.close() 1841 yo._meta.dfd = None 1842 raise DbfError("Table structure corrupt: memo fields exist, header declares no memos") 1843 elif not os.path.exists(yo._meta.memoname): 1844 yo._meta.dfd.close() 1845 yo._meta.dfd = None 1846 raise DbfError("Table structure corrupt: memo fields exist without memo file") 1847 break
1848 - def _initializeFields(yo):
1849 "builds the FieldList of names, types, and descriptions from the disk file" 1850 yo._meta.fields[:] = [] 1851 offset = 1 1852 fieldsdef = yo._meta.header.fields 1853 if len(fieldsdef) % 32 != 0: 1854 raise DbfError("field definition block corrupt: %d bytes in size" % len(fieldsdef)) 1855 if len(fieldsdef) // 32 != yo.field_count: 1856 raise DbfError("Header shows %d fields, but field definition block has %d fields" % (yo.field_count, len(fieldsdef)//32)) 1857 for i in range(yo.field_count): 1858 fieldblock = fieldsdef[i*32:(i+1)*32] 1859 name = unpackStr(fieldblock[:11]) 1860 type = fieldblock[11] 1861 if not type in yo._meta.fieldtypes: 1862 raise DbfError("Unknown field type: %s" % type) 1863 start = offset 1864 length = ord(fieldblock[16]) 1865 offset += length 1866 end = start + length 1867 decimals = ord(fieldblock[17]) 1868 flags = ord(fieldblock[18]) 1869 if name in yo._meta.fields: 1870 raise DbfError('Duplicate field name found: %s' % name) 1871 yo._meta.fields.append(name) 1872 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
1873 - def _fieldLayout(yo, i):
1874 "Returns field information Name Type(Length[,Decimals])" 1875 name = yo._meta.fields[i] 1876 type = yo._meta[name]['type'] 1877 length = yo._meta[name]['length'] 1878 decimals = yo._meta[name]['decimals'] 1879 if type in yo._decimal_fields: 1880 description = "%s %s(%d,%d)" % (name, type, length, decimals) 1881 elif type in yo._fixed_fields: 1882 description = "%s %s" % (name, type) 1883 else: 1884 description = "%s %s(%d)" % (name, type, length) 1885 return description
1886 - def _loadtable(yo):
1887 "loads the records from disk to memory" 1888 if yo._meta_only: 1889 raise DbfError("%s has been closed, records are unavailable" % yo.filename) 1890 dfd = yo._meta.dfd 1891 header = yo._meta.header 1892 dfd.seek(header.start) 1893 allrecords = dfd.read() # kludge to get around mysterious errno 0 problems 1894 dfd.seek(0) 1895 length = header.record_length 1896 for i in range(header.record_count): 1897 record_data = allrecords[length*i:length*i+length] 1898 yo._table.append(_DbfRecord(i, yo._meta, allrecords[length*i:length*i+length], _fromdisk=True)) 1899 dfd.seek(0)
1900 - def _list_fields(yo, specs, sep=','):
1901 if specs is None: 1902 specs = yo.field_names 1903 elif isinstance(specs, str): 1904 specs = specs.split(sep) 1905 else: 1906 specs = list(specs) 1907 specs = [s.strip() for s in specs] 1908 return specs
1909 - def _update_disk(yo, headeronly=False):
1910 "synchronizes the disk file with current data" 1911 if yo._meta.inmemory: 1912 return 1913 fd = yo._meta.dfd 1914 fd.seek(0) 1915 fd.write(yo._meta.header.data) 1916 if not headeronly: 1917 for record in yo._table: 1918 record._update_disk() 1919 fd.flush() 1920 fd.truncate(yo._meta.header.start + yo._meta.header.record_count * yo._meta.header.record_length) 1921 if 'db3' in yo._versionabbv: 1922 fd.seek(0, os.SEEK_END) 1923 fd.write('\x1a') # required for dBase III 1924 fd.flush() 1925 fd.truncate(yo._meta.header.start + yo._meta.header.record_count * yo._meta.header.record_length + 1)
1926
1927 - def __contains__(yo, key):
1928 return key in yo.field_names
1929 - def __enter__(yo):
1930 return yo
1931 - def __exit__(yo, *exc_info):
1932 yo.close()
1933 - def __getattr__(yo, name):
1934 if name in ('_table',): 1935 if yo._meta.ondisk: 1936 yo._table = yo._Table(len(yo), yo._meta) 1937 else: 1938 yo._table = [] 1939 yo._loadtable() 1940 return object.__getattribute__(yo, name)
1941 - def __getitem__(yo, value):
1942 if type(value) == int: 1943 if not -yo._meta.header.record_count <= value < yo._meta.header.record_count: 1944 raise IndexError("Record %d is not in table." % value) 1945 return yo._table[value] 1946 elif type(value) == slice: 1947 sequence = List(desc='%s --> %s' % (yo.filename, value), field_names=yo.field_names) 1948 yo._dbflists.add(sequence) 1949 for index in range(len(yo))[value]: 1950 record = yo._table[index] 1951 if yo.use_deleted is True or not record.has_been_deleted: 1952 sequence.append(record) 1953 return sequence 1954 else: 1955 raise TypeError('type <%s> not valid for indexing' % type(value))
1956 - def __init__(yo, filename=':memory:', field_specs=None, memo_size=128, ignore_memos=False, 1957 read_only=False, keep_memos=False, meta_only=False, codepage=None, 1958 numbers='default', strings=str, currency=Decimal):
1959 """open/create dbf file 1960 filename should include path if needed 1961 field_specs can be either a ;-delimited string or a list of strings 1962 memo_size is always 512 for db3 memos 1963 ignore_memos is useful if the memo file is missing or corrupt 1964 read_only will load records into memory, then close the disk file 1965 keep_memos will also load any memo fields into memory 1966 meta_only will ignore all records, keeping only basic table information 1967 codepage will override whatever is set in the table itself""" 1968 if filename[0] == filename[-1] == ':': 1969 if field_specs is None: 1970 raise DbfError("field list must be specified for memory tables") 1971 elif type(yo) is DbfTable: 1972 raise DbfError("only memory tables supported") 1973 yo._dbflists = yo._DbfLists() 1974 yo._indexen = yo._Indexen() 1975 yo._meta = meta = yo._MetaData() 1976 for datatypes, classtype in ( 1977 (yo._character_fields, strings), 1978 (yo._numeric_fields, numbers), 1979 (yo._currency_fields, currency), 1980 ): 1981 for datatype in datatypes: 1982 yo._fieldtypes[datatype]['Class'] = classtype 1983 meta.numbers = numbers 1984 meta.strings = strings 1985 meta.currency = currency 1986 meta.table = weakref.ref(yo) 1987 meta.filename = filename 1988 meta.fields = [] 1989 meta.fieldtypes = yo._fieldtypes 1990 meta.fixed_fields = yo._fixed_fields 1991 meta.variable_fields = yo._variable_fields 1992 meta.character_fields = yo._character_fields 1993 meta.decimal_fields = yo._decimal_fields 1994 meta.numeric_fields = yo._numeric_fields 1995 meta.memotypes = yo._memotypes 1996 meta.ignorememos = ignore_memos 1997 meta.memo_size = memo_size 1998 meta.input_decoder = codecs.getdecoder(input_decoding) # from ascii to unicode 1999 meta.output_encoder = codecs.getencoder(input_decoding) # and back to ascii 2000 meta.return_ascii = return_ascii 2001 meta.header = header = yo._TableHeader(yo._dbfTableHeader) 2002 header.extra = yo._dbfTableHeaderExtra 2003 header.data #force update of date 2004 if filename[0] == filename[-1] == ':': 2005 yo._table = [] 2006 meta.ondisk = False 2007 meta.inmemory = True 2008 meta.memoname = filename 2009 else: 2010 base, ext = os.path.splitext(filename) 2011 if ext == '': 2012 meta.filename = base + '.dbf' 2013 meta.memoname = base + yo._memoext 2014 meta.ondisk = True 2015 meta.inmemory = False 2016 if field_specs: 2017 if meta.ondisk: 2018 meta.dfd = open(meta.filename, 'w+b') 2019 meta.newmemofile = True 2020 yo.add_fields(field_specs) 2021 header.codepage(codepage or default_codepage) 2022 cp, sd, ld = _codepage_lookup(meta.header.codepage()) 2023 meta.decoder = codecs.getdecoder(sd) 2024 meta.encoder = codecs.getencoder(sd) 2025 return 2026 try: 2027 dfd = meta.dfd = open(meta.filename, 'r+b') 2028 except IOError, e: 2029 raise DbfError(str(e)) 2030 dfd.seek(0) 2031 meta.header = header = yo._TableHeader(dfd.read(32)) 2032 if not header.version in yo._supported_tables: 2033 dfd.close() 2034 dfd = None 2035 raise DbfError( 2036 "%s does not support %s [%x]" % 2037 (yo._version, 2038 version_map.get(meta.header.version, 'Unknown: %s' % meta.header.version), 2039 ord(meta.header.version))) 2040 cp, sd, ld = _codepage_lookup(meta.header.codepage()) 2041 yo._meta.decoder = codecs.getdecoder(sd) 2042 yo._meta.encoder = codecs.getencoder(sd) 2043 fieldblock = dfd.read(header.start - 32) 2044 for i in range(len(fieldblock)//32+1): 2045 fieldend = i * 32 2046 if fieldblock[fieldend] == '\x0d': 2047 break 2048 else: 2049 raise DbfError("corrupt field structure in header") 2050 if 
len(fieldblock[:fieldend]) % 32 != 0: 2051 raise DbfError("corrupt field structure in header") 2052 header.fields = fieldblock[:fieldend] 2053 header.extra = fieldblock[fieldend+1:] # skip trailing \r 2054 yo._initializeFields() 2055 yo._checkMemoIntegrity() 2056 meta.current = -1 2057 if len(yo) > 0: 2058 meta.current = 0 2059 dfd.seek(0) 2060 if meta_only: 2061 yo.close(keep_table=False, keep_memos=False) 2062 elif read_only: 2063 yo.close(keep_table=True, keep_memos=keep_memos) 2064 if codepage is not None: 2065 cp, sd, ld = _codepage_lookup(codepage) 2066 yo._meta.decoder = codecs.getdecoder(sd) 2067 yo._meta.encoder = codecs.getencoder(sd)
2068
2069 - def __iter__(yo):
2070 return yo.DbfIterator(yo)
2071 - def __len__(yo):
2072 return yo._meta.header.record_count
2073 - def __nonzero__(yo):
2074 return yo._meta.header.record_count != 0
2075 - def __repr__(yo):
2076 if yo._read_only: 2077 return __name__ + ".Table('%s', read_only=True)" % yo._meta.filename 2078 elif yo._meta_only: 2079 return __name__ + ".Table('%s', meta_only=True)" % yo._meta.filename 2080 else: 2081 return __name__ + ".Table('%s')" % yo._meta.filename
2082 - def __str__(yo):
2083 if yo._read_only: 2084 status = "read-only" 2085 elif yo._meta_only: 2086 status = "meta-only" 2087 else: 2088 status = "read/write" 2089 str = """ 2090 Table: %s 2091 Type: %s 2092 Codepage: %s 2093 Status: %s 2094 Last updated: %s 2095 Record count: %d 2096 Field count: %d 2097 Record length: %d """ % (yo.filename, version_map.get(yo._meta.header.version, 2098 'unknown - ' + hex(ord(yo._meta.header.version))), yo.codepage, status, 2099 yo.last_update, len(yo), yo.field_count, yo.record_length) 2100 str += "\n --Fields--\n" 2101 for i in range(len(yo._meta.fields)): 2102 str += "%11d) %s\n" % (i, yo._fieldLayout(i)) 2103 return str
2104 @property
2105 - def codepage(yo):
2106 return "%s (%s)" % code_pages[yo._meta.header.codepage()]
2107 @codepage.setter
2108 - def codepage(yo, cp):
2109 cp = code_pages[yo._meta.header.codepage(cp)][0] 2110 yo._meta.decoder = codecs.getdecoder(cp) 2111 yo._meta.encoder = codecs.getencoder(cp) 2112 yo._update_disk(headeronly=True)
2113 @property
2114 - def field_count(yo):
2115 "the number of fields in the table" 2116 return yo._meta.header.field_count
2117 @property
2118 - def field_names(yo):
2119 "a list of the fields in the table" 2120 return yo._meta.fields[:]
2121 @property
2122 - def filename(yo):
2123 "table's file name, including path (if specified on open)" 2124 return yo._meta.filename
2125 @property
2126 - def last_update(yo):
2127 "date of last update" 2128 return yo._meta.header.update
2129 @property
2130 - def memoname(yo):
2131 "table's memo name (if path included in filename on open)" 2132 return yo._meta.memoname
2133 @property
2134 - def record_length(yo):
2135 "number of bytes in a record" 2136 return yo._meta.header.record_length
2137 @property
2138 - def record_number(yo):
2139 "index number of the current record" 2140 return yo._meta.current
2141 @property
2142 - def supported_tables(yo):
2143 "allowable table types" 2144 return yo._supported_tables
2145 @property
2146 - def use_deleted(yo):
2147 "process or ignore deleted records" 2148 return yo._use_deleted
2149 @use_deleted.setter
2150 - def use_deleted(yo, new_setting):
2151 yo._use_deleted = new_setting
2152 @property
2153 - def version(yo):
2154 "returns the dbf type of the table" 2155 return yo._version
2156 - def add_fields(yo, field_specs):
2157 """adds field(s) to the table layout; format is Name Type(Length,Decimals)[; Name Type(Length,Decimals)[...]] 2158 backup table is created with _backup appended to name 2159 then modifies current structure""" 2160 all_records = [record for record in yo] 2161 if yo: 2162 yo.create_backup() 2163 yo._meta.blankrecord = None 2164 meta = yo._meta 2165 offset = meta.header.record_length 2166 fields = yo._list_fields(field_specs, sep=';') 2167 for field in fields: 2168 try: 2169 name, format = field.split() 2170 if name[0] == '_' or name[0].isdigit() or not name.replace('_','').isalnum(): 2171 raise DbfError("%s invalid: field names must start with a letter, and can only contain letters, digits, and _" % name) 2172 name = name.lower() 2173 if name in meta.fields: 2174 raise DbfError("Field '%s' already exists" % name) 2175 field_type = format[0].upper() 2176 if len(name) > 10: 2177 raise DbfError("Maximum field name length is 10. '%s' is %d characters long." % (name, len(name))) 2178 if not field_type in meta.fieldtypes.keys(): 2179 raise DbfError("Unknown field type: %s" % field_type) 2180 length, decimals = yo._meta.fieldtypes[field_type]['Init'](format) 2181 except ValueError: 2182 raise DbfError("invalid field specifier: %s (multiple fields should be separated with ';'" % field) 2183 start = offset 2184 end = offset + length 2185 offset = end 2186 meta.fields.append(name) 2187 meta[name] = {'type':field_type, 'start':start, 'length':length, 'end':end, 'decimals':decimals, 'flags':0} 2188 if meta[name]['type'] in yo._memotypes and meta.memo is None: 2189 meta.memo = yo._memoClass(meta) 2190 for record in yo: 2191 record[name] = meta.fieldtypes[field_type]['Blank']() 2192 yo._buildHeaderFields() 2193 yo._update_disk()
2194 - def append(yo, kamikaze='', drop=False, multiple=1):
2195 "adds <multiple> blank records, and fills fields with dict/tuple values if present" 2196 if not yo.field_count: 2197 raise DbfError("No fields defined, cannot append") 2198 empty_table = len(yo) == 0 2199 dictdata = False 2200 tupledata = False 2201 if not isinstance(kamikaze, _DbfRecord): 2202 if isinstance(kamikaze, dict): 2203 dictdata = kamikaze 2204 kamikaze = '' 2205 elif isinstance(kamikaze, tuple): 2206 tupledata = kamikaze 2207 kamikaze = '' 2208 newrecord = _DbfRecord(recnum=yo._meta.header.record_count, layout=yo._meta, kamikaze=kamikaze) 2209 yo._table.append(newrecord) 2210 yo._meta.header.record_count += 1 2211 try: 2212 if dictdata: 2213 newrecord.gather_fields(dictdata, drop=drop) 2214 elif tupledata: 2215 for index, item in enumerate(tupledata): 2216 newrecord[index] = item 2217 elif kamikaze == str: 2218 for field in yo._meta.memofields: 2219 newrecord[field] = '' 2220 elif kamikaze: 2221 for field in yo._meta.memofields: 2222 newrecord[field] = kamikaze[field] 2223 newrecord.write_record() 2224 except Exception: 2225 yo._table.pop() # discard failed record 2226 yo._meta.header.record_count = yo._meta.header.record_count - 1 2227 yo._update_disk() 2228 raise 2229 multiple -= 1 2230 if multiple: 2231 data = newrecord._data 2232 single = yo._meta.header.record_count 2233 total = single + multiple 2234 while single < total: 2235 multi_record = _DbfRecord(single, yo._meta, kamikaze=data) 2236 yo._table.append(multi_record) 2237 for field in yo._meta.memofields: 2238 multi_record[field] = newrecord[field] 2239 single += 1 2240 multi_record.write_record() 2241 yo._meta.header.record_count = total # += multiple 2242 yo._meta.current = yo._meta.header.record_count - 1 2243 newrecord = multi_record 2244 yo._update_disk(headeronly=True) 2245 if empty_table: 2246 yo._meta.current = 0 2247 return newrecord
2248 - def bof(yo, _move=False):
2249 "moves record pointer to previous usable record; returns True if no more usable records" 2250 current = yo._meta.current 2251 try: 2252 while yo._meta.current > 0: 2253 yo._meta.current -= 1 2254 if yo.use_deleted or not yo.current().has_been_deleted: 2255 break 2256 else: 2257 yo._meta.current = -1 2258 return True 2259 return False 2260 finally: 2261 if not _move: 2262 yo._meta.current = current
2263 - def bottom(yo, get_record=False):
2264 """sets record pointer to bottom of table 2265 if get_record, seeks to and returns last (non-deleted) record 2266 DbfError if table is empty 2267 Bof if all records deleted and use_deleted is False""" 2268 yo._meta.current = yo._meta.header.record_count 2269 if get_record: 2270 try: 2271 return yo.prev() 2272 except Bof: 2273 yo._meta.current = yo._meta.header.record_count 2274 raise Eof()
2275 - def close(yo, keep_table=False, keep_memos=False):
2276 """closes disk files 2277 ensures table data is available if keep_table 2278 ensures memo data is available if keep_memos""" 2279 yo._meta.inmemory = True 2280 if keep_table: 2281 replacement_table = [] 2282 for record in yo._table: 2283 replacement_table.append(record) 2284 yo._table = replacement_table 2285 else: 2286 if yo._meta.ondisk: 2287 yo._meta_only = True 2288 if yo._meta.mfd is not None: 2289 if not keep_memos: 2290 yo._meta.ignorememos = True 2291 else: 2292 memo_fields = [] 2293 for field in yo.field_names: 2294 if yo.is_memotype(field): 2295 memo_fields.append(field) 2296 for record in yo: 2297 for field in memo_fields: 2298 record[field] = record[field] 2299 yo._meta.mfd.close() 2300 yo._meta.mfd = None 2301 if yo._meta.ondisk: 2302 yo._meta.dfd.close() 2303 yo._meta.dfd = None 2304 if keep_table: 2305 yo._read_only = True 2306 yo._meta.ondisk = False
2307 - def create_backup(yo, new_name=None, overwrite=False):
2308 "creates a backup table -- ignored if memory table" 2309 if yo.filename[0] == yo.filename[-1] == ':': 2310 return 2311 if new_name is None: 2312 upper = yo.filename.isupper() 2313 name, ext = os.path.splitext(os.path.split(yo.filename)[1]) 2314 extra = '_BACKUP' if upper else '_backup' 2315 new_name = os.path.join(temp_dir, name + extra + ext) 2316 else: 2317 overwrite = True 2318 if overwrite or not yo.backup: 2319 bkup = open(new_name, 'wb') 2320 try: 2321 yo._meta.dfd.seek(0) 2322 copyfileobj(yo._meta.dfd, bkup) 2323 yo.backup = new_name 2324 finally: 2325 bkup.close()
2326 - def create_index(yo, key):
2327 return Index(yo, key)
2328 - def current(yo, index=False):
2329 "returns current logical record, or its index" 2330 if yo._meta.current < 0: 2331 raise Bof() 2332 elif yo._meta.current >= yo._meta.header.record_count: 2333 raise Eof() 2334 if index: 2335 return yo._meta.current 2336 return yo._table[yo._meta.current]
2337 - def delete_fields(yo, doomed):
2338 """removes field(s) from the table 2339 creates backup files with _backup appended to the file name, 2340 then modifies current structure""" 2341 doomed = yo._list_fields(doomed) 2342 for victim in doomed: 2343 if victim not in yo._meta.fields: 2344 raise DbfError("field %s not in table -- delete aborted" % victim) 2345 all_records = [record for record in yo] 2346 yo.create_backup() 2347 for victim in doomed: 2348 yo._meta.fields.pop(yo._meta.fields.index(victim)) 2349 start = yo._meta[victim]['start'] 2350 end = yo._meta[victim]['end'] 2351 for record in yo: 2352 record._data = record._data[:start] + record._data[end:] 2353 for field in yo._meta.fields: 2354 if yo._meta[field]['start'] == end: 2355 end = yo._meta[field]['end'] 2356 yo._meta[field]['start'] = start 2357 yo._meta[field]['end'] = start + yo._meta[field]['length'] 2358 start = yo._meta[field]['end'] 2359 yo._buildHeaderFields() 2360 yo._update_disk()
2361 - def eof(yo, _move=False):
2362 "moves record pointer to next usable record; returns True if no more usable records" 2363 current = yo._meta.current 2364 try: 2365 while yo._meta.current < yo._meta.header.record_count - 1: 2366 yo._meta.current += 1 2367 if yo.use_deleted or not yo.current().has_been_deleted: 2368 break 2369 else: 2370 yo._meta.current = yo._meta.header.record_count 2371 return True 2372 return False 2373 finally: 2374 if not _move: 2375 yo._meta.current = current
2376 - def export(yo, records=None, filename=None, field_specs=None, format='csv', header=True):
2377 """writes the table using CSV or tab-delimited format, using the filename 2378 given if specified, otherwise the table name""" 2379 if filename is not None: 2380 path, filename = os.path.split(filename) 2381 else: 2382 path, filename = os.path.split(yo.filename) 2383 filename = os.path.join(path, filename) 2384 field_specs = yo._list_fields(field_specs) 2385 if records is None: 2386 records = yo 2387 format = format.lower() 2388 if format not in ('csv', 'tab', 'fixed'): 2389 raise DbfError("export format: csv, tab, or fixed -- not %s" % format) 2390 if format == 'fixed': 2391 format = 'txt' 2392 base, ext = os.path.splitext(filename) 2393 if ext.lower() in ('', '.dbf'): 2394 filename = base + "." + format[:3] 2395 fd = open(filename, 'w') 2396 try: 2397 if format == 'csv': 2398 csvfile = csv.writer(fd, dialect='dbf') 2399 if header: 2400 csvfile.writerow(field_specs) 2401 for record in records: 2402 fields = [] 2403 for fieldname in field_specs: 2404 fields.append(record[fieldname]) 2405 csvfile.writerow(fields) 2406 elif format == 'tab': 2407 if header: 2408 fd.write('\t'.join(field_specs) + '\n') 2409 for record in records: 2410 fields = [] 2411 for fieldname in field_specs: 2412 fields.append(str(record[fieldname])) 2413 fd.write('\t'.join(fields) + '\n') 2414 else: # format == 'fixed' 2415 header = open("%s_layout.txt" % os.path.splitext(filename)[0], 'w') 2416 header.write("%-15s Size\n" % "Field Name") 2417 header.write("%-15s ----\n" % ("-" * 15)) 2418 sizes = [] 2419 for field in field_specs: 2420 size = yo.size(field)[0] 2421 sizes.append(size) 2422 header.write("%-15s %3d\n" % (field, size)) 2423 header.write('\nTotal Records in file: %d\n' % len(records)) 2424 header.close() 2425 for record in records: 2426 fields = [] 2427 for i, field_name in enumerate(field_specs): 2428 fields.append("%-*s" % (sizes[i], record[field_name])) 2429 fd.write(''.join(fields) + '\n') 2430 finally: 2431 fd.close() 2432 fd = None 2433 return len(records)
2434 - def find(yo, command):
2435 "uses exec to perform queries on the table" 2436 possible = List(desc="%s --> %s" % (yo.filename, command), field_names=yo.field_names) 2437 yo._dbflists.add(possible) 2438 result = {} 2439 select = 'result["keep"] = %s' % command 2440 g = {} 2441 use_deleted = yo.use_deleted 2442 for record in yo: 2443 result['keep'] = False 2444 g['result'] = result 2445 exec select in g, record 2446 if result['keep']: 2447 possible.append(record) 2448 record.write_record() 2449 return possible
2450 - def get_record(yo, recno):
2451 "returns record at physical_index[recno]" 2452 return yo._table[recno]
2453 - def goto(yo, criteria):
2454 """changes the record pointer to the first matching (non-deleted) record 2455 criteria should be either a tuple of tuple(value, field, func) triples, 2456 or an integer to go to""" 2457 if isinstance(criteria, int): 2458 if not -yo._meta.header.record_count <= criteria < yo._meta.header.record_count: 2459 raise IndexError("Record %d does not exist" % criteria) 2460 if criteria < 0: 2461 criteria += yo._meta.header.record_count 2462 yo._meta.current = criteria 2463 return yo.current() 2464 criteria = _normalize_tuples(tuples=criteria, length=3, filler=[_nop]) 2465 specs = tuple([(field, func) for value, field, func in criteria]) 2466 match = tuple([value for value, field, func in criteria]) 2467 current = yo.current(index=True) 2468 matchlen = len(match) 2469 while not yo.Eof(): 2470 record = yo.current() 2471 results = record(*specs) 2472 if results == match: 2473 return record 2474 return yo.goto(current)
2475 - def is_decimal(yo, name):
2476 "returns True if name is a variable-length field type" 2477 return yo._meta[name]['type'] in yo._decimal_fields
2478 - def is_memotype(yo, name):
2479 "returns True if name is a memo type field" 2480 return yo._meta[name]['type'] in yo._memotypes
2481 - def new(yo, filename, field_specs=None, codepage=None):
2482 "returns a new table of the same type" 2483 if field_specs is None: 2484 field_specs = yo.structure() 2485 if not (filename[0] == filename[-1] == ':'): 2486 path, name = os.path.split(filename) 2487 if path == "": 2488 filename = os.path.join(os.path.split(yo.filename)[0], filename) 2489 elif name == "": 2490 filename = os.path.join(path, os.path.split(yo.filename)[1]) 2491 if codepage is None: 2492 codepage = yo._meta.header.codepage()[0] 2493 return yo.__class__(filename, field_specs, codepage=codepage)
2494 - def next(yo):
2495 "set record pointer to next (non-deleted) record, and return it" 2496 if yo.eof(_move=True): 2497 raise Eof() 2498 return yo.current()
2499 - def open(yo):
2500 meta = yo._meta 2501 meta.inmemory = False 2502 meta.ondisk = True 2503 yo._read_only = False 2504 yo._meta_only = False 2505 if '_table' in dir(yo): 2506 del yo._table 2507 dfd = meta.dfd = open(meta.filename, 'r+b') 2508 dfd.seek(0) 2509 meta.header = header = yo._TableHeader(dfd.read(32)) 2510 if not header.version in yo._supported_tables: 2511 dfd.close() 2512 dfd = None 2513 raise DbfError("Unsupported dbf type: %s [%x]" % (version_map.get(meta.header.version, 'Unknown: %s' % meta.header.version), ord(meta.header.version))) 2514 cp, sd, ld = _codepage_lookup(meta.header.codepage()) 2515 meta.decoder = codecs.getdecoder(sd) 2516 meta.encoder = codecs.getencoder(sd) 2517 fieldblock = dfd.read(header.start - 32) 2518 for i in range(len(fieldblock)//32+1): 2519 fieldend = i * 32 2520 if fieldblock[fieldend] == '\x0d': 2521 break 2522 else: 2523 raise DbfError("corrupt field structure in header") 2524 if len(fieldblock[:fieldend]) % 32 != 0: 2525 raise DbfError("corrupt field structure in header") 2526 header.fields = fieldblock[:fieldend] 2527 header.extra = fieldblock[fieldend+1:] # skip trailing \r 2528 yo._initializeFields() 2529 yo._checkMemoIntegrity() 2530 meta.current = -1 2531 if len(yo) > 0: 2532 meta.current = 0 2533 dfd.seek(0)
2534
2535 - def pack(yo, _pack=True):
2536 "physically removes all deleted records" 2537 for dbfindex in yo._indexen: 2538 dbfindex.clear() 2539 newtable = [] 2540 index = 0 2541 offset = 0 # +1 for each purged record 2542 for record in yo._table: 2543 found = False 2544 if record.has_been_deleted and _pack: 2545 for dbflist in yo._dbflists: 2546 if dbflist._purge(record, record.record_number - offset, 1): 2547 found = True 2548 record._recnum = -1 2549 else: 2550 record._recnum = index 2551 newtable.append(record) 2552 index += 1 2553 if found: 2554 offset += 1 2555 found = False 2556 yo._table.clear() 2557 for record in newtable: 2558 yo._table.append(record) 2559 yo._meta.header.record_count = index 2560 yo._current = -1 2561 yo._update_disk() 2562 yo.reindex()
2563 - def prev(yo):
2564 "set record pointer to previous (non-deleted) record, and return it" 2565 if yo.bof(_move=True): 2566 raise Bof 2567 return yo.current()
2568 - def query(yo, sql_command=None, python=None):
2569 "deprecated: use .find or .sql" 2570 if sql_command: 2571 return yo.sql(sql_command) 2572 elif python: 2573 return yo.find(python) 2574 raise DbfError("query: python parameter must be specified")
2575 - def reindex(yo):
2576 for dbfindex in yo._indexen: 2577 dbfindex.reindex()
2578 - def rename_field(yo, oldname, newname):
2579 "renames an existing field" 2580 if yo: 2581 yo.create_backup() 2582 if not oldname in yo._meta.fields: 2583 raise DbfError("field --%s-- does not exist -- cannot rename it." % oldname) 2584 if newname[0] == '_' or newname[0].isdigit() or not newname.replace('_','').isalnum(): 2585 raise DbfError("field names cannot start with _ or digits, and can only contain the _, letters, and digits") 2586 newname = newname.lower() 2587 if newname in yo._meta.fields: 2588 raise DbfError("field --%s-- already exists" % newname) 2589 if len(newname) > 10: 2590 raise DbfError("maximum field name length is 10. '%s' is %d characters long." % (newname, len(newname))) 2591 yo._meta[newname] = yo._meta[oldname] 2592 yo._meta.fields[yo._meta.fields.index(oldname)] = newname 2593 yo._buildHeaderFields() 2594 yo._update_disk(headeronly=True)
2595 - def resize_field(yo, doomed, new_size):
2596 """resizes field (C only at this time) 2597 creates backup file, then modifies current structure""" 2598 if not 0 < new_size < 256: 2599 raise DbfError("new_size must be between 1 and 255 (use delete_fields to remove a field)") 2600 doomed = yo._list_fields(doomed) 2601 for victim in doomed: 2602 if victim not in yo._meta.fields: 2603 raise DbfError("field %s not in table -- resize aborted" % victim) 2604 all_records = [record for record in yo] 2605 yo.create_backup() 2606 #pprint(yo._meta['c_unit']) 2607 #print repr(yo[0].c_unit) 2608 for victim in doomed: 2609 delta = new_size - yo._meta[victim]['length'] 2610 start = yo._meta[victim]['start'] 2611 end = yo._meta[victim]['end'] 2612 eff_end = min(yo._meta[victim]['length'], new_size) 2613 yo._meta[victim]['length'] = new_size 2614 yo._meta[victim]['end'] = start + new_size 2615 blank = array('c', ' ' * new_size) 2616 #print "\nstart=%s\nend=%s\neff_end=%s\nnew_size=%s\n\n" % (start, end, eff_end, new_size) 2617 for record in yo: 2618 new_data = blank[:] 2619 new_data[:eff_end] = record._data[start:start+eff_end] 2620 record._data = record._data[:start] + new_data + record._data[end:] 2621 for field in yo._meta.fields: 2622 if yo._meta[field]['start'] == end: 2623 end = yo._meta[field]['end'] 2624 yo._meta[field]['start'] += delta 2625 yo._meta[field]['end'] += delta #+ yo._meta[field]['length'] 2626 start = yo._meta[field]['end'] 2627 #pprint(yo._meta['c_unit']) 2628 #print repr(yo[0].c_unit) 2629 #raw_input('...') 2630 yo._buildHeaderFields() 2631 yo._update_disk()
2632 - def size(yo, field):
2633 "returns size of field as a tuple of (length, decimals)" 2634 if field in yo: 2635 return (yo._meta[field]['length'], yo._meta[field]['decimals']) 2636 raise DbfError("%s is not a field in %s" % (field, yo.filename))
2637 - def sql(yo, command):
2638 "passes calls through to module level sql function" 2639 return sql(yo, command)
2640 - def structure(yo, fields=None):
2641 """return list of fields suitable for creating same table layout 2642 @param fields: list of fields or None for all fields""" 2643 field_specs = [] 2644 fields = yo._list_fields(fields) 2645 try: 2646 for name in fields: 2647 field_specs.append(yo._fieldLayout(yo.field_names.index(name))) 2648 except ValueError: 2649 raise DbfError("field --%s-- does not exist" % name) 2650 return field_specs
2651 - def top(yo, get_record=False):
2652 """sets record pointer to top of table; if get_record, seeks to and returns first (non-deleted) record 2653 DbfError if table is empty 2654 Eof if all records are deleted and use_deleted is False""" 2655 yo._meta.current = -1 2656 if get_record: 2657 try: 2658 return yo.next() 2659 except Eof: 2660 yo._meta.current = -1 2661 raise Bof()
2662 - def type(yo, field):
2663 "returns type of field" 2664 if field in yo: 2665 return yo._meta[field]['type'] 2666 raise DbfError("%s is not a field in %s" % (field, yo.filename))
2667 - def zap(yo, areyousure=False):
2668 """removes all records from table -- this cannot be undone! 2669 areyousure must be True, else error is raised""" 2670 if areyousure: 2671 if yo._meta.inmemory: 2672 yo._table = [] 2673 else: 2674 yo._table.clear() 2675 yo._meta.header.record_count = 0 2676 yo._current = -1 2677 yo._update_disk() 2678 else: 2679 raise DbfError("You must say you are sure to wipe the table")
2680 -class Db3Table(DbfTable):
2681 """Provides an interface for working with dBase III tables.""" 2682 _version = 'dBase III Plus' 2683 _versionabbv = 'db3' 2684 _fieldtypes = { 2685 'C' : {'Type':'Character', 'Retrieve':retrieveCharacter, 'Update':updateCharacter, 'Blank':str, 'Init':addCharacter, 'Class':None}, 2686 'D' : {'Type':'Date', 'Retrieve':retrieveDate, 'Update':updateDate, 'Blank':Date.today, 'Init':addDate, 'Class':None}, 2687 'L' : {'Type':'Logical', 'Retrieve':retrieveLogical, 'Update':updateLogical, 'Blank':bool, 'Init':addLogical, 'Class':None}, 2688 'M' : {'Type':'Memo', 'Retrieve':retrieveMemo, 'Update':updateMemo, 'Blank':str, 'Init':addMemo, 'Class':None}, 2689 'N' : {'Type':'Numeric', 'Retrieve':retrieveNumeric, 'Update':updateNumeric, 'Blank':int, 'Init':addNumeric, 'Class':None} } 2690 _memoext = '.dbt' 2691 _memotypes = ('M',) 2692 _memoClass = _Db3Memo 2693 _yesMemoMask = '\x80' 2694 _noMemoMask = '\x7f' 2695 _fixed_fields = ('D','L','M') 2696 _variable_fields = ('C','N') 2697 _character_fields = ('C','M') 2698 _decimal_fields = ('N',) 2699 _numeric_fields = ('N',) 2700 _currency_fields = tuple() 2701 _dbfTableHeader = array('c', '\x00' * 32) 2702 _dbfTableHeader[0] = '\x03' # version - dBase III w/o memo's 2703 _dbfTableHeader[8:10] = array('c', packShortInt(33)) 2704 _dbfTableHeader[10] = '\x01' # record length -- one for delete flag 2705 _dbfTableHeader[29] = '\x03' # code page -- 437 US-MS DOS 2706 _dbfTableHeader = _dbfTableHeader.tostring() 2707 _dbfTableHeaderExtra = '' 2708 _supported_tables = ['\x03', '\x83'] 2709 _read_only = False 2710 _meta_only = False 2711 _use_deleted = True
2712 - def _checkMemoIntegrity(yo):
2713 "dBase III specific" 2714 if yo._meta.header.version == '\x83': 2715 try: 2716 yo._meta.memo = yo._memoClass(yo._meta) 2717 except: 2718 yo._meta.dfd.close() 2719 yo._meta.dfd = None 2720 raise 2721 if not yo._meta.ignorememos: 2722 for field in yo._meta.fields: 2723 if yo._meta[field]['type'] in yo._memotypes: 2724 if yo._meta.header.version != '\x83': 2725 yo._meta.dfd.close() 2726 yo._meta.dfd = None 2727 raise DbfError("Table structure corrupt: memo fields exist, header declares no memos") 2728 elif not os.path.exists(yo._meta.memoname): 2729 yo._meta.dfd.close() 2730 yo._meta.dfd = None 2731 raise DbfError("Table structure corrupt: memo fields exist without memo file") 2732 break
2733 - def _initializeFields(yo):
2734 "builds the FieldList of names, types, and descriptions" 2735 yo._meta.fields[:] = [] 2736 offset = 1 2737 fieldsdef = yo._meta.header.fields 2738 if len(fieldsdef) % 32 != 0: 2739 raise DbfError("field definition block corrupt: %d bytes in size" % len(fieldsdef)) 2740 if len(fieldsdef) // 32 != yo.field_count: 2741 raise DbfError("Header shows %d fields, but field definition block has %d fields" % (yo.field_count, len(fieldsdef)//32)) 2742 for i in range(yo.field_count): 2743 fieldblock = fieldsdef[i*32:(i+1)*32] 2744 name = unpackStr(fieldblock[:11]) 2745 type = fieldblock[11] 2746 if not type in yo._meta.fieldtypes: 2747 raise DbfError("Unknown field type: %s" % type) 2748 start = offset 2749 length = ord(fieldblock[16]) 2750 offset += length 2751 end = start + length 2752 decimals = ord(fieldblock[17]) 2753 flags = ord(fieldblock[18]) 2754 yo._meta.fields.append(name) 2755 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
2756 -class FpTable(DbfTable):
2757 'Provides an interface for working with FoxPro 2 tables' 2758 _version = 'Foxpro' 2759 _versionabbv = 'fp' 2760 _fieldtypes = { 2761 'C' : {'Type':'Character', 'Retrieve':retrieveCharacter, 'Update':updateCharacter, 'Blank':str, 'Init':addCharacter, 'Class':None}, 2762 'F' : {'Type':'Float', 'Retrieve':retrieveNumeric, 'Update':updateNumeric, 'Blank':float, 'Init':addVfpNumeric, 'Class':None}, 2763 'N' : {'Type':'Numeric', 'Retrieve':retrieveNumeric, 'Update':updateNumeric, 'Blank':int, 'Init':addVfpNumeric, 'Class':None}, 2764 'L' : {'Type':'Logical', 'Retrieve':retrieveLogical, 'Update':updateLogical, 'Blank':bool, 'Init':addLogical, 'Class':None}, 2765 'D' : {'Type':'Date', 'Retrieve':retrieveDate, 'Update':updateDate, 'Blank':Date.today, 'Init':addDate, 'Class':None}, 2766 'M' : {'Type':'Memo', 'Retrieve':retrieveMemo, 'Update':updateMemo, 'Blank':str, 'Init':addVfpMemo, 'Class':None}, 2767 'G' : {'Type':'General', 'Retrieve':retrieveMemo, 'Update':updateMemo, 'Blank':str, 'Init':addMemo, 'Class':None}, 2768 'P' : {'Type':'Picture', 'Retrieve':retrieveMemo, 'Update':updateMemo, 'Blank':str, 'Init':addMemo, 'Class':None}, 2769 '0' : {'Type':'_NullFlags', 'Retrieve':unsupportedType, 'Update':unsupportedType, 'Blank':int, 'Init':None, 'Class':None} } 2770 _memoext = '.fpt' 2771 _memotypes = ('G','M','P') 2772 _memoClass = _VfpMemo 2773 _yesMemoMask = '\xf5' # 1111 0101 2774 _noMemoMask = '\x03' # 0000 0011 2775 _fixed_fields = ('B','D','G','I','L','M','P','T','Y') 2776 _variable_fields = ('C','F','N') 2777 _character_fields = ('C','M') # field representing character data 2778 _decimal_fields = ('F','N') 2779 _numeric_fields = ('F','N') 2780 _currency_fields = tuple() 2781 _supported_tables = ('\x03', '\xf5') 2782 _dbfTableHeader = array('c', '\x00' * 32) 2783 _dbfTableHeader[0] = '\x30' # version - Foxpro 6 0011 0000 2784 _dbfTableHeader[8:10] = array('c', packShortInt(33+263)) 2785 _dbfTableHeader[10] = '\x01' # record length -- one for delete flag 2786 _dbfTableHeader[29] = '\x03' # code page -- 437 US-MS DOS 2787 _dbfTableHeader = _dbfTableHeader.tostring() 2788 _dbfTableHeaderExtra = '\x00' * 263 2789 _use_deleted = True
2790 - def _checkMemoIntegrity(yo):
2791 if os.path.exists(yo._meta.memoname): 2792 try: 2793 yo._meta.memo = yo._memoClass(yo._meta) 2794 except: 2795 yo._meta.dfd.close() 2796 yo._meta.dfd = None 2797 raise 2798 if not yo._meta.ignorememos: 2799 for field in yo._meta.fields: 2800 if yo._meta[field]['type'] in yo._memotypes: 2801 if not os.path.exists(yo._meta.memoname): 2802 yo._meta.dfd.close() 2803 yo._meta.dfd = None 2804 raise DbfError("Table structure corrupt: memo fields exist without memo file") 2805 break
2806 - def _initializeFields(yo):
2807 "builds the FieldList of names, types, and descriptions" 2808 yo._meta.fields[:] = [] 2809 offset = 1 2810 fieldsdef = yo._meta.header.fields 2811 if len(fieldsdef) % 32 != 0: 2812 raise DbfError("field definition block corrupt: %d bytes in size" % len(fieldsdef)) 2813 if len(fieldsdef) // 32 != yo.field_count: 2814 raise DbfError("Header shows %d fields, but field definition block has %d fields" % (yo.field_count, len(fieldsdef)//32)) 2815 for i in range(yo.field_count): 2816 fieldblock = fieldsdef[i*32:(i+1)*32] 2817 name = unpackStr(fieldblock[:11]) 2818 type = fieldblock[11] 2819 if not type in yo._meta.fieldtypes: 2820 raise DbfError("Unknown field type: %s" % type) 2821 elif type == '0': 2822 return # ignore nullflags 2823 start = offset 2824 length = ord(fieldblock[16]) 2825 offset += length 2826 end = start + length 2827 decimals = ord(fieldblock[17]) 2828 flags = ord(fieldblock[18]) 2829 yo._meta.fields.append(name) 2830 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
2831
2832 -class VfpTable(DbfTable):
2833 'Provides an interface for working with Visual FoxPro 6 tables' 2834 _version = 'Visual Foxpro v6' 2835 _versionabbv = 'vfp' 2836 _fieldtypes = { 2837 'C' : {'Type':'Character', 'Retrieve':retrieveCharacter, 'Update':updateCharacter, 'Blank':str, 'Init':addCharacter, 'Class':None}, 2838 'Y' : {'Type':'Currency', 'Retrieve':retrieveCurrency, 'Update':updateCurrency, 'Blank':Decimal(), 'Init':addVfpCurrency, 'Class':None}, 2839 'B' : {'Type':'Double', 'Retrieve':retrieveDouble, 'Update':updateDouble, 'Blank':float, 'Init':addVfpDouble, 'Class':None}, 2840 'F' : {'Type':'Float', 'Retrieve':retrieveNumeric, 'Update':updateNumeric, 'Blank':float, 'Init':addVfpNumeric, 'Class':None}, 2841 'N' : {'Type':'Numeric', 'Retrieve':retrieveNumeric, 'Update':updateNumeric, 'Blank':int, 'Init':addVfpNumeric, 'Class':None}, 2842 'I' : {'Type':'Integer', 'Retrieve':retrieveInteger, 'Update':updateInteger, 'Blank':int, 'Init':addVfpInteger, 'Class':None}, 2843 'L' : {'Type':'Logical', 'Retrieve':retrieveLogical, 'Update':updateLogical, 'Blank':bool, 'Init':addLogical, 'Class':None}, 2844 'D' : {'Type':'Date', 'Retrieve':retrieveDate, 'Update':updateDate, 'Blank':Date.today, 'Init':addDate, 'Class':None}, 2845 'T' : {'Type':'DateTime', 'Retrieve':retrieveVfpDateTime, 'Update':updateVfpDateTime, 'Blank':DateTime.now, 'Init':addVfpDateTime, 'Class':None}, 2846 'M' : {'Type':'Memo', 'Retrieve':retrieveVfpMemo, 'Update':updateVfpMemo, 'Blank':str, 'Init':addVfpMemo, 'Class':None}, 2847 'G' : {'Type':'General', 'Retrieve':retrieveVfpMemo, 'Update':updateVfpMemo, 'Blank':str, 'Init':addVfpMemo, 'Class':None}, 2848 'P' : {'Type':'Picture', 'Retrieve':retrieveVfpMemo, 'Update':updateVfpMemo, 'Blank':str, 'Init':addVfpMemo, 'Class':None}, 2849 '0' : {'Type':'_NullFlags', 'Retrieve':unsupportedType, 'Update':unsupportedType, 'Blank':int, 'Init':None, 'Class':None} } 2850 _memoext = '.fpt' 2851 _memotypes = ('G','M','P') 2852 _memoClass = _VfpMemo 2853 _yesMemoMask = '\x30' # 0011 0000 2854 _noMemoMask = '\x30' # 0011 0000 2855 _fixed_fields = ('B','D','G','I','L','M','P','T','Y') 2856 _variable_fields = ('C','F','N') 2857 _character_fields = ('C','M') # field representing character data 2858 _decimal_fields = ('F','N') 2859 _numeric_fields = ('B','F','I','N','Y') 2860 _currency_fields = ('Y',) 2861 _supported_tables = ('\x30',) 2862 _dbfTableHeader = array('c', '\x00' * 32) 2863 _dbfTableHeader[0] = '\x30' # version - Foxpro 6 0011 0000 2864 _dbfTableHeader[8:10] = array('c', packShortInt(33+263)) 2865 _dbfTableHeader[10] = '\x01' # record length -- one for delete flag 2866 _dbfTableHeader[29] = '\x03' # code page -- 437 US-MS DOS 2867 _dbfTableHeader = _dbfTableHeader.tostring() 2868 _dbfTableHeaderExtra = '\x00' * 263 2869 _use_deleted = True
2870 - def _checkMemoIntegrity(yo):
2871 if os.path.exists(yo._meta.memoname): 2872 try: 2873 yo._meta.memo = yo._memoClass(yo._meta) 2874 except: 2875 yo._meta.dfd.close() 2876 yo._meta.dfd = None 2877 raise 2878 if not yo._meta.ignorememos: 2879 for field in yo._meta.fields: 2880 if yo._meta[field]['type'] in yo._memotypes: 2881 if not os.path.exists(yo._meta.memoname): 2882 yo._meta.dfd.close() 2883 yo._meta.dfd = None 2884 raise DbfError("Table structure corrupt: memo fields exist without memo file") 2885 break
2886 - def _initializeFields(yo):
2887 "builds the FieldList of names, types, and descriptions" 2888 yo._meta.fields[:] = [] 2889 offset = 1 2890 fieldsdef = yo._meta.header.fields 2891 for i in range(yo.field_count): 2892 fieldblock = fieldsdef[i*32:(i+1)*32] 2893 name = unpackStr(fieldblock[:11]) 2894 type = fieldblock[11] 2895 if not type in yo._meta.fieldtypes: 2896 raise DbfError("Unknown field type: %s" % type) 2897 elif type == '0': 2898 return # ignore nullflags 2899 start = unpackLongInt(fieldblock[12:16]) 2900 length = ord(fieldblock[16]) 2901 offset += length 2902 end = start + length 2903 decimals = ord(fieldblock[17]) 2904 flags = ord(fieldblock[18]) 2905 yo._meta.fields.append(name) 2906 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
2907 -class List():
2908 "list of Dbf records, with set-like behavior" 2909 _desc = ''
2910 - def __init__(yo, new_records=None, desc=None, key=None, field_names=None):
2911 yo.field_names = field_names 2912 yo._list = [] 2913 yo._set = set() 2914 if key is not None: 2915 yo.key = key 2916 if key.__doc__ is None: 2917 key.__doc__ = 'unknown' 2918 key = yo.key 2919 yo._current = -1 2920 if isinstance(new_records, yo.__class__) and key is new_records.key: 2921 yo._list = new_records._list[:] 2922 yo._set = new_records._set.copy() 2923 yo._current = 0 2924 elif new_records is not None: 2925 for record in new_records: 2926 value = key(record) 2927 item = (record.record_table, record.record_number, value) 2928 if value not in yo._set: 2929 yo._set.add(value) 2930 yo._list.append(item) 2931 yo._current = 0 2932 if desc is not None: 2933 yo._desc = desc
2934 - def __add__(yo, other):
2935 key = yo.key 2936 if isinstance(other, (DbfTable, list)): 2937 other = yo.__class__(other, key=key) 2938 if isinstance(other, yo.__class__): 2939 result = yo.__class__() 2940 result._set = yo._set.copy() 2941 result._list[:] = yo._list[:] 2942 result.key = yo.key 2943 if key is other.key: # same key? just compare key values 2944 for item in other._list: 2945 if item[2] not in result._set: 2946 result._set.add(item[2]) 2947 result._list.append(item) 2948 else: # different keys, use this list's key on other's records 2949 for rec in other: 2950 value = key(rec) 2951 if value not in result._set: 2952 result._set.add(value) 2953 result._list.append((rec.record_table, rec.record_number, value)) 2954 result._current = 0 if result else -1 2955 return result 2956 return NotImplemented
2957 - def __contains__(yo, record):
2958 if isinstance(record, tuple): 2959 item = record 2960 else: 2961 item = yo.key(record) 2962 return item in yo._set
2963 - def __delitem__(yo, key):
2964 if isinstance(key, int): 2965 item = yo._list.pop(key) 2966 yo._set.remove(item[2]) 2967 elif isinstance(key, slice): 2968 yo._set.difference_update([item[2] for item in yo._list[key]]) 2969 yo._list.__delitem__(key) 2970 elif isinstance(key, _DbfRecord): 2971 index = yo.index(key) 2972 item = yo._list.pop(index) 2973 yo._set.remove(item[2]) 2974 else: 2975 raise TypeError
2976 - def __getitem__(yo, key):
2977 if isinstance(key, int): 2978 count = len(yo._list) 2979 if not -count <= key < count: 2980 raise IndexError("Record %d is not in list." % key) 2981 return yo._get_record(*yo._list[key]) 2982 elif isinstance(key, slice): 2983 result = yo.__class__() 2984 result._list[:] = yo._list[key] 2985 result._set = set(result._list) 2986 result.key = yo.key 2987 result._current = 0 if result else -1 2988 return result 2989 elif isinstance(key, _DbfRecord): 2990 index = yo.index(key) 2991 return yo._get_record(*yo._list[index]) 2992 else: 2993 raise TypeError('indices must be integers')
2994 - def __iter__(yo):
2995 return (table.get_record(recno) for table, recno, value in yo._list)
2996 - def __len__(yo):
2997 return len(yo._list)
2998 - def __nonzero__(yo):
2999 return len(yo) > 0
3000 - def __radd__(yo, other):
3001 return yo.__add__(other)
3002 - def __repr__(yo):
3003 if yo._desc: 3004 return "%s(key=%s - %s - %d records)" % (yo.__class__, yo.key.__doc__, yo._desc, len(yo._list)) 3005 else: 3006 return "%s(key=%s - %d records)" % (yo.__class__, yo.key.__doc__, len(yo._list))
3007 - def __rsub__(yo, other):
3008 key = yo.key 3009 if isinstance(other, (DbfTable, list)): 3010 other = yo.__class__(other, key=key) 3011 if isinstance(other, yo.__class__): 3012 result = yo.__class__() 3013 result._list[:] = other._list[:] 3014 result._set = other._set.copy() 3015 result.key = key 3016 lost = set() 3017 if key is other.key: 3018 for item in yo._list: 3019 if item[2] in result._set: 3020 result._set.remove(item[2]) 3021 lost.add(item[2]) 3022 else: 3023 for rec in other: 3024 value = key(rec) 3025 if value in result._set: 3026 result._set.remove(value) 3027 lost.add(value) 3028 result._list = [item for item in result._list if item[2] not in lost] 3029 result._current = 0 if result else -1 3030 return result 3031 return NotImplemented
3032 - def __sub__(yo, other):
3033 key = yo.key 3034 if isinstance(other, (DbfTable, list)): 3035 other = yo.__class__(other, key=key) 3036 if isinstance(other, yo.__class__): 3037 result = yo.__class__() 3038 result._list[:] = yo._list[:] 3039 result._set = yo._set.copy() 3040 result.key = key 3041 lost = set() 3042 if key is other.key: 3043 for item in other._list: 3044 if item[2] in result._set: 3045 result._set.remove(item[2]) 3046 lost.add(item[2]) 3047 else: 3048 for rec in other: 3049 value = key(rec) 3050 if value in result._set: 3051 result._set.remove(value) 3052 lost.add(value) 3053 result._list = [item for item in result._list if item[2] not in lost] 3054 result._current = 0 if result else -1 3055 return result 3056 return NotImplemented
3057 - def _maybe_add(yo, item):
3058 if item[2] not in yo._set: 3059 yo._set.add(item[2]) 3060 yo._list.append(item)
3061 - def _get_record(yo, table=None, rec_no=None, value=None):
3062 if table is rec_no is None: 3063 table, rec_no, value = yo._list[yo._current] 3064 return table.get_record(rec_no)
3065 - def _purge(yo, record, old_record_number, offset):
3066 partial = record.record_table, old_record_number 3067 records = sorted(yo._list, key=lambda item: (item[0], item[1])) 3068 for item in records: 3069 if partial == item[:2]: 3070 found = True 3071 break 3072 elif partial[0] is item[0] and partial[1] < item[1]: 3073 found = False 3074 break 3075 else: 3076 found = False 3077 if found: 3078 yo._list.pop(yo._list.index(item)) 3079 yo._set.remove(item[2]) 3080 start = records.index(item) + found 3081 for item in records[start:]: 3082 if item[0] is not partial[0]: # into other table's records 3083 break 3084 i = yo._list.index(item) 3085 yo._set.remove(item[2]) 3086 item = item[0], (item[1] - offset), item[2] 3087 yo._list[i] = item 3088 yo._set.add(item[2]) 3089 return found
3090 - def append(yo, new_record):
3091 yo._maybe_add((new_record.record_table, new_record.record_number, yo.key(new_record))) 3092 if yo._current == -1 and yo._list: 3093 yo._current = 0
3094 #return new_record
3095 - def bottom(yo):
3096 if yo._list: 3097 yo._current = len(yo._list) - 1 3098 return yo._get_record() 3099 raise DbfError("dbf.List is empty")
3100 - def clear(yo):
3101 yo._list = [] 3102 yo._set = set() 3103 yo._current = -1
3104 - def current(yo):
3105 if yo._current < 0: 3106 raise Bof() 3107 elif yo._current == len(yo._list): 3108 raise Eof() 3109 return yo._get_record()
3110 - def extend(yo, new_records):
3111 key = yo.key 3112 if isinstance(new_records, yo.__class__): 3113 if key is new_records.key: # same key? just compare key values 3114 for item in new_records._list: 3115 yo._maybe_add(item) 3116 else: # different keys, use this list's key on other's records 3117 for rec in new_records: 3118 value = key(rec) 3119 yo._maybe_add((rec.record_table, rec.record_number, value)) 3120 else: 3121 for record in new_records: 3122 value = key(record) 3123 yo._maybe_add((record.record_table, record.record_number, value)) 3124 if yo._current == -1 and yo._list: 3125 yo._current = 0
3126 - def goto(yo, index_number):
3127 if yo._list: 3128 if 0 <= index_number < len(yo._list): 3129 yo._current = index_number 3130 return yo._get_record() 3131 raise DbfError("index %d not in dbf.List of %d records" % (index_number, len(yo._list))) 3132 raise DbfError("dbf.List is empty")
3133 - def index(yo, sort=None, reverse=False):
3134 "sort= ((field_name, func), (field_name, func),) | 'ORIGINAL'" 3135 if sort is None: 3136 results = [] 3137 for field, func in yo._meta.index: 3138 results.append("%s(%s)" % (func.__name__, field)) 3139 return ', '.join(results + ['reverse=%s' % yo._meta.index_reversed]) 3140 yo._meta.index_reversed = reverse 3141 if sort == 'ORIGINAL': 3142 yo._index = range(yo._meta.header.record_count) 3143 yo._meta.index = 'ORIGINAL' 3144 if reverse: 3145 yo._index.reverse() 3146 return 3147 new_sort = _normalize_tuples(tuples=sort, length=2, filler=[_nop]) 3148 yo._meta.index = tuple(new_sort) 3149 yo._meta.orderresults = [''] * len(yo) 3150 for record in yo: 3151 yo._meta.orderresults[record.record_number] = record() 3152 yo._index.sort(key=lambda i: yo._meta.orderresults[i], reverse=reverse)
3153 - def index(yo, record, start=None, stop=None):
3154 item = record.record_table, record.record_number, yo.key(record) 3155 key = yo.key(record) 3156 if start is None: 3157 start = 0 3158 if stop is None: 3159 stop = len(yo._list) 3160 for i in range(start, stop): 3161 if yo._list[i][2] == key: 3162 return i 3163 else: 3164 raise ValueError("dbf.List.index(x): <x=%r> not in list" % (key,))
3165 - def insert(yo, i, record):
3166 item = record.record_table, record.record_number, yo.key(record) 3167 if item[2] not in yo._set: 3168 yo._set.add(item[2]) 3169 yo._list.insert(i, item)
3170 - def key(yo, record):
3171 "table_name, record_number" 3172 return record.record_table, record.record_number
3173 - def next(yo):
3174 if yo._current < len(yo._list): 3175 yo._current += 1 3176 if yo._current < len(yo._list): 3177 return yo._get_record() 3178 raise Eof()
3179 - def pop(yo, index=None):
3180 if index is None: 3181 table, recno, value = yo._list.pop() 3182 else: 3183 table, recno, value = yo._list.pop(index) 3184 yo._set.remove(value) 3185 return yo._get_record(table, recno, value)
3186 - def prev(yo):
3187 if yo._current >= 0: 3188 yo._current -= 1 3189 if yo._current > -1: 3190 return yo._get_record() 3191 raise Bof()
3192 - def remove(yo, record):
3193 item = record.record_table, record.record_number, yo.key(record) 3194 yo._list.remove(item) 3195 yo._set.remove(item[2])
3196 - def reverse(yo):
3197 return yo._list.reverse()
3198 - def top(yo):
3199 if yo._list: 3200 yo._current = 0 3201 return yo._get_record() 3202 raise DbfError("dbf.List is empty")
3203 - def sort(yo, key=None, reverse=False):
3204 if key is None: 3205 return yo._list.sort(reverse=reverse) 3206 return yo._list.sort(key=lambda item: key(item[0].get_record(item[1])), reverse=reverse)
3207
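A minimal usage sketch for dbf.List (not part of the original listing): it assumes the example 'temptable' exists with matching records and that Eof is reachable as dbf.Eof; it simply collects some records and walks them with the navigation methods defined above.

    import dbf
    table = dbf.Table('temptable')
    seniors = dbf.List(field_names=table.field_names)
    for record in table:
        if record.age >= 60:
            seniors.append(record)          # _maybe_add keeps duplicates out
    record = seniors.top()                  # position on the first entry
    while True:
        try:
            record = seniors.next()         # walk forward until Eof is raised
        except dbf.Eof:
            break
    seniors.sort(key=lambda rec: rec.age)   # sort in place by a record-based key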
3208 -class Index():
3209 - class IndexIterator():
3210 "returns records using this index"
3211 - def __init__(yo, table, records):
3212 yo.table = table 3213 yo.records = records 3214 yo.index = 0
3215 - def __iter__(yo):
3216 return yo
3217 - def next(yo):
3218 while yo.index < len(yo.records): 3219 record = yo.table.get_record(yo.records[yo.index]) 3220 yo.index += 1 3221 if not yo.table.use_deleted and record.has_been_deleted: 3222 continue 3223 return record 3224 else: 3225 raise StopIteration
3226 - def __init__(yo, table, key, field_names=None):
3227 yo._table = table 3228 yo._values = [] # ordered list of values 3229 yo._rec_by_val = [] # matching record numbers 3230 yo._records = {} # record numbers:values 3231 yo.__doc__ = key.__doc__ or 'unknown' 3232 yo.key = key 3233 yo.field_names = field_names or table.field_names 3234 for record in table: 3235 value = key(record) 3236 if value is DoNotIndex: 3237 continue 3238 rec_num = record.record_number 3239 if not isinstance(value, tuple): 3240 value = (value, ) 3241 vindex = bisect_right(yo._values, value) 3242 yo._values.insert(vindex, value) 3243 yo._rec_by_val.insert(vindex, rec_num) 3244 yo._records[rec_num] = value 3245 table._indexen.add(yo)
3246 - def __call__(yo, record):
3247 rec_num = record.record_number 3248 if rec_num in yo._records: 3249 value = yo._records[rec_num] 3250 vindex = bisect_left(yo._values, value) 3251 yo._values.pop(vindex) 3252 yo._rec_by_val.pop(vindex) 3253 value = yo.key(record) 3254 if value is DoNotIndex: 3255 return 3256 if not isinstance(value, tuple): 3257 value = (value, ) 3258 vindex = bisect_right(yo._values, value) 3259 yo._values.insert(vindex, value) 3260 yo._rec_by_val.insert(vindex, rec_num) 3261 yo._records[rec_num] = value
3262 - def __contains__(yo, match):
3263 if isinstance(match, _DbfRecord): 3264 if match.record_table is yo._table: 3265 return match.record_number in yo._records 3266 match = yo.key(match) 3267 elif not isinstance(match, tuple): 3268 match = (match, ) 3269 return yo.find(match) != -1
3270 - def __getitem__(yo, key):
3271 if isinstance(key, int): 3272 count = len(yo._values) 3273 if not -count <= key < count: 3274 raise IndexError("Record %d is not in list." % key) 3275 rec_num = yo._rec_by_val[key] 3276 return yo._table.get_record(rec_num) 3277 elif isinstance(key, slice): 3278 result = List(field_names=yo._table.field_names) 3279 yo._table._dbflists.add(result) 3280 start, stop, step = key.start, key.stop, key.step 3281 if start is None: start = 0 3282 if stop is None: stop = len(yo._rec_by_val) 3283 if step is None: step = 1 3284 for loc in range(start, stop, step): 3285 record = yo._table.get_record(yo._rec_by_val[loc]) 3286 result._maybe_add(item=(yo._table, yo._rec_by_val[loc], result.key(record))) 3287 result._current = 0 if result else -1 3288 return result 3289 elif isinstance(key, (str, unicode, tuple, _DbfRecord)): 3290 if isinstance(key, _DbfRecord): 3291 key = yo.key(key) 3292 elif not isinstance(key, tuple): 3293 key = (key, ) 3294 loc = yo.find(key) 3295 if loc == -1: 3296 raise KeyError(key) 3297 return yo._table.get_record(yo._rec_by_val[loc]) 3298 else: 3299 raise TypeError('indices must be integers, match objects must be strings or tuples')
3300 - def __enter__(yo):
3301 return yo
3302 - def __exit__(yo, *exc_info):
3303 yo._table.close() 3304 yo._values[:] = [] 3305 yo._rec_by_val[:] = [] 3306 yo._records.clear() 3307 return False
3308 - def __iter__(yo):
3309 return yo.IndexIterator(yo._table, yo._rec_by_val)
3310 - def __len__(yo):
3311 return len(yo._records)
3312 - def _partial_match(yo, target, match):
3313 target = target[:len(match)] 3314 if isinstance(match[-1], (str, unicode)): 3315 target = list(target) 3316 target[-1] = target[-1][:len(match[-1])] 3317 target = tuple(target) 3318 return target == match
3319 - def _purge(yo, rec_num):
3320 value = yo._records.get(rec_num) 3321 if value is not None: 3322 vindex = bisect_left(yo._values, value) 3323 del yo._records[rec_num] 3324 yo._values.pop(vindex) 3325 yo._rec_by_val.pop(vindex)
3326 - def _search(yo, match, lo=0, hi=None):
3327 if hi is None: 3328 hi = len(yo._values) 3329 return bisect_left(yo._values, match, lo, hi)
3330 - def clear(yo):
3331 "removes all entries from index" 3332 yo._values[:] = [] 3333 yo._rec_by_val[:] = [] 3334 yo._records.clear()
3335 - def close(yo):
3336 yo._table.close()
3337 - def find(yo, match, partial=False):
3338 "returns numeric index of (partial) match, or -1" 3339 if isinstance(match, _DbfRecord): 3340 if match.record_number in yo._records: 3341 return yo._values.index(yo._records[match.record_number]) 3342 else: 3343 return -1 3344 if not isinstance(match, tuple): 3345 match = (match, ) 3346 loc = yo._search(match) 3347 while loc < len(yo._values) and yo._values[loc] == match: 3348 if not yo._table.use_deleted and yo._table.get_record(yo._rec_by_val[loc]).has_been_deleted: 3349 loc += 1 3350 continue 3351 return loc 3352 if partial: 3353 while loc < len(yo._values) and yo._partial_match(yo._values[loc], match): 3354 if not yo._table.use_deleted and yo._table.get_record(yo._rec_by_val[loc]).has_been_deleted: 3355 loc += 1 3356 continue 3357 return loc 3358 return -1
3359 - def find_index(yo, match):
3360 "returns numeric index of either (partial) match, or position of where match would be" 3361 if isinstance(match, _DbfRecord): 3362 if match.record_number in yo._records: 3363 return yo._values.index(yo._records[match.record_number]) 3364 else: 3365 match = yo.key(match) 3366 if not isinstance(match, tuple): 3367 match = (match, ) 3368 loc = yo._search(match) 3369 return loc
3370 - def index(yo, match, partial=False):
3371 "returns numeric index of (partial) match, or raises ValueError" 3372 loc = yo.find(match, partial) 3373 if loc == -1: 3374 if isinstance(match, _DbfRecord): 3375 raise ValueError("table <%s> record [%d] not in index <%s>" % (yo._table.filename, match.record_number, yo.__doc__)) 3376 else: 3377 raise ValueError("match criteria <%s> not in index" % (match, )) 3378 return loc
3379 - def reindex(yo):
3380 "reindexes all records" 3381 for record in yo._table: 3382 yo(record)
3383 - def query(yo, sql_command=None, python=None):
3384 """recognized sql commands are SELECT, UPDATE, REPLACE, INSERT, DELETE, and RECALL""" 3385 if sql_command: 3386 return sql(yo, sql_command) 3387 elif python is None: 3388 raise DbfError("query: python parameter must be specified") 3389 possible = List(desc="%s --> %s" % (yo._table.filename, python), field_names=yo._table.field_names) 3390 yo._table._dbflists.add(possible) 3391 query_result = {} 3392 select = 'query_result["keep"] = %s' % python 3393 g = {} 3394 for record in yo: 3395 query_result['keep'] = False 3396 g['query_result'] = query_result 3397 exec select in g, record 3398 if query_result['keep']: 3399 possible.append(record) 3400 record.write_record() 3401 return possible
3402 - def search(yo, match, partial=False):
3403 "returns dbf.List of all (partially) matching records" 3404 result = List(field_names=yo._table.field_names) 3405 yo._table._dbflists.add(result) 3406 if not isinstance(match, tuple): 3407 match = (match, ) 3408 loc = yo._search(match) 3409 if loc == len(yo._values): 3410 return result 3411 while loc < len(yo._values) and yo._values[loc] == match: 3412 record = yo._table.get_record(yo._rec_by_val[loc]) 3413 if not yo._table.use_deleted and record.has_been_deleted: 3414 loc += 1 3415 continue 3416 result._maybe_add(item=(yo._table, yo._rec_by_val[loc], result.key(record))) 3417 loc += 1 3418 if partial: 3419 while loc < len(yo._values) and yo._partial_match(yo._values[loc], match): 3420 record = yo._table.get_record(yo._rec_by_val[loc]) 3421 if not yo._table.use_deleted and record.has_been_deleted: 3422 loc += 1 3423 continue 3424 result._maybe_add(item=(yo._table, yo._rec_by_val[loc], result.key(record))) 3425 loc += 1 3426 return result
3427 3428 # table meta 3429 table_types = { 3430 'db3' : Db3Table, 3431 'fp' : FpTable, 3432 'vfp' : VfpTable, 3433 'dbf' : DbfTable, 3434 } 3435 3436 version_map = { 3437 '\x02' : 'FoxBASE', 3438 '\x03' : 'dBase III Plus', 3439 '\x04' : 'dBase IV', 3440 '\x05' : 'dBase V', 3441 '\x30' : 'Visual FoxPro', 3442 '\x31' : 'Visual FoxPro (auto increment field)', 3443 '\x43' : 'dBase IV SQL', 3444 '\x7b' : 'dBase IV w/memos', 3445 '\x83' : 'dBase III Plus w/memos', 3446 '\x8b' : 'dBase IV w/memos', 3447 '\x8e' : 'dBase IV w/SQL table', 3448 '\xf5' : 'FoxPro w/memos'} 3449 3450 code_pages = { 3451 '\x00' : ('ascii', "plain ol' ascii"), 3452 '\x01' : ('cp437', 'U.S. MS-DOS'), 3453 '\x02' : ('cp850', 'International MS-DOS'), 3454 '\x03' : ('cp1252', 'Windows ANSI'), 3455 '\x04' : ('mac_roman', 'Standard Macintosh'), 3456 '\x08' : ('cp865', 'Danish OEM'), 3457 '\x09' : ('cp437', 'Dutch OEM'), 3458 '\x0A' : ('cp850', 'Dutch OEM (secondary)'), 3459 '\x0B' : ('cp437', 'Finnish OEM'), 3460 '\x0D' : ('cp437', 'French OEM'), 3461 '\x0E' : ('cp850', 'French OEM (secondary)'), 3462 '\x0F' : ('cp437', 'German OEM'), 3463 '\x10' : ('cp850', 'German OEM (secondary)'), 3464 '\x11' : ('cp437', 'Italian OEM'), 3465 '\x12' : ('cp850', 'Italian OEM (secondary)'), 3466 '\x13' : ('cp932', 'Japanese Shift-JIS'), 3467 '\x14' : ('cp850', 'Spanish OEM (secondary)'), 3468 '\x15' : ('cp437', 'Swedish OEM'), 3469 '\x16' : ('cp850', 'Swedish OEM (secondary)'), 3470 '\x17' : ('cp865', 'Norwegian OEM'), 3471 '\x18' : ('cp437', 'Spanish OEM'), 3472 '\x19' : ('cp437', 'English OEM (Britain)'), 3473 '\x1A' : ('cp850', 'English OEM (Britain) (secondary)'), 3474 '\x1B' : ('cp437', 'English OEM (U.S.)'), 3475 '\x1C' : ('cp863', 'French OEM (Canada)'), 3476 '\x1D' : ('cp850', 'French OEM (secondary)'), 3477 '\x1F' : ('cp852', 'Czech OEM'), 3478 '\x22' : ('cp852', 'Hungarian OEM'), 3479 '\x23' : ('cp852', 'Polish OEM'), 3480 '\x24' : ('cp860', 'Portugese OEM'), 3481 '\x25' : ('cp850', 'Potugese OEM (secondary)'), 3482 '\x26' : ('cp866', 'Russian OEM'), 3483 '\x37' : ('cp850', 'English OEM (U.S.) (secondary)'), 3484 '\x40' : ('cp852', 'Romanian OEM'), 3485 '\x4D' : ('cp936', 'Chinese GBK (PRC)'), 3486 '\x4E' : ('cp949', 'Korean (ANSI/OEM)'), 3487 '\x4F' : ('cp950', 'Chinese Big 5 (Taiwan)'), 3488 '\x50' : ('cp874', 'Thai (ANSI/OEM)'), 3489 '\x57' : ('cp1252', 'ANSI'), 3490 '\x58' : ('cp1252', 'Western European ANSI'), 3491 '\x59' : ('cp1252', 'Spanish ANSI'), 3492 '\x64' : ('cp852', 'Eastern European MS-DOS'), 3493 '\x65' : ('cp866', 'Russian MS-DOS'), 3494 '\x66' : ('cp865', 'Nordic MS-DOS'), 3495 '\x67' : ('cp861', 'Icelandic MS-DOS'), 3496 '\x68' : (None, 'Kamenicky (Czech) MS-DOS'), 3497 '\x69' : (None, 'Mazovia (Polish) MS-DOS'), 3498 '\x6a' : ('cp737', 'Greek MS-DOS (437G)'), 3499 '\x6b' : ('cp857', 'Turkish MS-DOS'), 3500 '\x78' : ('cp950', 'Traditional Chinese (Hong Kong SAR, Taiwan) Windows'), 3501 '\x79' : ('cp949', 'Korean Windows'), 3502 '\x7a' : ('cp936', 'Chinese Simplified (PRC, Singapore) Windows'), 3503 '\x7b' : ('cp932', 'Japanese Windows'), 3504 '\x7c' : ('cp874', 'Thai Windows'), 3505 '\x7d' : ('cp1255', 'Hebrew Windows'), 3506 '\x7e' : ('cp1256', 'Arabic Windows'), 3507 '\xc8' : ('cp1250', 'Eastern European Windows'), 3508 '\xc9' : ('cp1251', 'Russian Windows'), 3509 '\xca' : ('cp1254', 'Turkish Windows'), 3510 '\xcb' : ('cp1253', 'Greek Windows'), 3511 '\x96' : ('mac_cyrillic', 'Russian Macintosh'), 3512 '\x97' : ('mac_latin2', 'Macintosh EE'), 3513 '\x98' : ('mac_greek', 'Greek Macintosh') }
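The two mappings above can also be used directly to inspect a file before opening it. A small sketch (the filename is a placeholder, and a well-formed 32-byte header is assumed) that reads the version byte and the language-driver id straight from the header:

    fd = open('some_table.dbf', 'rb')
    header = fd.read(32)
    fd.close()
    print version_map.get(header[0], 'unknown dbf version')
    encoding, description = code_pages.get(header[29], (None, 'unknown code page'))
    print encoding, '-', description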
3514 3515 # SQL functions 3516 3517 -def sql_select(records, chosen_fields, condition, field_names):
3518 if chosen_fields != '*': 3519 field_names = chosen_fields.replace(' ','').split(',') 3520 result = condition(records) 3521 result.modified = 0, 'record' + ('','s')[len(result)>1] 3522 result.field_names = field_names 3523 return result
3524
3525 -def sql_update(records, command, condition, field_names):
3526 possible = condition(records) 3527 modified = sql_cmd(command, field_names)(possible) 3528 possible.modified = modified, 'record' + ('','s')[modified>1] 3529 return possible
3530
3531 -def sql_delete(records, dead_fields, condition, field_names):
3532 deleted = condition(records) 3533 deleted.modified = len(deleted), 'record' + ('','s')[len(deleted)>1] 3534 deleted.field_names = field_names 3535 if dead_fields == '*': 3536 for record in deleted: 3537 record.delete_record() 3538 record.write_record() 3539 else: 3540 keep = [f for f in field_names if f not in dead_fields.replace(' ','').split(',')] 3541 for record in deleted: 3542 record.reset_record(keep_fields=keep) 3543 record.write_record() 3544 return deleted
3545
3546 -def sql_recall(records, all_fields, condition, field_names):
3547 if all_fields != '*': 3548 raise DbfError('SQL RECALL: fields must be * (only able to recover at the record level)') 3549 revivified = List() 3550 tables = set() 3551 for record in records: 3552 tables.add(record.record_table) 3553 old_setting = dict() 3554 for table in tables: 3555 old_setting[table] = table.use_deleted 3556 table.use_deleted = True 3557 for record in condition(records): 3558 if record.has_been_deleted: 3559 revivified.append(record) 3560 record.undelete_record() 3561 record.write_record() 3562 for table in tables: 3563 table.use_deleted = old_setting[table] 3564 revivified.modified = len(revivified), 'record' + ('','s')[len(revivified)>1] 3565 revivified.field_names = field_names 3566 return revivified
3567
3568 -def sql_add(records, new_fields, condition, field_names):
3569 tables = set() 3570 possible = condition(records) 3571 for record in possible: 3572 tables.add(record.record_table) 3573 for table in tables: 3574 table.add_fields(new_fields) 3575 possible.modified = len(tables), 'table' + ('','s')[len(tables)>1] 3576 possible.field_names = field_names 3577 return possible
3578
3579 -def sql_drop(records, dead_fields, condition, field_names):
3580 tables = set() 3581 possible = condition(records) 3582 for record in possible: 3583 tables.add(record.record_table) 3584 for table in tables: 3585 table.delete_fields(dead_fields) 3586 possible.modified = len(tables), 'table' + ('','s')[len(tables)>1] 3587 possible.field_names = field_names 3588 return possible
3589
3590 -def sql_pack(records, command, condition, field_names):
3591 tables = set() 3592 possible = condition(records) 3593 for record in possible: 3594 tables.add(record.record_table) 3595 for table in tables: 3596 table.pack() 3597 possible.modified = len(tables), 'table' + ('','s')[len(tables)>1] 3598 possible.field_names = field_names 3599 return possible
3600
3601 -def sql_resize(records, fieldname_newsize, condition, field_names):
3602 tables = set() 3603 possible = condition(records) 3604 for record in possible: 3605 tables.add(record.record_table) 3606 fieldname, newsize = fieldname_newsize.split() 3607 newsize = int(newsize) 3608 for table in tables: 3609 table.resize_field(fieldname, newsize) 3610 possible.modified = len(tables), 'table' + ('','s')[len(tables)>1] 3611 possible.field_names = field_names 3612 return possible
3613
3614 -def sql_criteria(records, criteria):
3615 "creates a function matching the sql criteria" 3616 function = """def func(records): 3617 \"\"\"%s\"\"\" 3618 matched = List(field_names=records[0].field_names) 3619 for rec in records: 3620 %s 3621 3622 if %s: 3623 matched.append(rec) 3624 return matched""" 3625 fields = [] 3626 for field in records[0].field_names: 3627 if field in criteria: 3628 fields.append(field) 3629 fields = '\n '.join(['%s = rec.%s' % (field, field) for field in fields]) 3630 if 'record_number' in criteria: 3631 fields += '\n record_number = rec.record_number' 3632 g = sql_user_functions.copy() 3633 g['List'] = List 3634 function %= (criteria, fields, criteria) 3635 #print function 3636 exec function in g 3637 return g['func']
3638
3639 -def sql_cmd(command, field_names):
3640 "creates a function matching to apply command to each record in records" 3641 function = """def func(records): 3642 \"\"\"%s\"\"\" 3643 changed = 0 3644 for rec in records: 3645 %s 3646 3647 %s 3648 3649 %s 3650 changed += rec.write_record() 3651 return changed""" 3652 fields = [] 3653 for field in field_names: 3654 if field in command: 3655 fields.append(field) 3656 pre_fields = '\n '.join(['%s = rec.%s' % (field, field) for field in fields]) 3657 post_fields = '\n '.join(['rec.%s = %s' % (field, field) for field in fields]) 3658 g = sql_user_functions.copy() 3659 if ' with ' in command.lower(): 3660 offset = command.lower().index(' with ') 3661 command = command[:offset] + ' = ' + command[offset+6:] 3662 function %= (command, pre_fields, command, post_fields) 3663 #print function 3664 exec function in g 3665 return g['func']
3666
3667 -def sql(records, command):
3668 """recognized sql commands are SELECT, UPDATE | REPLACE, DELETE, RECALL, ADD, DROP""" 3669 close_table = False 3670 if isinstance(records, (str, unicode)): 3671 records = Table(records) 3672 close_table = True 3673 try: 3674 sql_command = command 3675 if ' where ' in command: 3676 command, condition = command.split(' where ', 1) 3677 condition = sql_criteria(records, condition) 3678 else: 3679 def condition(records): 3680 return records[:]
3681 name, command = command.split(' ', 1) 3682 command = command.strip() 3683 name = name.lower() 3684 field_names = records[0].field_names 3685 if sql_functions.get(name) is None: 3686 raise DbfError('unknown SQL command: %s' % name.upper()) 3687 result = sql_functions[name](records, command, condition, field_names) 3688 tables = set() 3689 for record in result: 3690 tables.add(record.record_table) 3691 for list_table in tables: 3692 list_table._dbflists.add(result) 3693 finally: 3694 if close_table: 3695 records.close() 3696 return result 3697 3698 sql_functions = { 3699 'select' : sql_select, 3700 'update' : sql_update, 3701 'replace': sql_update, 3702 'insert' : None, 3703 'delete' : sql_delete, 3704 'recall' : sql_recall, 3705 'add' : sql_add, 3706 'drop' : sql_drop, 3707 'count' : None, 3708 'pack' : sql_pack, 3709 'resize' : sql_resize, 3710 }
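A hedged sketch of driving the sql() mini-language above; it assumes the example 'temptable' with name/age/birth fields and at least one record. Passing the open table (rather than a filename) keeps it open for iterating the result; UPDATE/REPLACE commands use 'with', which sql_cmd rewrites to an assignment.

    import dbf
    table = dbf.Table('temptable')
    matches = dbf.sql(table, "select name, age where age > 50")
    for record in matches:
        print record.name, record.age
    dbf.sql(table, "replace age with age + 1 where name == 'John Doe'")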
3711 3712 3713 -def _nop(value):
3714 "returns parameter unchanged" 3715 return value
3716 -def _normalize_tuples(tuples, length, filler):
3717 "ensures each tuple is the same length, using filler[-missing] for the gaps" 3718 final = [] 3719 for t in tuples: 3720 if len(t) < length: 3721 final.append( tuple([item for item in t] + filler[len(t)-length:]) ) 3722 else: 3723 final.append(t) 3724 return tuple(final)
3725 -def _codepage_lookup(cp):
3726 if cp not in code_pages: 3727 for code_page in sorted(code_pages.keys()): 3728 sd, ld = code_pages[code_page] 3729 if cp == sd or cp == ld: 3730 if sd is None: 3731 raise DbfError("Unsupported codepage: %s" % ld) 3732 cp = code_page 3733 break 3734 else: 3735 raise DbfError("Unsupported codepage: %s" % cp) 3736 sd, ld = code_pages[cp] 3737 return cp, sd, ld
3738 # miscellany
3739 3740 -def ascii(new_setting=None):
3741 "get/set return_ascii setting" 3742 global return_ascii 3743 if new_setting is None: 3744 return return_ascii 3745 else: 3746 return_ascii = new_setting
3747 -def codepage(cp=None):
3748 "get/set default codepage for any new tables" 3749 global default_codepage 3750 cp, sd, ld = _codepage_lookup(cp or default_codepage) 3751 default_codepage = sd 3752 return "%s (LDID: 0x%02x - %s)" % (sd, ord(cp), ld)
3753 -def encoding(cp=None):
3754 "get/set default encoding for non-unicode strings passed into a table" 3755 global input_decoding 3756 cp, sd, ld = _codepage_lookup(cp or input_decoding) 3757 default_codepage = sd 3758 return "%s (LDID: 0x%02x - %s)" % (sd, ord(cp), ld)
3759 -class _Db4Table(DbfTable):
3760 version = 'dBase IV w/memos (non-functional)' 3761 _versionabbv = 'db4' 3762 _fieldtypes = { 3763 'C' : {'Type':'Character', 'Retrieve':retrieveCharacter, 'Update':updateCharacter, 'Blank':str, 'Init':addCharacter}, 3764 'Y' : {'Type':'Currency', 'Retrieve':retrieveCurrency, 'Update':updateCurrency, 'Blank':Decimal(), 'Init':addVfpCurrency}, 3765 'B' : {'Type':'Double', 'Retrieve':retrieveDouble, 'Update':updateDouble, 'Blank':float, 'Init':addVfpDouble}, 3766 'F' : {'Type':'Float', 'Retrieve':retrieveNumeric, 'Update':updateNumeric, 'Blank':float, 'Init':addVfpNumeric}, 3767 'N' : {'Type':'Numeric', 'Retrieve':retrieveNumeric, 'Update':updateNumeric, 'Blank':int, 'Init':addVfpNumeric}, 3768 'I' : {'Type':'Integer', 'Retrieve':retrieveInteger, 'Update':updateInteger, 'Blank':int, 'Init':addVfpInteger}, 3769 'L' : {'Type':'Logical', 'Retrieve':retrieveLogical, 'Update':updateLogical, 'Blank':bool, 'Init':addLogical}, 3770 'D' : {'Type':'Date', 'Retrieve':retrieveDate, 'Update':updateDate, 'Blank':Date.today, 'Init':addDate}, 3771 'T' : {'Type':'DateTime', 'Retrieve':retrieveVfpDateTime, 'Update':updateVfpDateTime, 'Blank':DateTime.now, 'Init':addVfpDateTime}, 3772 'M' : {'Type':'Memo', 'Retrieve':retrieveMemo, 'Update':updateMemo, 'Blank':str, 'Init':addMemo}, 3773 'G' : {'Type':'General', 'Retrieve':retrieveMemo, 'Update':updateMemo, 'Blank':str, 'Init':addMemo}, 3774 'P' : {'Type':'Picture', 'Retrieve':retrieveMemo, 'Update':updateMemo, 'Blank':str, 'Init':addMemo}, 3775 '0' : {'Type':'_NullFlags', 'Retrieve':unsupportedType, 'Update':unsupportedType, 'Blank':int, 'Init':None} } 3776 _memoext = '.dbt' 3777 _memotypes = ('G','M','P') 3778 _memoClass = _VfpMemo 3779 _yesMemoMask = '\x8b' # 1000 1011 3780 _noMemoMask = '\x04' # 0000 0100 3781 _fixed_fields = ('B','D','G','I','L','M','P','T','Y') 3782 _variable_fields = ('C','F','N') 3783 _character_fields = ('C','M') # fields representing character data 3784 _decimal_fields = ('F','N') 3785 _numeric_fields = ('B','F','I','N','Y') 3786 _currency_fields = ('Y',) 3787 _supported_tables = ('\x04', '\x8b') 3788 _dbfTableHeader = ['\x00'] * 32 3789 _dbfTableHeader[0] = '\x8b' # version - dBase IV w/memos 1000 1011 3790 _dbfTableHeader[10] = '\x01' # record length -- one for delete flag 3791 _dbfTableHeader[29] = '\x03' # code page -- cp1252 Windows ANSI 3792 _dbfTableHeader = ''.join(_dbfTableHeader) 3793 _dbfTableHeaderExtra = '' 3794 _use_deleted = True
3795 - def _checkMemoIntegrity(yo):
3796 "dBase III specific" 3797 if yo._meta.header.version == '\x8b': 3798 try: 3799 yo._meta.memo = yo._memoClass(yo._meta) 3800 except: 3801 yo._meta.dfd.close() 3802 yo._meta.dfd = None 3803 raise 3804 if not yo._meta.ignorememos: 3805 for field in yo._meta.fields: 3806 if yo._meta[field]['type'] in yo._memotypes: 3807 if yo._meta.header.version != '\x8b': 3808 yo._meta.dfd.close() 3809 yo._meta.dfd = None 3810 raise DbfError("Table structure corrupt: memo fields exist, header declares no memos") 3811 elif not os.path.exists(yo._meta.memoname): 3812 yo._meta.dfd.close() 3813 yo._meta.dfd = None 3814 raise DbfError("Table structure corrupt: memo fields exist without memo file") 3815 break
3816
3817 # utility functions 3818 3819 -def Table( 3820 filename, 3821 field_specs='', 3822 memo_size=128, 3823 ignore_memos=False, 3824 read_only=False, 3825 keep_memos=False, 3826 meta_only=False, 3827 dbf_type=None, 3828 codepage=None, 3829 numbers='default', 3830 strings=str, 3831 currency=Decimal, 3832 ):
3833 "returns an open table of the correct dbf_type, or creates it if field_specs is given" 3834 if dbf_type is None and isinstance(filename, DbfTable): 3835 return filename 3836 if field_specs and dbf_type is None: 3837 dbf_type = default_type 3838 if dbf_type is not None: 3839 dbf_type = dbf_type.lower() 3840 table = table_types.get(dbf_type) 3841 if table is None: 3842 raise DbfError("Unknown table type: %s" % dbf_type) 3843 return table(filename, field_specs, memo_size, ignore_memos, read_only, keep_memos, meta_only, codepage, numbers, strings, currency) 3844 else: 3845 possibles = guess_table_type(filename) 3846 if len(possibles) == 1: 3847 return possibles[0][2](filename, field_specs, memo_size, ignore_memos, \ 3848 read_only, keep_memos, meta_only, codepage, numbers, strings, currency) 3849 else: 3850 for type, desc, cls in possibles: 3851 if type == default_type: 3852 return cls(filename, field_specs, memo_size, ignore_memos, \ 3853 read_only, keep_memos, meta_only, codepage, numbers, strings, currency) 3854 else: 3855 types = ', '.join(["%s" % item[1] for item in possibles]) 3856 abbrs = '[' + ' | '.join(["%s" % item[0] for item in possibles]) + ']' 3857 raise DbfError("Table could be any of %s. Please specify %s when opening" % (types, abbrs))
3858 -def index(sequence):
3859 "returns integers 0 - len(sequence)" 3860 for i in xrange(len(sequence)): 3861 yield i
3862 -def guess_table_type(filename):
3863 reported = table_type(filename) 3864 possibles = [] 3865 version = reported[0] 3866 for tabletype in (Db3Table, FpTable, VfpTable): 3867 if version in tabletype._supported_tables: 3868 possibles.append((tabletype._versionabbv, tabletype._version, tabletype)) 3869 if not possibles: 3870 raise DbfError("Tables of type %s not supported" % str(reported)) 3871 return possibles
3872 -def table_type(filename):
3873 "returns text representation of a table's dbf version" 3874 base, ext = os.path.splitext(filename) 3875 if ext == '': 3876 filename = base + '.dbf' 3877 if not os.path.exists(filename): 3878 raise DbfError('File %s not found' % filename) 3879 fd = open(filename) 3880 version = fd.read(1) 3881 fd.close() 3882 fd = None 3883 if not version in version_map: 3884 raise DbfError("Unknown dbf type: %s (%x)" % (version, ord(version))) 3885 return version, version_map[version]
3886
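A small sketch of the header-sniffing helpers above ('employees' is a placeholder): table_type returns the raw version byte plus its description, and guess_table_type lists every table class whose _supported_tables includes that byte.

    import dbf
    print dbf.table_type('employees')                # e.g. ('\x03', 'dBase III Plus')
    for abbr, desc, cls in dbf.guess_table_type('employees'):
        print abbr, '-->', desc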
3887 -def add_fields(table_name, field_specs):
3888 "adds fields to an existing table" 3889 table = Table(table_name) 3890 try: 3891 table.add_fields(field_specs) 3892 finally: 3893 table.close()
3894 -def delete_fields(table_name, field_names):
3895 "deletes fields from an existing table" 3896 table = Table(table_name) 3897 try: 3898 table.delete_fields(field_names) 3899 finally: 3900 table.close()
3901 -def export(table_name, filename='', fields='', format='csv', header=True):
3902 "creates a csv or tab-delimited file from an existing table" 3903 if fields is None: 3904 fields = [] 3905 table = Table(table_name) 3906 try: 3907 table.export(filename=filename, field_specs=fields, format=format, header=header) 3908 finally: 3909 table.close()
3910 -def first_record(table_name):
3911 "prints the first record of a table" 3912 table = Table(table_name) 3913 try: 3914 print str(table[0]) 3915 finally: 3916 table.close()
3917 -def from_csv(csvfile, to_disk=False, filename=None, field_names=None, extra_fields=None, dbf_type='db3', memo_size=64, min_field_size=1):
3918 """creates a Character table from a csv file 3919 to_disk will create a table with the same name 3920 filename will be used if provided 3921 field_names default to f0, f1, f2, etc, unless specified (list) 3922 extra_fields can be used to add additional fields -- should be normal field specifiers (list)""" 3923 reader = csv.reader(open(csvfile)) 3924 if field_names: 3925 field_names = ['%s M' % fn for fn in field_names] 3926 else: 3927 field_names = ['f0 M'] 3928 mtable = Table(':memory:', [field_names[0]], dbf_type=dbf_type, memo_size=memo_size) 3929 fields_so_far = 1 3930 for row in reader: 3931 while fields_so_far < len(row): 3932 if fields_so_far == len(field_names): 3933 field_names.append('f%d M' % fields_so_far) 3934 mtable.add_fields(field_names[fields_so_far]) 3935 fields_so_far += 1 3936 mtable.append(tuple(row)) 3937 if filename: 3938 to_disk = True 3939 if not to_disk: 3940 if extra_fields: 3941 mtable.add_fields(extra_fields) 3942 else: 3943 if not filename: 3944 filename = os.path.splitext(csvfile)[0] 3945 length = [min_field_size] * len(field_names) 3946 for record in mtable: 3947 for i in index(record.field_names): 3948 length[i] = max(length[i], len(record[i])) 3949 fields = mtable.field_names 3950 fielddef = [] 3951 for i in index(length): 3952 if length[i] < 255: 3953 fielddef.append('%s C(%d)' % (fields[i], length[i])) 3954 else: 3955 fielddef.append('%s M' % (fields[i])) 3956 if extra_fields: 3957 fielddef.extend(extra_fields) 3958 csvtable = Table(filename, fielddef, dbf_type=dbf_type) 3959 for record in mtable: 3960 csvtable.append(record.scatter_fields()) 3961 return csvtable 3962 return mtable
3963 -def get_fields(table_name):
3964 "returns the list of field names of a table" 3965 table = Table(table_name) 3966 return table.field_names
3967 -def info(table_name):
3968 "prints table info" 3969 table = Table(table_name) 3970 print str(table)
3971 -def rename_field(table_name, oldfield, newfield):
3972 "renames a field in a table" 3973 table = Table(table_name) 3974 try: 3975 table.rename_field(oldfield, newfield) 3976 finally: 3977 table.close()
3978 -def structure(table_name, field=None):
3979 "returns the definition of a field (or all fields)" 3980 table = Table(table_name) 3981 return table.structure(field)
3982 -def hex_dump(records):
3983 "just what it says ;)" 3984 for index,dummy in enumerate(records): 3985 chars = dummy._data 3986 print "%2d: " % index, 3987 for char in chars[1:]: 3988 print " %2x " % ord(char), 3989 print
3990