1 """
2 =========
3 Copyright
4 =========
5 - Copyright: 2008-2011 Ad-Mail, Inc -- All rights reserved.
6 - Author: Ethan Furman
7 - Contact: ethanf@admailinc.com
8 - Organization: Ad-Mail, Inc.
9 - Version: 0.88.022 as of 12 Jul 2011
10
11 Redistribution and use in source and binary forms, with or without
12 modification, are permitted provided that the following conditions are met:
13 - Redistributions of source code must retain the above copyright
14 notice, this list of conditions and the following disclaimer.
15 - Redistributions in binary form must reproduce the above copyright
16 notice, this list of conditions and the following disclaimer in the
17 documentation and/or other materials provided with the distribution.
18 - Neither the name of Ad-Mail, Inc nor the
19 names of its contributors may be used to endorse or promote products
20 derived from this software without specific prior written permission.
21
22 THIS SOFTWARE IS PROVIDED BY Ad-Mail, Inc ''AS IS'' AND ANY
23 EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
24 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
25 DISCLAIMED. IN NO EVENT SHALL Ad-Mail, Inc BE LIABLE FOR ANY
26 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
27 (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
28 LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
29 ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
30 (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
31 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32
33 -------
34 Summary
35 -------
36
37 Python package for reading/writing dBase III and VFP 6 tables and memos
38
39 Goals: programming style with databases
40 - table = dbf.Table('table name'[, 'fielddesc[; fielddesc[; ...]]'])
41 - fielddesc examples: name C(30); age N(3,0); wisdom M; marriage D
42 - record = [ table.current() | table[int] | table.append() | table.[next|prev|top|bottom|goto]() ]
43 - record.field | record['field'] accesses the field
44
45 NOTE: Of the VFP data types, auto-increment and null settings are not implemented.
46
47 Example:
48
49 Create a test table:
50 table = dbf.Table('temptable', 'name C(30); age N(3,0); birth D')
51
52 Populate it:
53 for datum in (
54 ('John Doe', 31, dbf.Date(1979, 9,13)),
55 ('Ethan Furman', 102, dbf.Date(1909, 4, 1)),
56 ('Jane Smith', 57, dbf.Date(1954, 7, 2)),
57 ):
58 table.append(datum)
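
A dict whose keys match the field names can be appended as well (a sketch;
see append() and gather_fields() in the source below -- the values here are
illustrative):
    table.append({'name': 'John Adams', 'age': 45, 'birth': dbf.Date(1776, 7, 4)})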
59
60 Export to csv:
61 table.export(filename='filename', header=False)
62
63 Iterate over it:
64 for record in table:
65 print "%s was born on %s, so s/he is %d years of age" % (record.name, record.birth, record.age)
66
67 Create a new table from a csv file:
68 table = dbf.from_csv('filename.csv') # this has field names of f0, f1, f2, etc
69 or
70 table = dbf.from_csv('filename.csv', field_names="name age birth".split())
71
72 Sort it:
73 name_index = table.create_index(lambda rec: rec.name)
74 for record in name_index:
75 print record.name
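
Records can be left out of an index by returning dbf.DoNotIndex from the key
function (a sketch; see the DoNotIndex docstring in the source below):
    def by_name_adults_only(rec):
        if rec.age < 21:
            return dbf.DoNotIndex
        return rec.name
    adult_index = table.create_index(by_name_adults_only)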
76
77 Primitive SQL (work in progress):
78 records = table.sql("select * where name[0] == 'J'")
79 for rec in records:
80 print rec
81 print
82
83
84 """
85 __docformat__ = 'epytext'
86 version = (0, 88, 22)
87
88 __all__ = (
89 'Table', 'List', 'Date', 'DateTime', 'Time',
90 'DbfError', 'DataOverflow', 'FieldMissing', 'NonUnicode',
91 'DbfWarning', 'Eof', 'Bof', 'DoNotIndex',
92 )
93
94
95 import codecs
96 import csv
97 import datetime
98 import locale
99 import os
100 import struct
101 import sys
102 import time
103 import unicodedata
104 import weakref
105
106 from array import array
107 from bisect import bisect_left, bisect_right
108 from decimal import Decimal
109 from math import floor
110 from shutil import copyfileobj
111
112 __metaclass__ = type
113
114 input_decoding = locale.getdefaultlocale()[1]
115 default_codepage = input_decoding
116 return_ascii = False
117 temp_dir = os.environ.get("DBF_TEMP") or os.environ.get("TEMP") or ""
118
119 default_type = 'db3'
120 sql_user_functions = {}
121
122
123 if sys.version_info[:2] < (2, 6):
126 "Emulate PyProperty_Type() in Objects/descrobject.c"
127
128 def __init__(self, fget=None, fset=None, fdel=None, doc=None):
129 self.fget = fget
130 self.fset = fset
131 self.fdel = fdel
132 self.__doc__ = doc or fget.__doc__
134 self.fget = func
135 if not self.__doc__:
136 self.__doc__ = func.__doc__
137 def __get__(self, obj, objtype=None):
138 if obj is None:
139 return self
140 if self.fget is None:
141 raise AttributeError("unreadable attribute")
142 return self.fget(obj)
144 if self.fset is None:
145 raise AttributeError("can't set attribute")
146 self.fset(obj, value)
148 if self.fdel is None:
149 raise AttributeError("can't delete attribute")
150 self.fdel(obj)
152 self.fset = func
153 return self
155 self.fdel = func
156 return self
157
161 "Fatal errors elicit this response."
162 pass
164 "Data too large for field"
169 "Field does not exist in table"
174 "Data for table not in unicode"
178 "Normal operations elicit this response"
179 class Eof(DbfWarning, StopIteration):
180 "End of file reached"
181 message = 'End of file reached'
184 class Bof(DbfWarning, StopIteration):
185 "Beginning of file reached"
186 message = 'Beginning of file reached'
190 "Returned by indexing functions to suppress a record from becoming part of the index"
191 message = 'Not indexing record'
194
197 "adds null capable datetime.date constructs"
198 __slots__ = ['_date']
199 def __new__(cls, year=None, month=0, day=0):
200 """year may be a datetime.date or Date instance, or year/month/day should all be appropriate integers"""
201 nd = object.__new__(cls)
202 nd._date = False
203 if type(year) == datetime.date:
204 nd._date = year
205 elif type(year) == Date:
206 nd._date = year._date
207 elif year == 'no date':
208 pass
209 elif year is not None:
210 nd._date = datetime.date(year, month, day)
211 return nd
213 if yo and type(other) == datetime.timedelta:
214 return Date(yo._date + other)
215 else:
216 return NotImplemented
218 if yo:
219 if type(other) == datetime.date:
220 return yo._date == other
221 elif type(other) == Date:
222 if other:
223 return yo._date == other._date
224 return False
225 else:
226 if type(other) == datetime.date:
227 return False
228 elif type(other) == Date:
229 if other:
230 return False
231 return True
232 return NotImplemented
234 if yo:
235 attribute = yo._date.__getattribute__(name)
236 return attribute
237 else:
238 raise AttributeError('null Date object has no attribute %s' % name)
240 if yo:
241 if type(other) == datetime.date:
242 return yo._date >= other
243 elif type(other) == Date:
244 if other:
245 return yo._date >= other._date
246 return False
247 else:
248 if type(other) == datetime.date:
249 return False
250 elif type(other) == Date:
251 if other:
252 return False
253 return True
254 return NotImplemented
256 if yo:
257 if type(other) == datetime.date:
258 return yo._date > other
259 elif type(other) == Date:
260 if other:
261 return yo._date > other._date
262 return True
263 else:
264 if type(other) == datetime.date:
265 return False
266 elif type(other) == Date:
267 if other:
268 return False
269 return False
270 return NotImplemented
274 if yo:
275 if type(other) == datetime.date:
276 return yo._date <= other
277 elif type(other) == Date:
278 if other:
279 return yo._date <= other._date
280 return False
281 else:
282 if type(other) == datetime.date:
283 return True
284 elif type(other) == Date:
285 if other:
286 return True
287 return True
288 return NotImplemented
290 if yo:
291 if type(other) == datetime.date:
292 return yo._date < other
293 elif type(other) == Date:
294 if other:
295 return yo._date < other._date
296 return False
297 else:
298 if type(other) == datetime.date:
299 return True
300 elif type(other) == Date:
301 if other:
302 return True
303 return False
304 return NotImplemented
306 if yo:
307 if type(other) == datetime.date:
308 return yo._date != other
309 elif type(other) == Date:
310 if other:
311 return yo._date != other._date
312 return True
313 else:
314 if type(other) == datetime.date:
315 return True
316 elif type(other) == Date:
317 if other:
318 return True
319 return False
320 return NotImplemented
322 if yo._date:
323 return True
324 return False
325 __radd__ = __add__
336 if yo:
337 return "Date(%d, %d, %d)" % yo.timetuple()[:3]
338 else:
339 return "Date()"
341 if yo:
342 return yo.isoformat()
343 return "no date"
354 if yo:
355 return yo._date
356 return None
357 @classmethod
362 @classmethod
365 @classmethod
367 if yyyymmdd in ('', ' ','no date'):
368 return cls()
369 return cls(datetime.date(int(yyyymmdd[:4]), int(yyyymmdd[4:6]), int(yyyymmdd[6:])))
371 if yo:
372 return yo._date.strftime(format)
373 return '<no date>'
374 @classmethod
378 if yo:
379 return "%04d%02d%02d" % yo.timetuple()[:3]
380 else:
381 return ' '
382 Date.max = Date(datetime.date.max)
383 Date.min = Date(datetime.date.min)
385 "adds null capable datetime.datetime constructs"
386 __slots__ = ['_datetime']
387 def __new__(cls, year=None, month=0, day=0, hour=0, minute=0, second=0, microsec=0):
420 if yo:
421 attribute = yo._datetime.__getattribute__(name)
422 return attribute
423 else:
424 raise AttributeError('null DateTime object has no attribute %s' % name)
508 if yo._datetime is not False:
509 return True
510 return False
511 __radd__ = __add__
522 if yo:
523 return "DateTime(%d, %d, %d, %d, %d, %d, %d)" % (yo._datetime.timetuple()[:6] + (yo._datetime.microsecond,))
524 else:
525 return "DateTime()"
527 if yo:
528 return yo.isoformat()
529 return "no datetime"
539 @classmethod
545 if yo:
546 return Date(yo.year, yo.month, yo.day)
547 return Date()
549 if yo:
550 return yo._datetime
551 return None
552 @classmethod
558 @classmethod
561 @classmethod
565 if yo:
566 return Time(yo.hour, yo.minute, yo.second, yo.microsecond)
567 return Time()
568 @classmethod
571 @classmethod
574 DateTime.max = DateTime(datetime.datetime.max)
575 DateTime.min = DateTime(datetime.datetime.min)
577 "adds null capable datetime.time constructs"
578 __slots__ = ['_time']
579 def __new__(cls, hour=None, minute=0, second=0, microsec=0):
591 if yo and type(other) == datetime.timedelta:
592 return Time(yo._time + other)
593 else:
594 return NotImplemented
596 if yo:
597 if type(other) == datetime.time:
598 return yo._time == other
599 elif type(other) == Time:
600 if other:
601 return yo._time == other._time
602 return False
603 else:
604 if type(other) == datetime.time:
605 return False
606 elif type(other) == Time:
607 if other:
608 return False
609 return True
610 return NotImplemented
612 if yo:
613 attribute = yo._time.__getattribute__(name)
614 return attribute
615 else:
616 raise AttributeError('null Time object has no attribute %s' % name)
618 if yo:
619 if type(other) == datetime.time:
620 return yo._time >= other
621 elif type(other) == Time:
622 if other:
623 return yo._time >= other._time
624 return False
625 else:
626 if type(other) == datetime.time:
627 return False
628 elif type(other) == Time:
629 if other:
630 return False
631 return True
632 return NotImplemented
634 if yo:
635 if type(other) == datetime.time:
636 return yo._time > other
637 elif type(other) == Time:
638 if other:
639 return yo._time > other._time
640 return True
641 else:
642 if type(other) == datetime.time:
643 return False
644 elif type(other) == Time:
645 if other:
646 return False
647 return False
648 return NotImplemented
652 if yo:
653 if type(other) == datetime.time:
654 return yo._time <= other
655 elif type(other) == Time:
656 if other:
657 return yo._time <= other._time
658 return False
659 else:
660 if type(other) == datetime.time:
661 return True
662 elif type(other) == Time:
663 if other:
664 return True
665 return True
666 return NotImplemented
668 if yo:
669 if type(other) == datetime.time:
670 return yo._time < other
671 elif type(other) == Time:
672 if other:
673 return yo._time < other._time
674 return False
675 else:
676 if type(other) == datetime.time:
677 return True
678 elif type(other) == Time:
679 if other:
680 return True
681 return False
682 return NotImplemented
684 if yo:
685 if type(other) == datetime.time:
686 return yo._time != other
687 elif type(other) == Time:
688 if other:
689 return yo._time != other._time
690 return True
691 else:
692 if type(other) == datetime.time:
693 return True
694 elif type(other) == Time:
695 if other:
696 return True
697 return False
698 return NotImplemented
700 if yo._time is not False:
701 return True
702 return False
703 __radd__ = __add__
714 if yo:
715 return "Time(%d, %d, %d, %d)" % (yo.hour, yo.minute, yo.second, yo.microsecond)
716 else:
717 return "Time()"
719 if yo:
720 return yo.isoformat()
721 return "no time"
731 Time.max = Time(datetime.time.max)
732 Time.min = Time(datetime.time.min)
735 "return type for Logical fields; implements boolean algebra"
736 _need_init = True
738 "OR (disjunction): x | y => True iff at least one of x, y is True"
739 if not isinstance(y, (x.__class__, bool, type(None))):
740 return NotImplemented
741 if x.value is None or y == None:
742 return x.unknown
743 elif x.value is True or y == True:
744 return x.true
745 return x.false
747 "IMP (material implication) x >> y => False iff x == True and y == False"
748 if not isinstance(y, (x.__class__, bool, type(None))):
749 return NotImplemented
750 if x.value is None or y == None:
751 return x.unknown
752 elif y == False and x.value is True:
753 return x.false
754 return x.true
756 "IMP (material implication) x >> y => False iff x = True and y = False"
757 if not isinstance(x, (y.__class__, bool, type(None))):
758 return NotImplemented
759 if x == None or y.value is None:
760 return y.unknown
761 elif x == True and y.value is False:
762 return y.false
763 return y.true
765 "IMP (relevant implication) x >> y => True iff both x, y are True, False iff x == True and y == False, Unknown if x is False"
766 if not isinstance(y, (x.__class__, bool, type(None))):
767 return NotImplemented
768 if x.value is True and y == True:
769 return x.true
770 if x.value is True and y == False:
771 return x.false
772 return x.unknown
774 "IMP (relevant implication) x >> y => True iff both x, y are True, False iff x == True and y == False, Unknown if y is False"
775 if not isinstance(x, (y.__class__, bool, type(None))):
776 return NotImplemented
777 if x == True and y.value is True:
778 return y.true
779 if x == True and y.value is False:
780 return y.false
781 return y.unknown
783 "NAND (negative AND) x.D(y): False iff x and y are both True"
784 if not isinstance(y, (x.__class__, bool, type(None))):
785 return NotImplemented
786 if x.value is None or y == None:
787 return x.unknown
788 elif x.value is True and y == True:
789 return x.false
790 return x.true
792 "EQV (equivalence) x.E(y): True iff x and y are the same"
793 if not isinstance(y, (x.__class__, bool, type(None))):
794 return NotImplemented
795 if x.value is None or y == None:
796 return x.unknown
797 elif y == True:
798 return (x.false, x.true)[x]
799 elif y == False:
800 return (x.true, x.false)[x]
802 "XOR (parity) x ^ y: True iff only one of x,y is True"
803 if not isinstance(y, (x.__class__, bool, type(None))):
804 return NotImplemented
805 if x.value is None or y == None:
806 return x.unknown
807 elif y == True:
808 return (x.true, x.false)[x]
809 elif y == False:
810 return (x.false, x.true)[x]
812 "AND (conjunction) x & y: True iff both x, y are True"
813 if not isinstance(y, (x.__class__, bool, type(None))):
814 return NotImplemented
815 if x.value is None or y == None:
816 return x.unknown
817 elif y == True:
818 return (x.false, x.true)[x]
819 elif y == False:
820 return x.false
822 "NEG (negation) -x: True iff x = False"
823 if x is x.true:
824 return x.false
825 elif x is x.false:
826 return x.true
827 else:
828 return x.unknown
829 @classmethod
843 if value is None:
844 return cls.unknown
845 elif isinstance(value, (str, unicode)):
846 if value.lower() in ('t','true','y','yes','on'):
847 return cls.true
848 elif value.lower() in ('f','false','n','no','off'):
849 return cls.false
850 elif value.lower() in ('?','unknown','null','none',' '):
851 return cls.unknown
852 else:
853 raise ValueError('unknown value for Logical: %s' % value)
854 else:
855 return (cls.false, cls.true)[bool(value)]
857 if isinstance(y, (bool, type(None))):
858 return x.__class__(x.value == y)
859 if isinstance(y, x.__class__):
860 return x.__class__(x.value == y.value)
861 return NotImplemented
865 if x.value is False:
866 return 0
867 if x.value is True:
868 return 1
869 if x.value is None:
870 return 2
872 if isinstance(y, (bool, type(None))):
873 return x.__class__(x.value != y)
874 if isinstance(y, x.__class__):
875 return x.__class__(x.value != y.value)
876 return NotImplemented
878 return x.value == True
880 return "Logical(%r)" % x.string
883 __add__ = A
884 __and__ = K
885 __mul__ = K
886 __neg__ = N
887 __or__ = A
888 __radd__ = A
889 __rand__ = K
890 __rshift__ = None
891 __rmul__ = K
892 __ror__ = A
893 __rrshift__ = None
894 __rxor__ = J
895 __xor__ = J
896 if hasattr(Logical, '_need_init'):
897 Logical.true = true = object.__new__(Logical)
898 true.value = True
899 true.string = 'T'
900 Logical.false = false = object.__new__(Logical)
901 false.value = False
902 false.string = 'F'
903 Logical.unknown = unknown = object.__new__(Logical)
904 unknown.value = None
905 unknown.string = '?'
906 Logical.set_implication('material')
907 del Logical._need_init
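# Brief illustration of the three-valued algebra wired up above (A is OR, K is
# AND, N is NEG; true/false/unknown are the singletons just created) -- expected
# results only, not executed code:
#   Logical.true & Logical.unknown    -> Logical('?')   AND with an unknown is unknown
#   Logical.true | Logical.false      -> Logical('T')   OR needs only one true operand
#   -Logical.unknown                  -> Logical('?')   negating unknown stays unknown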
911 """Provides routines to extract and save data within the fields of a dbf record."""
912 __slots__ = ['_recnum', '_layout', '_data', '_dirty', '__weakref__']
914 """calls appropriate routine to fetch value stored in field from array
915 @param record_data: the data portion of the record
916 @type record_data: array of characters
917 @param fielddef: description of the field definition
918 @type fielddef: dictionary with keys 'type', 'start', 'length', 'end', 'decimals', and 'flags'
919 @returns: python data stored in field"""
920
921 field_type = fielddef['type']
922 classtype = yo._layout.fieldtypes[field_type]['Class']
923 retrieve = yo._layout.fieldtypes[field_type]['Retrieve']
924 if classtype is not None:
925 datum = retrieve(record_data, fielddef, yo._layout.memo, classtype)
926 else:
927 datum = retrieve(record_data, fielddef, yo._layout.memo)
928 if field_type in yo._layout.character_fields:
929 datum = yo._layout.decoder(datum)[0]
930 if yo._layout.return_ascii:
931 try:
932 datum = yo._layout.output_encoder(datum)[0]
933 except UnicodeEncodeError:
934 datum = unicodedata.normalize('NFD', datum).encode('ascii','ignore')
935 return datum
937 "calls appropriate routine to convert value to ascii bytes, and save it in record"
938 field_type = fielddef['type']
939 update = yo._layout.fieldtypes[field_type]['Update']
940 if field_type in yo._layout.character_fields:
941 if not isinstance(value, unicode):
942 if yo._layout.input_decoder is None:
943 raise NonUnicode("String not in unicode format, no default encoding specified")
944 value = yo._layout.input_decoder(value)[0]
945 value = yo._layout.encoder(value)[0]
946 bytes = array('c', update(value, fielddef, yo._layout.memo))
947 size = fielddef['length']
948 if len(bytes) > size:
949 raise DataOverflow("tried to store %d bytes in %d byte field" % (len(bytes), size))
950 blank = array('c', ' ' * size)
951 start = fielddef['start']
952 end = start + size
953 blank[:len(bytes)] = bytes[:]
954 yo._data[start:end] = blank[:]
955 yo._dirty = True
974 if name[0:2] == '__' and name[-2:] == '__':
975 raise AttributeError('Method %s is not implemented.' % name)
976 elif name == 'record_number':
977 return yo._recnum
978 elif name == 'delete_flag':
979 return yo._data[0] != ' '
980 elif not name in yo._layout.fields:
981 raise FieldMissing(name)
982 try:
983 fielddef = yo._layout[name]
984 value = yo._retrieveFieldValue(yo._data[fielddef['start']:fielddef['end']], fielddef)
985 return value
986 except DbfError, error:
987 error.message = "field --%s-- is %s -> %s" % (name, yo._layout.fieldtypes[fielddef['type']]['Type'], error.message)
988 raise
1005 def __new__(cls, recnum, layout, kamikaze='', _fromdisk=False):
1044 if type(name) == str:
1045 yo.__setattr__(name, value)
1046 elif type(name) in (int, long):
1047 yo.__setattr__(yo._layout.fields[name], value)
1048 elif type(name) == slice:
1049 sequence = []
1050 for field in yo._layout.fields[name]:
1051 sequence.append(field)
1052 if len(sequence) != len(value):
1053 raise DbfError("length of slices not equal")
1054 for field, val in zip(sequence, value):
1055 yo[field] = val
1056 else:
1057 raise TypeError("%s is not a field name" % name)
1059 result = []
1060 for seq, field in enumerate(yo.field_names):
1061 result.append("%3d - %-10s: %s" % (seq, field, yo[field]))
1062 return '\n'.join(result)
1064 return yo._data.tostring()
1066 "creates a blank record data chunk"
1067 layout = yo._layout
1068 ondisk = layout.ondisk
1069 layout.ondisk = False
1070 yo._data = array('c', ' ' * layout.header.record_length)
1071 layout.memofields = []
1072 for field in layout.fields:
1073 yo._updateFieldValue(layout[field], layout.fieldtypes[layout[field]['type']]['Blank']())
1074 if layout[field]['type'] in layout.memotypes:
1075 layout.memofields.append(field)
1076 layout.blankrecord = yo._data[:]
1077 layout.ondisk = ondisk
1079 "marks record as deleted"
1080 yo._data[0] = '*'
1081 yo._dirty = True
1082 return yo
1083 @property
1088 "saves a dictionary into a record's fields\nkeys with no matching field will raise a FieldMissing exception unless drop = True"
1089 old_data = yo._data[:]
1090 try:
1091 for key in dictionary:
1092 if not key in yo.field_names:
1093 if drop:
1094 continue
1095 raise FieldMissing(key)
1096 yo.__setattr__(key, dictionary[key])
1097 except:
1098 yo._data[:] = old_data
1099 raise
1100 return yo
1101 @property
1103 "marked for deletion?"
1104 return yo._data[0] == '*'
1113 @property
1115 "physical record number"
1116 return yo._recnum
1117 @property
1119 table = yo._layout.table()
1120 if table is None:
1121 raise DbfError("table is no longer available")
1122 return table
1124 for dbfindex in yo._layout.table()._indexen:
1125 dbfindex(yo)
1127 "blanks record"
1128 if keep_fields is None:
1129 keep_fields = []
1130 keep = {}
1131 for field in keep_fields:
1132 keep[field] = yo[field]
1133 if yo._layout.blankrecord == None:
1134 yo._createBlankRecord()
1135 yo._data[:] = yo._layout.blankrecord[:]
1136 for field in keep_fields:
1137 yo[field] = keep[field]
1138 yo._dirty = True
1139 return yo
1141 "returns a dictionary of fieldnames and values which can be used with gather_fields(). if blank is True, values are empty."
1142 keys = yo._layout.fields
1143 if blank:
1144 values = [yo._layout.fieldtypes[yo._layout[key]['type']]['Blank']() for key in keys]
1145 else:
1146 values = [yo[field] for field in keys]
1147 return dict(zip(keys, values))
1149 "marks record as active"
1150 yo._data[0] = ' '
1151 yo._dirty = True
1152 return yo
1162 """Provides access to memo fields as dictionaries
1163 must override _init, _get_memo, and _put_memo to
1164 store memo contents to disk"""
1166 "initialize disk file usage"
1168 "retrieve memo contents from disk"
1170 "store memo contents to disk"
1172 ""
1173 yo.meta = meta
1174 yo.memory = {}
1175 yo.nextmemo = 1
1176 yo._init()
1177 yo.meta.newmemofile = False
1179 "gets the memo in block"
1180 if yo.meta.ignorememos or not block:
1181 return ''
1182 if yo.meta.ondisk:
1183 return yo._get_memo(block)
1184 else:
1185 return yo.memory[block]
1187 "stores data in memo file, returns block number"
1188 if yo.meta.ignorememos or data == '':
1189 return 0
1190 if yo.meta.inmemory:
1191 thismemo = yo.nextmemo
1192 yo.nextmemo += 1
1193 yo.memory[thismemo] = data
1194 else:
1195 thismemo = yo._put_memo(data)
1196 return thismemo
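# Sketch of the override contract described in the _DbfMemo docstring (the
# _NullMemo name is hypothetical and only illustrates the three hooks; the real
# subclasses below store their blocks in the table's memo file):
#
#   class _NullMemo(_DbfMemo):
#       def _init(yo):
#           pass                # no disk structures to prepare
#       def _get_memo(yo, block):
#           return ''           # nothing ever stored
#       def _put_memo(yo, data):
#           return 0            # block 0 is treated as "no memo"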
1199 "dBase III specific"
1200 yo.meta.memo_size= 512
1201 yo.record_header_length = 2
1202 if yo.meta.ondisk and not yo.meta.ignorememos:
1203 if yo.meta.newmemofile:
1204 yo.meta.mfd = open(yo.meta.memoname, 'w+b')
1205 yo.meta.mfd.write(packLongInt(1) + '\x00' * 508)
1206 else:
1207 try:
1208 yo.meta.mfd = open(yo.meta.memoname, 'r+b')
1209 yo.meta.mfd.seek(0)
1210 yo.nextmemo = unpackLongInt(yo.meta.mfd.read(4))
1211 except:
1212 raise DbfError("memo file appears to be corrupt")
1214 block = int(block)
1215 yo.meta.mfd.seek(block * yo.meta.memo_size)
1216 eom = -1
1217 data = ''
1218 while eom == -1:
1219 newdata = yo.meta.mfd.read(yo.meta.memo_size)
1220 if not newdata:
1221 return data
1222 data += newdata
1223 eom = data.find('\x1a\x1a')
1224 return data[:eom].rstrip()
1226 data = data.rstrip()
1227 length = len(data) + yo.record_header_length
1228 blocks = length // yo.meta.memo_size
1229 if length % yo.meta.memo_size:
1230 blocks += 1
1231 thismemo = yo.nextmemo
1232 yo.nextmemo = thismemo + blocks
1233 yo.meta.mfd.seek(0)
1234 yo.meta.mfd.write(packLongInt(yo.nextmemo))
1235 yo.meta.mfd.seek(thismemo * yo.meta.memo_size)
1236 yo.meta.mfd.write(data)
1237 yo.meta.mfd.write('\x1a\x1a')
1238 double_check = yo._get_memo(thismemo)
1239 if len(double_check) != len(data):
1240 uhoh = open('dbf_memo_dump.err','wb')
1241 uhoh.write('thismemo: %d' % thismemo)
1242 uhoh.write('nextmemo: %d' % yo.nextmemo)
1243 uhoh.write('saved: %d bytes' % len(data))
1244 uhoh.write(data)
1245 uhoh.write('retrieved: %d bytes' % len(double_check))
1246 uhoh.write(double_check)
1247 uhoh.close()
1248 raise DbfError("unknown error: memo not saved")
1249 return thismemo
1252 "Visual Foxpro 6 specific"
1253 if yo.meta.ondisk and not yo.meta.ignorememos:
1254 yo.record_header_length = 8
1255 if yo.meta.newmemofile:
1256 if yo.meta.memo_size == 0:
1257 yo.meta.memo_size = 1
1258 elif 1 < yo.meta.memo_size < 33:
1259 yo.meta.memo_size *= 512
1260 yo.meta.mfd = open(yo.meta.memoname, 'w+b')
1261 nextmemo = 512 // yo.meta.memo_size
1262 if nextmemo * yo.meta.memo_size < 512:
1263 nextmemo += 1
1264 yo.nextmemo = nextmemo
1265 yo.meta.mfd.write(packLongInt(nextmemo, bigendian=True) + '\x00\x00' + \
1266 packShortInt(yo.meta.memo_size, bigendian=True) + '\x00' * 504)
1267 else:
1268 try:
1269 yo.meta.mfd = open(yo.meta.memoname, 'r+b')
1270 yo.meta.mfd.seek(0)
1271 header = yo.meta.mfd.read(512)
1272 yo.nextmemo = unpackLongInt(header[:4], bigendian=True)
1273 yo.meta.memo_size = unpackShortInt(header[6:8], bigendian=True)
1274 except:
1275 raise DbfError("memo file appears to be corrupt")
1277 yo.meta.mfd.seek(block * yo.meta.memo_size)
1278 header = yo.meta.mfd.read(8)
1279 length = unpackLongInt(header[4:], bigendian=True)
1280 return yo.meta.mfd.read(length)
1282 data = data.rstrip()
1283 yo.meta.mfd.seek(0)
1284 thismemo = unpackLongInt(yo.meta.mfd.read(4), bigendian=True)
1285 yo.meta.mfd.seek(0)
1286 length = len(data) + yo.record_header_length
1287 blocks = length // yo.meta.memo_size
1288 if length % yo.meta.memo_size:
1289 blocks += 1
1290 yo.meta.mfd.write(packLongInt(thismemo+blocks, bigendian=True))
1291 yo.meta.mfd.seek(thismemo*yo.meta.memo_size)
1292 yo.meta.mfd.write('\x00\x00\x00\x01' + packLongInt(len(data), bigendian=True) + data)
1293 return thismemo
1303 csv.register_dialect('dbf', DbfCsv)
1304
1305
1306
1307 VFPTIME = 1721425
1310 "Returns a two-byte integer from the value, or raises DbfError"
1311
1312 if value > 65535:
1313 raise DateOverflow("Maximum Integer size exceeded. Possible: 65535. Attempted: %d" % value)
1314 if bigendian:
1315 return struct.pack('>H', value)
1316 else:
1317 return struct.pack('<H', value)
1319 "Returns a four-byte integer from the value, or raises DbfError"
1320
1321 if value > 4294967295:
1322 raise DateOverflow("Maximum Integer size exceeded. Possible: 4294967295. Attempted: %d" % value)
1323 if bigendian:
1324 return struct.pack('>L', value)
1325 else:
1326 return struct.pack('<L', value)
1328 "Returns the date packed into three bytes: year-1900, month, day"
1329 return "%c%c%c" % (date.year-1900, date.month, date.day)
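# Worked example of the packing above: Date(2011, 7, 12) becomes the three
# characters chr(111) + chr(7) + chr(12), i.e. 'o\x07\x0c' (111 == 2011 - 1900)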
1331 "Returns an 11 byte, upper-cased, null padded string suitable for field names; raises DbfError if the string is bigger than 10 bytes"
1332 if len(string) > 10:
1333 raise DbfError("Maximum string size is ten characters -- %s has %d characters" % (string, len(string)))
1334 return struct.pack('11s', string.upper())
1336 "Returns the value in the two-byte integer passed in"
1337 if bigendian:
1338 return struct.unpack('>H', bytes)[0]
1339 else:
1340 return struct.unpack('<H', bytes)[0]
1342 "Returns the value in the four-byte integer passed in"
1343 if bigendian:
1344 return int(struct.unpack('>L', bytes)[0])
1345 else:
1346 return int(struct.unpack('<L', bytes)[0])
1348 "Returns a Date() of the packed three-byte date passed in"
1349 year, month, day = struct.unpack('<BBB', bytestr)
1350 year += 1900
1351 return Date(year, month, day)
1353 "Returns a normal, lower-cased string from a null-padded byte string"
1354 field = struct.unpack('%ds' % len(chars), chars)[0]
1355 name = []
1356 for ch in field:
1357 if ch == '\x00':
1358 break
1359 name.append(ch.lower())
1360 return ''.join(name)
1362 """Returns boolean true or false; normal rules apply to non-string values; string values
1363 must be 'y','t', 'yes', or 'true' (case insensitive) to be True"""
1364 if type(value) == str:
1365 return bool(value.lower() in ['t', 'y', 'true', 'yes'])
1366 else:
1367 return bool(value)
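# Worked examples of the rule above:
#   convertToBool('Y') -> True    convertToBool('no') -> False
#   convertToBool(1)   -> True    convertToBool('')   -> False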
1369 "called if a data type is not supported for that style of table"
1370 raise DbfError('field type is not supported.')
1372 "Returns the string in bytes with trailing white space removed"
1373 return typ(bytes.tostring().rstrip())
1375 "returns the string, truncating if string is longer than its field"
1376 string = str(string)
1377 return string.rstrip()
1379 value = struct.unpack('<q', bytes)[0]
1380 return typ(("%de-4" % value).strip())
1382 currency = int(value * 10000)
1383 if not -9223372036854775808 < currency < 9223372036854775808:
1384 raise DataOverflow("value %s is out of bounds" % value)
1385 return struct.pack('<q', currency)
1387 "Returns the ascii coded date as a Date object"
1388 return Date.fromymd(bytes.tostring())
1390 "returns the Date or datetime.date object ascii-encoded (yyyymmdd)"
1391 if moment:
1392 return "%04d%02d%02d" % moment.timetuple()[:3]
1393 return ' '
1395 return float(struct.unpack('<d', bytes)[0])
1397 return struct.pack('<d', float(value))
1399 "Returns the binary number stored in bytes in little-endian format"
1400 if typ is None or typ == 'default':
1401 return struct.unpack('<i', bytes)[0]
1402 else:
1403 return typ(struct.unpack('<i', bytes)[0])
1405 "returns value in little-endian binary format"
1406 try:
1407 value = int(value)
1408 except Exception:
1409 raise DbfError("incompatible type: %s(%s)" % (type(value), value))
1410 if not -2147483648 <= value <= 2147483647:
1411 raise DataOverflow("Integer size exceeded. Possible: -2,147,483,648..+2,147,483,647. Attempted: %d" % value)
1412 return struct.pack('<i', int(value))
1414 "Returns True if bytes is 't', 'T', 'y', or 'Y', None if '?', and False otherwise"
1415 bytes = bytes.tostring()
1416 if bytes == '?':
1417 return None
1418 return bytes in ['t','T','y','Y']
1420 "Returns 'T' if logical is True, 'F' otherwise"
1421 if type(logical) != bool:
1422 logical = convertToBool(logical)
1423 if type(logical) != bool:
1424 raise DbfError('Value %s is not logical.' % logical)
1425 return logical and 'T' or 'F'
1427 "Returns the block of data from a memo file"
1428 stringval = bytes.tostring()
1429 if stringval.strip():
1430 block = int(stringval.strip())
1431 else:
1432 block = 0
1433 return memo.get_memo(block, fielddef)
1435 "Writes string as a memo, returns the block number it was saved into"
1436 block = memo.put_memo(string)
1437 if block == 0:
1438 block = ''
1439 return "%*s" % (fielddef['length'], block)
1441 "Returns the number stored in bytes as integer if field spec for decimals is 0, float otherwise"
1442 string = bytes.tostring()
1443 if string[0:1] == '*':
1444 return None
1445 if not string.strip():
1446 string = '0'
1447 if typ == 'default':
1448 if fielddef['decimals'] == 0:
1449 return int(string)
1450 else:
1451 return float(string)
1452 else:
1453 return typ(string.strip())
1455 "returns value as ascii representation, rounding decimal portion as necessary"
1456 try:
1457 value = float(value)
1458 except Exception:
1459 raise DbfError("incompatible type: %s(%s)" % (type(value), value))
1460 decimalsize = fielddef['decimals']
1461 if decimalsize:
1462 decimalsize += 1
1463 maxintegersize = fielddef['length']-decimalsize
1464 integersize = len("%.0f" % floor(value))
1465 if integersize > maxintegersize:
1466 raise DataOverflow('Integer portion too big')
1467 return "%*.*f" % (fielddef['length'], fielddef['decimals'], value)
1486 """sets the date/time stored in moment
1487 moment must have fields year, month, day, hour, minute, second, microsecond"""
1488 bytes = [0] * 8
1489 hour = moment.hour
1490 minute = moment.minute
1491 second = moment.second
1492 millisecond = moment.microsecond // 1000
1493 time = ((hour * 3600) + (minute * 60) + second) * 1000 + millisecond
1494 bytes[4:] = updateInteger(time)
1495 bytes[:4] = updateInteger(moment.toordinal() + VFPTIME)
1496 return ''.join(bytes)
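# Layout produced above: bytes 0-3 hold the day as a Julian day number (Python's
# proleptic ordinal shifted by VFPTIME = 1721425), bytes 4-7 hold milliseconds
# since midnight; both halves are packed with the little-endian integer routine above.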
1498 "Returns the block of data from a memo file"
1499 block = struct.unpack('<i', bytes)[0]
1500 return memo.get_memo(block, fielddef)
1502 "Writes string as a memo, returns the block number it was saved into"
1503 block = memo.put_memo(string)
1504 return struct.pack('<i', block)
1506 if format[1] != '(' or format[-1] != ')':
1507 raise DbfError("Format for Character field creation is C(n), not %s" % format)
1508 length = int(format[2:-1])
1509 if not 0 < length < 255:
1510 raise ValueError
1511 decimals = 0
1512 return length, decimals
1514 length = 8
1515 decimals = 0
1516 return length, decimals
1518 length = 1
1519 decimals = 0
1520 return length, decimals
1522 length = 10
1523 decimals = 0
1524 return length, decimals
1526 if format[1] != '(' or format[-1] != ')':
1527 raise DbfError("Format for Numeric field creation is N(n,n), not %s" % format)
1528 length, decimals = format[2:-1].split(',')
1529 length = int(length)
1530 decimals = int(decimals)
1531 if not 0 < length < 18:
1532 raise ValueError
1533 if decimals and not 0 < decimals <= length - 2:
1534 raise ValueError
1535 return length, decimals
1537 length = 8
1538 decimals = 0
1539 return length, decimals
1541 length = 8
1542 decimals = 8
1543 return length, decimals
1545 length = 8
1546 decimals = 0
1547 return length, decimals
1549 length = 4
1550 decimals = 0
1551 return length, decimals
1553 length = 4
1554 decimals = 0
1555 return length, decimals
1557 if format[1] != '(' or format[-1] != ')':
1558 raise DbfError("Format for Numeric field creation is N(n,n), not %s" % format)
1559 length, decimals = format[2:-1].split(',')
1560 length = int(length)
1561 decimals = int(decimals)
1562 if not 0 < length < 21:
1563 raise ValueError
1564 if decimals and not 0 < decimals <= length - 2:
1565 raise ValueError
1566 return length, decimals
1567
1570 """Provides a framework for dbf style tables."""
1571 _version = 'basic memory table'
1572 _versionabbv = 'dbf'
1573 _fieldtypes = {
1574 'D' : { 'Type':'Date', 'Init':addDate, 'Blank':Date.today, 'Retrieve':retrieveDate, 'Update':updateDate, 'Class':None},
1575 'L' : { 'Type':'Logical', 'Init':addLogical, 'Blank':bool, 'Retrieve':retrieveLogical, 'Update':updateLogical, 'Class':None},
1576 'M' : { 'Type':'Memo', 'Init':addMemo, 'Blank':str, 'Retrieve':retrieveMemo, 'Update':updateMemo, 'Class':None} }
1577 _memoext = ''
1578 _memotypes = tuple('M', )
1579 _memoClass = _DbfMemo
1580 _yesMemoMask = ''
1581 _noMemoMask = ''
1582 _fixed_fields = ('M','D','L')
1583 _variable_fields = tuple()
1584 _character_fields = tuple('M', )
1585 _decimal_fields = tuple()
1586 _numeric_fields = tuple()
1587 _currency_fields = tuple()
1588 _dbfTableHeader = array('c', '\x00' * 32)
1589 _dbfTableHeader[0] = '\x00'
1590 _dbfTableHeader[8:10] = array('c', packShortInt(33))
1591 _dbfTableHeader[10] = '\x01'
1592 _dbfTableHeader[29] = '\x00'
1593 _dbfTableHeader = _dbfTableHeader.tostring()
1594 _dbfTableHeaderExtra = ''
1595 _supported_tables = []
1596 _read_only = False
1597 _meta_only = False
1598 _use_deleted = True
1599 backup = False
1601 "implements the weakref structure for DbfLists"
1605 yo._lists = set([s for s in yo._lists if s() is not None])
1606 return (s() for s in yo._lists if s() is not None)
1608 yo._lists = set([s for s in yo._lists if s() is not None])
1609 return len(yo._lists)
1610 def add(yo, new_list):
1611 yo._lists.add(weakref.ref(new_list))
1612 yo._lists = set([s for s in yo._lists if s() is not None])
1614 "implements the weakref structure for separate indexes"
1618 yo._indexen = set([s for s in yo._indexen if s() is not None])
1619 return (s() for s in yo._indexen if s() is not None)
1621 yo._indexen = set([s for s in yo._indexen if s() is not None])
1622 return len(yo._indexen)
1623 def add(yo, new_list):
1624 yo._indexen.add(weakref.ref(new_list))
1625 yo._indexen = set([s for s in yo._indexen if s() is not None])
1640 if len(data) != 32:
1641 raise DbfError('table header should be 32 bytes, but is %d bytes' % len(data))
1642 yo._data = array('c', data + '\x0d')
1644 "get/set code page of table"
1645 if cp is None:
1646 return yo._data[29]
1647 else:
1648 cp, sd, ld = _codepage_lookup(cp)
1649 yo._data[29] = cp
1650 return cp
1651 @property
1657 @data.setter
1659 if len(bytes) < 32:
1660 raise DbfError("length for data of %d is less than 32" % len(bytes))
1661 yo._data[:] = array('c', bytes)
1662 @property
1664 "extra dbf info (located after headers, before data records)"
1665 fieldblock = yo._data[32:]
1666 for i in range(len(fieldblock)//32+1):
1667 cr = i * 32
1668 if fieldblock[cr] == '\x0d':
1669 break
1670 else:
1671 raise DbfError("corrupt field structure")
1672 cr += 33
1673 return yo._data[cr:].tostring()
1674 @extra.setter
1676 fieldblock = yo._data[32:]
1677 for i in range(len(fieldblock)//32+1):
1678 cr = i * 32
1679 if fieldblock[cr] == '\x0d':
1680 break
1681 else:
1682 raise DbfError("corrupt field structure")
1683 cr += 33
1684 yo._data[cr:] = array('c', data)
1685 yo._data[8:10] = array('c', packShortInt(len(yo._data)))
1686 @property
1688 "number of fields (read-only)"
1689 fieldblock = yo._data[32:]
1690 for i in range(len(fieldblock)//32+1):
1691 cr = i * 32
1692 if fieldblock[cr] == '\x0d':
1693 break
1694 else:
1695 raise DbfError("corrupt field structure")
1696 return len(fieldblock[:cr]) // 32
1697 @property
1699 "field block structure"
1700 fieldblock = yo._data[32:]
1701 for i in range(len(fieldblock)//32+1):
1702 cr = i * 32
1703 if fieldblock[cr] == '\x0d':
1704 break
1705 else:
1706 raise DbfError("corrupt field structure")
1707 return fieldblock[:cr].tostring()
1708 @fields.setter
1710 fieldblock = yo._data[32:]
1711 for i in range(len(fieldblock)//32+1):
1712 cr = i * 32
1713 if fieldblock[cr] == '\x0d':
1714 break
1715 else:
1716 raise DbfError("corrupt field structure")
1717 cr += 32
1718 fieldlen = len(block)
1719 if fieldlen % 32 != 0:
1720 raise DbfError("fields structure corrupt: %d is not a multiple of 32" % fieldlen)
1721 yo._data[32:cr] = array('c', block)
1722 yo._data[8:10] = array('c', packShortInt(len(yo._data)))
1723 fieldlen = fieldlen // 32
1724 recordlen = 1
1725 for i in range(fieldlen):
1726 recordlen += ord(block[i*32+16])
1727 yo._data[10:12] = array('c', packShortInt(recordlen))
1728 @property
1730 "number of records (maximum 16,777,215)"
1731 return unpackLongInt(yo._data[4:8].tostring())
1732 @record_count.setter
1735 @property
1737 "length of a record (read_only) (max of 65,535)"
1738 return unpackShortInt(yo._data[10:12].tostring())
1739 @property
1741 "starting position of first record in file (must be within first 64K)"
1742 return unpackShortInt(yo._data[8:10].tostring())
1743 @start.setter
1746 @property
1748 "date of last table modification (read-only)"
1749 return unpackDate(yo._data[1:4].tostring())
1750 @property
1752 "dbf version"
1753 return yo._data[0]
1754 @version.setter
1758 "implements the weakref table for records"
1760 yo._meta = meta
1761 yo._weakref_list = [weakref.ref(lambda x: None)] * count
1763 maybe = yo._weakref_list[index]()
1764 if maybe is None:
1765 if index < 0:
1766 index += yo._meta.header.record_count
1767 size = yo._meta.header.record_length
1768 location = index * size + yo._meta.header.start
1769 yo._meta.dfd.seek(location)
1770 if yo._meta.dfd.tell() != location:
1771 raise ValueError("unable to seek to offset %d in file" % location)
1772 bytes = yo._meta.dfd.read(size)
1773 if not bytes:
1774 raise ValueError("unable to read record data from %s at location %d" % (yo._meta.filename, location))
1775 maybe = _DbfRecord(recnum=index, layout=yo._meta, kamikaze=bytes, _fromdisk=True)
1776 yo._weakref_list[index] = weakref.ref(maybe)
1777 return maybe
1779 yo._weakref_list.append(weakref.ref(record))
1781 yo._weakref_list[:] = []
1783 return yo._weakref_list.pop()
1785 "returns records using current index"
1787 yo._table = table
1788 yo._index = -1
1789 yo._more_records = True
1793 while yo._more_records:
1794 yo._index += 1
1795 if yo._index >= len(yo._table):
1796 yo._more_records = False
1797 continue
1798 record = yo._table[yo._index]
1799 if not yo._table.use_deleted and record.has_been_deleted:
1800 continue
1801 return record
1802 else:
1803 raise StopIteration
1805 "constructs fieldblock for disk table"
1806 fieldblock = array('c', '')
1807 memo = False
1808 yo._meta.header.version = chr(ord(yo._meta.header.version) & ord(yo._noMemoMask))
1809 for field in yo._meta.fields:
1810 if yo._meta.fields.count(field) > 1:
1811 raise DbfError("corrupted field structure (noticed in _buildHeaderFields)")
1812 fielddef = array('c', '\x00' * 32)
1813 fielddef[:11] = array('c', packStr(field))
1814 fielddef[11] = yo._meta[field]['type']
1815 fielddef[12:16] = array('c', packLongInt(yo._meta[field]['start']))
1816 fielddef[16] = chr(yo._meta[field]['length'])
1817 fielddef[17] = chr(yo._meta[field]['decimals'])
1818 fielddef[18] = chr(yo._meta[field]['flags'])
1819 fieldblock.extend(fielddef)
1820 if yo._meta[field]['type'] in yo._meta.memotypes:
1821 memo = True
1822 yo._meta.header.fields = fieldblock.tostring()
1823 if memo:
1824 yo._meta.header.version = chr(ord(yo._meta.header.version) | ord(yo._yesMemoMask))
1825 if yo._meta.memo is None:
1826 yo._meta.memo = yo._memoClass(yo._meta)
1828 "dBase III specific"
1829 if yo._meta.header.version == '\x83':
1830 try:
1831 yo._meta.memo = yo._memoClass(yo._meta)
1832 except:
1833 yo._meta.dfd.close()
1834 yo._meta.dfd = None
1835 raise
1836 if not yo._meta.ignorememos:
1837 for field in yo._meta.fields:
1838 if yo._meta[field]['type'] in yo._memotypes:
1839 if yo._meta.header.version != '\x83':
1840 yo._meta.dfd.close()
1841 yo._meta.dfd = None
1842 raise DbfError("Table structure corrupt: memo fields exist, header declares no memos")
1843 elif not os.path.exists(yo._meta.memoname):
1844 yo._meta.dfd.close()
1845 yo._meta.dfd = None
1846 raise DbfError("Table structure corrupt: memo fields exist without memo file")
1847 break
1849 "builds the FieldList of names, types, and descriptions from the disk file"
1850 yo._meta.fields[:] = []
1851 offset = 1
1852 fieldsdef = yo._meta.header.fields
1853 if len(fieldsdef) % 32 != 0:
1854 raise DbfError("field definition block corrupt: %d bytes in size" % len(fieldsdef))
1855 if len(fieldsdef) // 32 != yo.field_count:
1856 raise DbfError("Header shows %d fields, but field definition block has %d fields" % (yo.field_count, len(fieldsdef)//32))
1857 for i in range(yo.field_count):
1858 fieldblock = fieldsdef[i*32:(i+1)*32]
1859 name = unpackStr(fieldblock[:11])
1860 type = fieldblock[11]
1861 if not type in yo._meta.fieldtypes:
1862 raise DbfError("Unknown field type: %s" % type)
1863 start = offset
1864 length = ord(fieldblock[16])
1865 offset += length
1866 end = start + length
1867 decimals = ord(fieldblock[17])
1868 flags = ord(fieldblock[18])
1869 if name in yo._meta.fields:
1870 raise DbfError('Duplicate field name found: %s' % name)
1871 yo._meta.fields.append(name)
1872 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
1874 "Returns field information Name Type(Length[,Decimals])"
1875 name = yo._meta.fields[i]
1876 type = yo._meta[name]['type']
1877 length = yo._meta[name]['length']
1878 decimals = yo._meta[name]['decimals']
1879 if type in yo._decimal_fields:
1880 description = "%s %s(%d,%d)" % (name, type, length, decimals)
1881 elif type in yo._fixed_fields:
1882 description = "%s %s" % (name, type)
1883 else:
1884 description = "%s %s(%d)" % (name, type, length)
1885 return description
1887 "loads the records from disk to memory"
1888 if yo._meta_only:
1889 raise DbfError("%s has been closed, records are unavailable" % yo.filename)
1890 dfd = yo._meta.dfd
1891 header = yo._meta.header
1892 dfd.seek(header.start)
1893 allrecords = dfd.read()
1894 dfd.seek(0)
1895 length = header.record_length
1896 for i in range(header.record_count):
1897 record_data = allrecords[length*i:length*i+length]
1898 yo._table.append(_DbfRecord(i, yo._meta, record_data, _fromdisk=True))
1899 dfd.seek(0)
1901 if specs is None:
1902 specs = yo.field_names
1903 elif isinstance(specs, str):
1904 specs = specs.split(sep)
1905 else:
1906 specs = list(specs)
1907 specs = [s.strip() for s in specs]
1908 return specs
1910 "synchronizes the disk file with current data"
1911 if yo._meta.inmemory:
1912 return
1913 fd = yo._meta.dfd
1914 fd.seek(0)
1915 fd.write(yo._meta.header.data)
1916 if not headeronly:
1917 for record in yo._table:
1918 record._update_disk()
1919 fd.flush()
1920 fd.truncate(yo._meta.header.start + yo._meta.header.record_count * yo._meta.header.record_length)
1921 if 'db3' in yo._versionabbv:
1922 fd.seek(0, os.SEEK_END)
1923 fd.write('\x1a')
1924 fd.flush()
1925 fd.truncate(yo._meta.header.start + yo._meta.header.record_count * yo._meta.header.record_length + 1)
1926
1934 if name == '_table':
1935 if yo._meta.ondisk:
1936 yo._table = yo._Table(len(yo), yo._meta)
1937 else:
1938 yo._table = []
1939 yo._loadtable()
1940 return object.__getattribute__(yo, name)
1942 if type(value) == int:
1943 if not -yo._meta.header.record_count <= value < yo._meta.header.record_count:
1944 raise IndexError("Record %d is not in table." % value)
1945 return yo._table[value]
1946 elif type(value) == slice:
1947 sequence = List(desc='%s --> %s' % (yo.filename, value), field_names=yo.field_names)
1948 yo._dbflists.add(sequence)
1949 for index in range(len(yo))[value]:
1950 record = yo._table[index]
1951 if yo.use_deleted is True or not record.has_been_deleted:
1952 sequence.append(record)
1953 return sequence
1954 else:
1955 raise TypeError('type <%s> not valid for indexing' % type(value))
1956 def __init__(yo, filename=':memory:', field_specs=None, memo_size=128, ignore_memos=False,
1957 read_only=False, keep_memos=False, meta_only=False, codepage=None,
1958 numbers='default', strings=str, currency=Decimal):
1959 """open/create dbf file
1960 filename should include path if needed
1961 field_specs can be either a ;-delimited string or a list of strings
1962 memo_size is always 512 for db3 memos
1963 ignore_memos is useful if the memo file is missing or corrupt
1964 read_only will load records into memory, then close the disk file
1965 keep_memos will also load any memo fields into memory
1966 meta_only will ignore all records, keeping only basic table information
1967 codepage will override whatever is set in the table itself"""
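        # usage sketch (values taken from the module docstring): Table('temptable',
        # 'name C(30); age N(3,0); birth D') creates temptable.dbf on disk, while a
        # ':memory:' filename with the same field_specs keeps the whole table in memory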
1968 if filename[0] == filename[-1] == ':':
1969 if field_specs is None:
1970 raise DbfError("field list must be specified for memory tables")
1971 elif type(yo) is DbfTable:
1972 raise DbfError("only memory tables supported")
1973 yo._dbflists = yo._DbfLists()
1974 yo._indexen = yo._Indexen()
1975 yo._meta = meta = yo._MetaData()
1976 for datatypes, classtype in (
1977 (yo._character_fields, strings),
1978 (yo._numeric_fields, numbers),
1979 (yo._currency_fields, currency),
1980 ):
1981 for datatype in datatypes:
1982 yo._fieldtypes[datatype]['Class'] = classtype
1983 meta.numbers = numbers
1984 meta.strings = strings
1985 meta.currency = currency
1986 meta.table = weakref.ref(yo)
1987 meta.filename = filename
1988 meta.fields = []
1989 meta.fieldtypes = yo._fieldtypes
1990 meta.fixed_fields = yo._fixed_fields
1991 meta.variable_fields = yo._variable_fields
1992 meta.character_fields = yo._character_fields
1993 meta.decimal_fields = yo._decimal_fields
1994 meta.numeric_fields = yo._numeric_fields
1995 meta.memotypes = yo._memotypes
1996 meta.ignorememos = ignore_memos
1997 meta.memo_size = memo_size
1998 meta.input_decoder = codecs.getdecoder(input_decoding)
1999 meta.output_encoder = codecs.getencoder(input_decoding)
2000 meta.return_ascii = return_ascii
2001 meta.header = header = yo._TableHeader(yo._dbfTableHeader)
2002 header.extra = yo._dbfTableHeaderExtra
2003 header.data
2004 if filename[0] == filename[-1] == ':':
2005 yo._table = []
2006 meta.ondisk = False
2007 meta.inmemory = True
2008 meta.memoname = filename
2009 else:
2010 base, ext = os.path.splitext(filename)
2011 if ext == '':
2012 meta.filename = base + '.dbf'
2013 meta.memoname = base + yo._memoext
2014 meta.ondisk = True
2015 meta.inmemory = False
2016 if field_specs:
2017 if meta.ondisk:
2018 meta.dfd = open(meta.filename, 'w+b')
2019 meta.newmemofile = True
2020 yo.add_fields(field_specs)
2021 header.codepage(codepage or default_codepage)
2022 cp, sd, ld = _codepage_lookup(meta.header.codepage())
2023 meta.decoder = codecs.getdecoder(sd)
2024 meta.encoder = codecs.getencoder(sd)
2025 return
2026 try:
2027 dfd = meta.dfd = open(meta.filename, 'r+b')
2028 except IOError, e:
2029 raise DbfError(str(e))
2030 dfd.seek(0)
2031 meta.header = header = yo._TableHeader(dfd.read(32))
2032 if not header.version in yo._supported_tables:
2033 dfd.close()
2034 dfd = None
2035 raise DbfError(
2036 "%s does not support %s [%x]" %
2037 (yo._version,
2038 version_map.get(meta.header.version, 'Unknown: %s' % meta.header.version),
2039 ord(meta.header.version)))
2040 cp, sd, ld = _codepage_lookup(meta.header.codepage())
2041 yo._meta.decoder = codecs.getdecoder(sd)
2042 yo._meta.encoder = codecs.getencoder(sd)
2043 fieldblock = dfd.read(header.start - 32)
2044 for i in range(len(fieldblock)//32+1):
2045 fieldend = i * 32
2046 if fieldblock[fieldend] == '\x0d':
2047 break
2048 else:
2049 raise DbfError("corrupt field structure in header")
2050 if len(fieldblock[:fieldend]) % 32 != 0:
2051 raise DbfError("corrupt field structure in header")
2052 header.fields = fieldblock[:fieldend]
2053 header.extra = fieldblock[fieldend+1:]
2054 yo._initializeFields()
2055 yo._checkMemoIntegrity()
2056 meta.current = -1
2057 if len(yo) > 0:
2058 meta.current = 0
2059 dfd.seek(0)
2060 if meta_only:
2061 yo.close(keep_table=False, keep_memos=False)
2062 elif read_only:
2063 yo.close(keep_table=True, keep_memos=keep_memos)
2064 if codepage is not None:
2065 cp, sd, ld = _codepage_lookup(codepage)
2066 yo._meta.decoder = codecs.getdecoder(sd)
2067 yo._meta.encoder = codecs.getencoder(sd)
2068
2076 if yo._read_only:
2077 return __name__ + ".Table('%s', read_only=True)" % yo._meta.filename
2078 elif yo._meta_only:
2079 return __name__ + ".Table('%s', meta_only=True)" % yo._meta.filename
2080 else:
2081 return __name__ + ".Table('%s')" % yo._meta.filename
2083 if yo._read_only:
2084 status = "read-only"
2085 elif yo._meta_only:
2086 status = "meta-only"
2087 else:
2088 status = "read/write"
2089 str = """
2090 Table: %s
2091 Type: %s
2092 Codepage: %s
2093 Status: %s
2094 Last updated: %s
2095 Record count: %d
2096 Field count: %d
2097 Record length: %d """ % (yo.filename, version_map.get(yo._meta.header.version,
2098 'unknown - ' + hex(ord(yo._meta.header.version))), yo.codepage, status,
2099 yo.last_update, len(yo), yo.field_count, yo.record_length)
2100 str += "\n --Fields--\n"
2101 for i in range(len(yo._meta.fields)):
2102 str += "%11d) %s\n" % (i, yo._fieldLayout(i))
2103 return str
2104 @property
2106 return "%s (%s)" % code_pages[yo._meta.header.codepage()]
2107 @codepage.setter
2108 def codepage(yo, cp):
2109 cp = code_pages[yo._meta.header.codepage(cp)][0]
2110 yo._meta.decoder = codecs.getdecoder(cp)
2111 yo._meta.encoder = codecs.getencoder(cp)
2112 yo._update_disk(headeronly=True)
2113 @property
2115 "the number of fields in the table"
2116 return yo._meta.header.field_count
2117 @property
2119 "a list of the fields in the table"
2120 return yo._meta.fields[:]
2121 @property
2123 "table's file name, including path (if specified on open)"
2124 return yo._meta.filename
2125 @property
2127 "date of last update"
2128 return yo._meta.header.update
2129 @property
2131 "table's memo name (if path included in filename on open)"
2132 return yo._meta.memoname
2133 @property
2135 "number of bytes in a record"
2136 return yo._meta.header.record_length
2137 @property
2139 "index number of the current record"
2140 return yo._meta.current
2141 @property
2145 @property
2147 "process or ignore deleted records"
2148 return yo._use_deleted
2149 @use_deleted.setter
2152 @property
2154 "returns the dbf type of the table"
2155 return yo._version
2157 """adds field(s) to the table layout; format is Name Type(Length,Decimals)[; Name Type(Length,Decimals)[...]]
2158 backup table is created with _backup appended to name
2159 then modifies current structure"""
2160 all_records = [record for record in yo]
2161 if yo:
2162 yo.create_backup()
2163 yo._meta.blankrecord = None
2164 meta = yo._meta
2165 offset = meta.header.record_length
2166 fields = yo._list_fields(field_specs, sep=';')
2167 for field in fields:
2168 try:
2169 name, format = field.split()
2170 if name[0] == '_' or name[0].isdigit() or not name.replace('_','').isalnum():
2171 raise DbfError("%s invalid: field names must start with a letter, and can only contain letters, digits, and _" % name)
2172 name = name.lower()
2173 if name in meta.fields:
2174 raise DbfError("Field '%s' already exists" % name)
2175 field_type = format[0].upper()
2176 if len(name) > 10:
2177 raise DbfError("Maximum field name length is 10. '%s' is %d characters long." % (name, len(name)))
2178 if not field_type in meta.fieldtypes.keys():
2179 raise DbfError("Unknown field type: %s" % field_type)
2180 length, decimals = yo._meta.fieldtypes[field_type]['Init'](format)
2181 except ValueError:
2182 raise DbfError("invalid field specifier: %s (multiple fields should be separated with ';')" % field)
2183 start = offset
2184 end = offset + length
2185 offset = end
2186 meta.fields.append(name)
2187 meta[name] = {'type':field_type, 'start':start, 'length':length, 'end':end, 'decimals':decimals, 'flags':0}
2188 if meta[name]['type'] in yo._memotypes and meta.memo is None:
2189 meta.memo = yo._memoClass(meta)
2190 for record in yo:
2191 record[name] = meta.fieldtypes[field_type]['Blank']()
2192 yo._buildHeaderFields()
2193 yo._update_disk()
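        # usage sketch for the specifier format described above (field names are
        # illustrative):  table.add_fields('notes M; hired D')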
2194 def append(yo, kamikaze='', drop=False, multiple=1):
2195 "adds <multiple> blank records, and fills fields with dict/tuple values if present"
2196 if not yo.field_count:
2197 raise DbfError("No fields defined, cannot append")
2198 empty_table = len(yo) == 0
2199 dictdata = False
2200 tupledata = False
2201 if not isinstance(kamikaze, _DbfRecord):
2202 if isinstance(kamikaze, dict):
2203 dictdata = kamikaze
2204 kamikaze = ''
2205 elif isinstance(kamikaze, tuple):
2206 tupledata = kamikaze
2207 kamikaze = ''
2208 newrecord = _DbfRecord(recnum=yo._meta.header.record_count, layout=yo._meta, kamikaze=kamikaze)
2209 yo._table.append(newrecord)
2210 yo._meta.header.record_count += 1
2211 try:
2212 if dictdata:
2213 newrecord.gather_fields(dictdata, drop=drop)
2214 elif tupledata:
2215 for index, item in enumerate(tupledata):
2216 newrecord[index] = item
2217 elif type(kamikaze) == str:
2218 for field in yo._meta.memofields:
2219 newrecord[field] = ''
2220 elif kamikaze:
2221 for field in yo._meta.memofields:
2222 newrecord[field] = kamikaze[field]
2223 newrecord.write_record()
2224 except Exception:
2225 yo._table.pop()
2226 yo._meta.header.record_count = yo._meta.header.record_count - 1
2227 yo._update_disk()
2228 raise
2229 multiple -= 1
2230 if multiple:
2231 data = newrecord._data
2232 single = yo._meta.header.record_count
2233 total = single + multiple
2234 while single < total:
2235 multi_record = _DbfRecord(single, yo._meta, kamikaze=data)
2236 yo._table.append(multi_record)
2237 for field in yo._meta.memofields:
2238 multi_record[field] = newrecord[field]
2239 single += 1
2240 multi_record.write_record()
2241 yo._meta.header.record_count = total
2242 yo._meta.current = yo._meta.header.record_count - 1
2243 newrecord = multi_record
2244 yo._update_disk(headeronly=True)
2245 if empty_table:
2246 yo._meta.current = 0
2247 return newrecord
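# Example (sketch): append accepts nothing (a blank record), a tuple, a dict, or
# another record, plus an optional multiple= count; the values shown are made up.
#     table.append()                              # one blank record
#     table.append(('Springfield', 52000))        # fill fields in order
#     table.append({'city': 'Springfield'})       # fill fields by name
#     table.append(multiple=10)                   # ten blank records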
2248 - def bof(yo, _move=False):
2263 - def bottom(yo, get_record=False):
2264 """sets record pointer to bottom of table
2265 if get_record, seeks to and returns last (non-deleted) record
2266 DbfError if table is empty
2267 Eof if all records are deleted and use_deleted is False"""
2268 yo._meta.current = yo._meta.header.record_count
2269 if get_record:
2270 try:
2271 return yo.prev()
2272 except Bof:
2273 yo._meta.current = yo._meta.header.record_count
2274 raise Eof()
2275 - def close(yo, keep_table=False, keep_memos=False):
2276 """closes disk files
2277 ensures table data is available if keep_table
2278 ensures memo data is available if keep_memos"""
2279 yo._meta.inmemory = True
2280 if keep_table:
2281 replacement_table = []
2282 for record in yo._table:
2283 replacement_table.append(record)
2284 yo._table = replacement_table
2285 else:
2286 if yo._meta.ondisk:
2287 yo._meta_only = True
2288 if yo._meta.mfd is not None:
2289 if not keep_memos:
2290 yo._meta.ignorememos = True
2291 else:
2292 memo_fields = []
2293 for field in yo.field_names:
2294 if yo.is_memotype(field):
2295 memo_fields.append(field)
2296 for record in yo:
2297 for field in memo_fields:
2298 record[field] = record[field]
2299 yo._meta.mfd.close()
2300 yo._meta.mfd = None
2301 if yo._meta.ondisk:
2302 yo._meta.dfd.close()
2303 yo._meta.dfd = None
2304 if keep_table:
2305 yo._read_only = True
2306 yo._meta.ondisk = False
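# Example (sketch): close the disk files but keep the data readable in memory;
# the table becomes read-only afterwards.
#     table.close(keep_table=True, keep_memos=True)
#     print table.get_record(0)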
2308 "creates a backup table -- ignored if memory table"
2309 if yo.filename[0] == yo.filename[-1] == ':':
2310 return
2311 if new_name is None:
2312 upper = yo.filename.isupper()
2313 name, ext = os.path.splitext(os.path.split(yo.filename)[1])
2314 extra = '_BACKUP' if upper else '_backup'
2315 new_name = os.path.join(temp_dir, name + extra + ext)
2316 else:
2317 overwrite = True
2318 if overwrite or not yo.backup:
2319 bkup = open(new_name, 'wb')
2320 try:
2321 yo._meta.dfd.seek(0)
2322 copyfileobj(yo._meta.dfd, bkup)
2323 yo.backup = new_name
2324 finally:
2325 bkup.close()
2329 "returns current logical record, or its index"
2330 if yo._meta.current < 0:
2331 raise Bof()
2332 elif yo._meta.current >= yo._meta.header.record_count:
2333 raise Eof()
2334 if index:
2335 return yo._meta.current
2336 return yo._table[yo._meta.current]
2338 """removes field(s) from the table
2339 creates backup files with _backup appended to the file name,
2340 then modifies current structure"""
2341 doomed = yo._list_fields(doomed)
2342 for victim in doomed:
2343 if victim not in yo._meta.fields:
2344 raise DbfError("field %s not in table -- delete aborted" % victim)
2345 all_records = [record for record in yo]
2346 yo.create_backup()
2347 for victim in doomed:
2348 yo._meta.fields.pop(yo._meta.fields.index(victim))
2349 start = yo._meta[victim]['start']
2350 end = yo._meta[victim]['end']
2351 for record in yo:
2352 record._data = record._data[:start] + record._data[end:]
2353 for field in yo._meta.fields:
2354 if yo._meta[field]['start'] == end:
2355 end = yo._meta[field]['end']
2356 yo._meta[field]['start'] = start
2357 yo._meta[field]['end'] = start + yo._meta[field]['length']
2358 start = yo._meta[field]['end']
2359 yo._buildHeaderFields()
2360 yo._update_disk()
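# Example (sketch -- field names are illustrative; a backup copy is written first):
#     table.delete_fields('comment')             # a single field
#     table.delete_fields(['age', 'comment'])    # or several at once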
2361 - def eof(yo, _move=False):
2376 - def export(yo, records=None, filename=None, field_specs=None, format='csv', header=True):
2377 """writes the table using CSV or tab-delimited format, using the filename
2378 given if specified, otherwise the table name"""
2379 if filename is not None:
2380 path, filename = os.path.split(filename)
2381 else:
2382 path, filename = os.path.split(yo.filename)
2383 filename = os.path.join(path, filename)
2384 field_specs = yo._list_fields(field_specs)
2385 if records is None:
2386 records = yo
2387 format = format.lower()
2388 if format not in ('csv', 'tab', 'fixed'):
2389 raise DbfError("export format: csv, tab, or fixed -- not %s" % format)
2390 if format == 'fixed':
2391 format = 'txt'
2392 base, ext = os.path.splitext(filename)
2393 if ext.lower() in ('', '.dbf'):
2394 filename = base + "." + format[:3]
2395 fd = open(filename, 'w')
2396 try:
2397 if format == 'csv':
2398 csvfile = csv.writer(fd, dialect='dbf')
2399 if header:
2400 csvfile.writerow(field_specs)
2401 for record in records:
2402 fields = []
2403 for fieldname in field_specs:
2404 fields.append(record[fieldname])
2405 csvfile.writerow(fields)
2406 elif format == 'tab':
2407 if header:
2408 fd.write('\t'.join(field_specs) + '\n')
2409 for record in records:
2410 fields = []
2411 for fieldname in field_specs:
2412 fields.append(str(record[fieldname]))
2413 fd.write('\t'.join(fields) + '\n')
2414 else:
2415 header = open("%s_layout.txt" % os.path.splitext(filename)[0], 'w')
2416 header.write("%-15s Size\n" % "Field Name")
2417 header.write("%-15s ----\n" % ("-" * 15))
2418 sizes = []
2419 for field in field_specs:
2420 size = yo.size(field)[0]
2421 sizes.append(size)
2422 header.write("%-15s %3d\n" % (field, size))
2423 header.write('\nTotal Records in file: %d\n' % len(records))
2424 header.close()
2425 for record in records:
2426 fields = []
2427 for i, field_name in enumerate(field_specs):
2428 fields.append("%-*s" % (sizes[i], record[field_name]))
2429 fd.write(''.join(fields) + '\n')
2430 finally:
2431 fd.close()
2432 fd = None
2433 return len(records)
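# Example (sketch): export a subset of fields as tab-delimited text; the filename
# and field names are illustrative.
#     table.export(filename='cities.txt', field_specs=['city', 'population'], format='tab')
#     table.export(format='csv', header=False)   # defaults to the table's own name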
2434 - def find(yo, command):
2435 "uses exec to perform queries on the table"
2436 possible = List(desc="%s --> %s" % (yo.filename, command), field_names=yo.field_names)
2437 yo._dbflists.add(possible)
2438 result = {}
2439 select = 'result["keep"] = %s' % command
2440 g = {}
2441 use_deleted = yo.use_deleted
2442 for record in yo:
2443 result['keep'] = False
2444 g['result'] = result
2445 exec select in g, record
2446 if result['keep']:
2447 possible.append(record)
2448 record.write_record()
2449 return possible
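# Example (sketch): the command string is exec'd once per record with the record
# supplying the field names, so a plain Python expression over the fields works;
# 'population' is an illustrative field name.
#     matches = table.find("population > 50000")
#     for rec in matches:
#         print rec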
2451 "returns record at physical_index[recno]"
2452 return yo._table[recno]
2453 - def goto(yo, criteria):
2454 """changes the record pointer to the first matching (non-deleted) record
2455 criteria should be either a tuple of tuple(value, field, func) triples,
2456 or an integer to go to"""
2457 if isinstance(criteria, int):
2458 if not -yo._meta.header.record_count <= criteria < yo._meta.header.record_count:
2459 raise IndexError("Record %d does not exist" % criteria)
2460 if criteria < 0:
2461 criteria += yo._meta.header.record_count
2462 yo._meta.current = criteria
2463 return yo.current()
2464 criteria = _normalize_tuples(tuples=criteria, length=3, filler=[_nop])
2465 specs = tuple([(field, func) for value, field, func in criteria])
2466 match = tuple([value for value, field, func in criteria])
2467 current = yo.current(index=True)
2468 yo.top()
2469 while not yo.eof(_move=True):
2470 record = yo.current()
2471 results = record(*specs)
2472 if results == match:
2473 return record
2474 return yo.goto(current)
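# Example (sketch): goto with an integer positions directly on that record; with
# criteria it scans for the first record whose fields match the given values
# (missing func entries default to _nop); field names are illustrative.
#     table.goto(0)
#     table.goto((('Springfield', 'city'),))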
2476 "returns True if name is a variable-length field type"
2477 return yo._meta[name]['type'] in yo._decimal_fields
2479 "returns True if name is a memo type field"
2480 return yo._meta[name]['type'] in yo._memotypes
2481 - def new(yo, filename, field_specs=None, codepage=None):
2495 "set record pointer to next (non-deleted) record, and return it"
2496 if yo.eof(_move=True):
2497 raise Eof()
2498 return yo.current()
2500 meta = yo._meta
2501 meta.inmemory = False
2502 meta.ondisk = True
2503 yo._read_only = False
2504 yo._meta_only = False
2505 if '_table' in dir(yo):
2506 del yo._table
2507 dfd = meta.dfd = open(meta.filename, 'r+b')
2508 dfd.seek(0)
2509 meta.header = header = yo._TableHeader(dfd.read(32))
2510 if not header.version in yo._supported_tables:
2511 dfd.close()
2512 dfd = None
2513 raise DbfError("Unsupported dbf type: %s [%x]" % (version_map.get(meta.header.version, 'Unknown: %s' % meta.header.version), ord(meta.header.version)))
2514 cp, sd, ld = _codepage_lookup(meta.header.codepage())
2515 meta.decoder = codecs.getdecoder(sd)
2516 meta.encoder = codecs.getencoder(sd)
2517 fieldblock = dfd.read(header.start - 32)
2518 for i in range(len(fieldblock)//32+1):
2519 fieldend = i * 32
2520 if fieldblock[fieldend] == '\x0d':
2521 break
2522 else:
2523 raise DbfError("corrupt field structure in header")
2524 if len(fieldblock[:fieldend]) % 32 != 0:
2525 raise DbfError("corrupt field structure in header")
2526 header.fields = fieldblock[:fieldend]
2527 header.extra = fieldblock[fieldend+1:]
2528 yo._initializeFields()
2529 yo._checkMemoIntegrity()
2530 meta.current = -1
2531 if len(yo) > 0:
2532 meta.current = 0
2533 dfd.seek(0)
2534
2535 - def pack(yo, _pack=True):
2536 "physically removes all deleted records"
2537 for dbfindex in yo._indexen:
2538 dbfindex.clear()
2539 newtable = []
2540 index = 0
2541 offset = 0
2542 for record in yo._table:
2543 found = False
2544 if record.has_been_deleted and _pack:
2545 for dbflist in yo._dbflists:
2546 if dbflist._purge(record, record.record_number - offset, 1):
2547 found = True
2548 record._recnum = -1
2549 else:
2550 record._recnum = index
2551 newtable.append(record)
2552 index += 1
2553 if found:
2554 offset += 1
2555 found = False
2556 yo._table.clear()
2557 for record in newtable:
2558 yo._table.append(record)
2559 yo._meta.header.record_count = index
2560 yo._meta.current = -1
2561 yo._update_disk()
2562 yo.reindex()
2564 "set record pointer to previous (non-deleted) record, and return it"
2565 if yo.bof(_move=True):
2566 raise Bof
2567 return yo.current()
2568 - def query(yo, sql_command=None, python=None):
2569 "deprecated: use .find or .sql"
2570 if sql_command:
2571 return yo.sql(sql_command)
2572 elif python:
2573 return yo.find(python)
2574 raise DbfError("query: python parameter must be specified")
2576 for dbfindex in yo._indexen:
2577 dbfindex.reindex()
2579 "renames an existing field"
2580 if yo:
2581 yo.create_backup()
2582 if not oldname in yo._meta.fields:
2583 raise DbfError("field --%s-- does not exist -- cannot rename it." % oldname)
2584 if newname[0] == '_' or newname[0].isdigit() or not newname.replace('_','').isalnum():
2585 raise DbfError("field names cannot start with _ or digits, and can only contain the _, letters, and digits")
2586 newname = newname.lower()
2587 if newname in yo._meta.fields:
2588 raise DbfError("field --%s-- already exists" % newname)
2589 if len(newname) > 10:
2590 raise DbfError("maximum field name length is 10. '%s' is %d characters long." % (newname, len(newname)))
2591 yo._meta[newname] = yo._meta[oldname]
2592 yo._meta.fields[yo._meta.fields.index(oldname)] = newname
2593 yo._buildHeaderFields()
2594 yo._update_disk(headeronly=True)
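# Example (sketch -- field names are illustrative):
#     table.rename_field('comment', 'notes')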
2596 """resizes field (C only at this time)
2597 creates backup file, then modifies current structure"""
2598 if not 0 < new_size < 256:
2599 raise DbfError("new_size must be between 1 and 255 (use delete_fields to remove a field)")
2600 doomed = yo._list_fields(doomed)
2601 for victim in doomed:
2602 if victim not in yo._meta.fields:
2603 raise DbfError("field %s not in table -- resize aborted" % victim)
2604 all_records = [record for record in yo]
2605 yo.create_backup()
2606
2607
2608 for victim in doomed:
2609 delta = new_size - yo._meta[victim]['length']
2610 start = yo._meta[victim]['start']
2611 end = yo._meta[victim]['end']
2612 eff_end = min(yo._meta[victim]['length'], new_size)
2613 yo._meta[victim]['length'] = new_size
2614 yo._meta[victim]['end'] = start + new_size
2615 blank = array('c', ' ' * new_size)
2616
2617 for record in yo:
2618 new_data = blank[:]
2619 new_data[:eff_end] = record._data[start:start+eff_end]
2620 record._data = record._data[:start] + new_data + record._data[end:]
2621 for field in yo._meta.fields:
2622 if yo._meta[field]['start'] == end:
2623 end = yo._meta[field]['end']
2624 yo._meta[field]['start'] += delta
2625 yo._meta[field]['end'] += delta
2626 start = yo._meta[field]['end']
2627
2628
2629
2630 yo._buildHeaderFields()
2631 yo._update_disk()
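# Example (sketch): widen a character field in place (a backup copy is written
# first); the field name and new size are illustrative.
#     table.resize_field('city', 40)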
2632 - def size(yo, field):
2633 "returns size of field as a tuple of (length, decimals)"
2634 if field in yo:
2635 return (yo._meta[field]['length'], yo._meta[field]['decimals'])
2636 raise DbfError("%s is not a field in %s" % (field, yo.filename))
2637 - def sql(yo, command):
2638 "passes calls through to module level sql function"
2639 return sql(yo, command)
2641 """return list of fields suitable for creating same table layout
2642 @param fields: list of fields or None for all fields"""
2643 field_specs = []
2644 fields = yo._list_fields(fields)
2645 try:
2646 for name in fields:
2647 field_specs.append(yo._fieldLayout(yo.field_names.index(name)))
2648 except ValueError:
2649 raise DbfError("field --%s-- does not exist" % name)
2650 return field_specs
2651 - def top(yo, get_record=False):
2652 """sets record pointer to top of table; if get_record, seeks to and returns first (non-deleted) record
2653 DbfError if table is empty
2654 Bof if all records are deleted and use_deleted is False"""
2655 yo._meta.current = -1
2656 if get_record:
2657 try:
2658 return yo.next()
2659 except Eof:
2660 yo._meta.current = -1
2661 raise Bof()
2662 - def type(yo, field):
2663 "returns type of field"
2664 if field in yo:
2665 return yo._meta[field]['type']
2666 raise DbfError("%s is not a field in %s" % (field, yo.filename))
2667 - def zap(yo, areyousure=False):
2668 """removes all records from table -- this cannot be undone!
2669 areyousure must be True, else error is raised"""
2670 if areyousure:
2671 if yo._meta.inmemory:
2672 yo._table = []
2673 else:
2674 yo._table.clear()
2675 yo._meta.header.record_count = 0
2676 yo._meta.current = -1
2677 yo._update_disk()
2678 else:
2679 raise DbfError("You must say you are sure to wipe the table")
2681 """Provides an interface for working with dBase III tables."""
2682 _version = 'dBase III Plus'
2683 _versionabbv = 'db3'
2684 _fieldtypes = {
2685 'C' : {'Type':'Character', 'Retrieve':retrieveCharacter, 'Update':updateCharacter, 'Blank':str, 'Init':addCharacter, 'Class':None},
2686 'D' : {'Type':'Date', 'Retrieve':retrieveDate, 'Update':updateDate, 'Blank':Date.today, 'Init':addDate, 'Class':None},
2687 'L' : {'Type':'Logical', 'Retrieve':retrieveLogical, 'Update':updateLogical, 'Blank':bool, 'Init':addLogical, 'Class':None},
2688 'M' : {'Type':'Memo', 'Retrieve':retrieveMemo, 'Update':updateMemo, 'Blank':str, 'Init':addMemo, 'Class':None},
2689 'N' : {'Type':'Numeric', 'Retrieve':retrieveNumeric, 'Update':updateNumeric, 'Blank':int, 'Init':addNumeric, 'Class':None} }
2690 _memoext = '.dbt'
2691 _memotypes = ('M',)
2692 _memoClass = _Db3Memo
2693 _yesMemoMask = '\x80'
2694 _noMemoMask = '\x7f'
2695 _fixed_fields = ('D','L','M')
2696 _variable_fields = ('C','N')
2697 _character_fields = ('C','M')
2698 _decimal_fields = ('N',)
2699 _numeric_fields = ('N',)
2700 _currency_fields = tuple()
2701 _dbfTableHeader = array('c', '\x00' * 32)
2702 _dbfTableHeader[0] = '\x03'
2703 _dbfTableHeader[8:10] = array('c', packShortInt(33))
2704 _dbfTableHeader[10] = '\x01'
2705 _dbfTableHeader[29] = '\x03'
2706 _dbfTableHeader = _dbfTableHeader.tostring()
2707 _dbfTableHeaderExtra = ''
2708 _supported_tables = ['\x03', '\x83']
2709 _read_only = False
2710 _meta_only = False
2711 _use_deleted = True
2713 "dBase III specific"
2714 if yo._meta.header.version == '\x83':
2715 try:
2716 yo._meta.memo = yo._memoClass(yo._meta)
2717 except:
2718 yo._meta.dfd.close()
2719 yo._meta.dfd = None
2720 raise
2721 if not yo._meta.ignorememos:
2722 for field in yo._meta.fields:
2723 if yo._meta[field]['type'] in yo._memotypes:
2724 if yo._meta.header.version != '\x83':
2725 yo._meta.dfd.close()
2726 yo._meta.dfd = None
2727 raise DbfError("Table structure corrupt: memo fields exist, header declares no memos")
2728 elif not os.path.exists(yo._meta.memoname):
2729 yo._meta.dfd.close()
2730 yo._meta.dfd = None
2731 raise DbfError("Table structure corrupt: memo fields exist without memo file")
2732 break
2734 "builds the FieldList of names, types, and descriptions"
2735 yo._meta.fields[:] = []
2736 offset = 1
2737 fieldsdef = yo._meta.header.fields
2738 if len(fieldsdef) % 32 != 0:
2739 raise DbfError("field definition block corrupt: %d bytes in size" % len(fieldsdef))
2740 if len(fieldsdef) // 32 != yo.field_count:
2741 raise DbfError("Header shows %d fields, but field definition block has %d fields" % (yo.field_count, len(fieldsdef)//32))
2742 for i in range(yo.field_count):
2743 fieldblock = fieldsdef[i*32:(i+1)*32]
2744 name = unpackStr(fieldblock[:11])
2745 type = fieldblock[11]
2746 if not type in yo._meta.fieldtypes:
2747 raise DbfError("Unknown field type: %s" % type)
2748 start = offset
2749 length = ord(fieldblock[16])
2750 offset += length
2751 end = start + length
2752 decimals = ord(fieldblock[17])
2753 flags = ord(fieldblock[18])
2754 yo._meta.fields.append(name)
2755 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
2757 'Provides an interface for working with FoxPro 2 tables'
2758 _version = 'Foxpro'
2759 _versionabbv = 'fp'
2760 _fieldtypes = {
2761 'C' : {'Type':'Character', 'Retrieve':retrieveCharacter, 'Update':updateCharacter, 'Blank':str, 'Init':addCharacter, 'Class':None},
2762 'F' : {'Type':'Float', 'Retrieve':retrieveNumeric, 'Update':updateNumeric, 'Blank':float, 'Init':addVfpNumeric, 'Class':None},
2763 'N' : {'Type':'Numeric', 'Retrieve':retrieveNumeric, 'Update':updateNumeric, 'Blank':int, 'Init':addVfpNumeric, 'Class':None},
2764 'L' : {'Type':'Logical', 'Retrieve':retrieveLogical, 'Update':updateLogical, 'Blank':bool, 'Init':addLogical, 'Class':None},
2765 'D' : {'Type':'Date', 'Retrieve':retrieveDate, 'Update':updateDate, 'Blank':Date.today, 'Init':addDate, 'Class':None},
2766 'M' : {'Type':'Memo', 'Retrieve':retrieveMemo, 'Update':updateMemo, 'Blank':str, 'Init':addVfpMemo, 'Class':None},
2767 'G' : {'Type':'General', 'Retrieve':retrieveMemo, 'Update':updateMemo, 'Blank':str, 'Init':addMemo, 'Class':None},
2768 'P' : {'Type':'Picture', 'Retrieve':retrieveMemo, 'Update':updateMemo, 'Blank':str, 'Init':addMemo, 'Class':None},
2769 '0' : {'Type':'_NullFlags', 'Retrieve':unsupportedType, 'Update':unsupportedType, 'Blank':int, 'Init':None, 'Class':None} }
2770 _memoext = '.fpt'
2771 _memotypes = ('G','M','P')
2772 _memoClass = _VfpMemo
2773 _yesMemoMask = '\xf5'
2774 _noMemoMask = '\x03'
2775 _fixed_fields = ('B','D','G','I','L','M','P','T','Y')
2776 _variable_fields = ('C','F','N')
2777 _character_fields = ('C','M')
2778 _decimal_fields = ('F','N')
2779 _numeric_fields = ('F','N')
2780 _currency_fields = tuple()
2781 _supported_tables = ('\x03', '\xf5')
2782 _dbfTableHeader = array('c', '\x00' * 32)
2783 _dbfTableHeader[0] = '\x30'
2784 _dbfTableHeader[8:10] = array('c', packShortInt(33+263))
2785 _dbfTableHeader[10] = '\x01'
2786 _dbfTableHeader[29] = '\x03'
2787 _dbfTableHeader = _dbfTableHeader.tostring()
2788 _dbfTableHeaderExtra = '\x00' * 263
2789 _use_deleted = True
2791 if os.path.exists(yo._meta.memoname):
2792 try:
2793 yo._meta.memo = yo._memoClass(yo._meta)
2794 except:
2795 yo._meta.dfd.close()
2796 yo._meta.dfd = None
2797 raise
2798 if not yo._meta.ignorememos:
2799 for field in yo._meta.fields:
2800 if yo._meta[field]['type'] in yo._memotypes:
2801 if not os.path.exists(yo._meta.memoname):
2802 yo._meta.dfd.close()
2803 yo._meta.dfd = None
2804 raise DbfError("Table structure corrupt: memo fields exist without memo file")
2805 break
2807 "builds the FieldList of names, types, and descriptions"
2808 yo._meta.fields[:] = []
2809 offset = 1
2810 fieldsdef = yo._meta.header.fields
2811 if len(fieldsdef) % 32 != 0:
2812 raise DbfError("field definition block corrupt: %d bytes in size" % len(fieldsdef))
2813 if len(fieldsdef) // 32 != yo.field_count:
2814 raise DbfError("Header shows %d fields, but field definition block has %d fields" % (yo.field_count, len(fieldsdef)//32))
2815 for i in range(yo.field_count):
2816 fieldblock = fieldsdef[i*32:(i+1)*32]
2817 name = unpackStr(fieldblock[:11])
2818 type = fieldblock[11]
2819 if not type in yo._meta.fieldtypes:
2820 raise DbfError("Unknown field type: %s" % type)
2821 elif type == '0':
2822 return
2823 start = offset
2824 length = ord(fieldblock[16])
2825 offset += length
2826 end = start + length
2827 decimals = ord(fieldblock[17])
2828 flags = ord(fieldblock[18])
2829 yo._meta.fields.append(name)
2830 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
2831
2833 'Provides an interface for working with Visual FoxPro 6 tables'
2834 _version = 'Visual Foxpro v6'
2835 _versionabbv = 'vfp'
2836 _fieldtypes = {
2837 'C' : {'Type':'Character', 'Retrieve':retrieveCharacter, 'Update':updateCharacter, 'Blank':str, 'Init':addCharacter, 'Class':None},
2838 'Y' : {'Type':'Currency', 'Retrieve':retrieveCurrency, 'Update':updateCurrency, 'Blank':Decimal(), 'Init':addVfpCurrency, 'Class':None},
2839 'B' : {'Type':'Double', 'Retrieve':retrieveDouble, 'Update':updateDouble, 'Blank':float, 'Init':addVfpDouble, 'Class':None},
2840 'F' : {'Type':'Float', 'Retrieve':retrieveNumeric, 'Update':updateNumeric, 'Blank':float, 'Init':addVfpNumeric, 'Class':None},
2841 'N' : {'Type':'Numeric', 'Retrieve':retrieveNumeric, 'Update':updateNumeric, 'Blank':int, 'Init':addVfpNumeric, 'Class':None},
2842 'I' : {'Type':'Integer', 'Retrieve':retrieveInteger, 'Update':updateInteger, 'Blank':int, 'Init':addVfpInteger, 'Class':None},
2843 'L' : {'Type':'Logical', 'Retrieve':retrieveLogical, 'Update':updateLogical, 'Blank':bool, 'Init':addLogical, 'Class':None},
2844 'D' : {'Type':'Date', 'Retrieve':retrieveDate, 'Update':updateDate, 'Blank':Date.today, 'Init':addDate, 'Class':None},
2845 'T' : {'Type':'DateTime', 'Retrieve':retrieveVfpDateTime, 'Update':updateVfpDateTime, 'Blank':DateTime.now, 'Init':addVfpDateTime, 'Class':None},
2846 'M' : {'Type':'Memo', 'Retrieve':retrieveVfpMemo, 'Update':updateVfpMemo, 'Blank':str, 'Init':addVfpMemo, 'Class':None},
2847 'G' : {'Type':'General', 'Retrieve':retrieveVfpMemo, 'Update':updateVfpMemo, 'Blank':str, 'Init':addVfpMemo, 'Class':None},
2848 'P' : {'Type':'Picture', 'Retrieve':retrieveVfpMemo, 'Update':updateVfpMemo, 'Blank':str, 'Init':addVfpMemo, 'Class':None},
2849 '0' : {'Type':'_NullFlags', 'Retrieve':unsupportedType, 'Update':unsupportedType, 'Blank':int, 'Init':None, 'Class':None} }
2850 _memoext = '.fpt'
2851 _memotypes = ('G','M','P')
2852 _memoClass = _VfpMemo
2853 _yesMemoMask = '\x30'
2854 _noMemoMask = '\x30'
2855 _fixed_fields = ('B','D','G','I','L','M','P','T','Y')
2856 _variable_fields = ('C','F','N')
2857 _character_fields = ('C','M')
2858 _decimal_fields = ('F','N')
2859 _numeric_fields = ('B','F','I','N','Y')
2860 _currency_fields = ('Y',)
2861 _supported_tables = ('\x30',)
2862 _dbfTableHeader = array('c', '\x00' * 32)
2863 _dbfTableHeader[0] = '\x30'
2864 _dbfTableHeader[8:10] = array('c', packShortInt(33+263))
2865 _dbfTableHeader[10] = '\x01'
2866 _dbfTableHeader[29] = '\x03'
2867 _dbfTableHeader = _dbfTableHeader.tostring()
2868 _dbfTableHeaderExtra = '\x00' * 263
2869 _use_deleted = True
2871 if os.path.exists(yo._meta.memoname):
2872 try:
2873 yo._meta.memo = yo._memoClass(yo._meta)
2874 except:
2875 yo._meta.dfd.close()
2876 yo._meta.dfd = None
2877 raise
2878 if not yo._meta.ignorememos:
2879 for field in yo._meta.fields:
2880 if yo._meta[field]['type'] in yo._memotypes:
2881 if not os.path.exists(yo._meta.memoname):
2882 yo._meta.dfd.close()
2883 yo._meta.dfd = None
2884 raise DbfError("Table structure corrupt: memo fields exist without memo file")
2885 break
2887 "builds the FieldList of names, types, and descriptions"
2888 yo._meta.fields[:] = []
2889 offset = 1
2890 fieldsdef = yo._meta.header.fields
2891 for i in range(yo.field_count):
2892 fieldblock = fieldsdef[i*32:(i+1)*32]
2893 name = unpackStr(fieldblock[:11])
2894 type = fieldblock[11]
2895 if not type in yo._meta.fieldtypes:
2896 raise DbfError("Unknown field type: %s" % type)
2897 elif type == '0':
2898 return
2899 start = unpackLongInt(fieldblock[12:16])
2900 length = ord(fieldblock[16])
2901 offset += length
2902 end = start + length
2903 decimals = ord(fieldblock[17])
2904 flags = ord(fieldblock[18])
2905 yo._meta.fields.append(name)
2906 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
2908 "list of Dbf records, with set-like behavior"
2909 _desc = ''
2910 - def __init__(yo, new_records=None, desc=None, key=None, field_names=None):
2911 yo.field_names = field_names
2912 yo._list = []
2913 yo._set = set()
2914 if key is not None:
2915 yo.key = key
2916 if key.__doc__ is None:
2917 key.__doc__ = 'unknown'
2918 key = yo.key
2919 yo._current = -1
2920 if isinstance(new_records, yo.__class__) and key is new_records.key:
2921 yo._list = new_records._list[:]
2922 yo._set = new_records._set.copy()
2923 yo._current = 0
2924 elif new_records is not None:
2925 for record in new_records:
2926 value = key(record)
2927 item = (record.record_table, record.record_number, value)
2928 if value not in yo._set:
2929 yo._set.add(value)
2930 yo._list.append(item)
2931 yo._current = 0
2932 if desc is not None:
2933 yo._desc = desc
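# Example (sketch): a List de-duplicates on its key and supports + (union) and
# - (difference) with another List, table, or plain list; names are illustrative.
#     by_city = dbf.List(table, key=lambda rec: rec.city)
#     combined = by_city + other_list
#     leftovers = by_city - other_list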
2935 key = yo.key
2936 if isinstance(other, (DbfTable, list)):
2937 other = yo.__class__(other, key=key)
2938 if isinstance(other, yo.__class__):
2939 result = yo.__class__()
2940 result._set = yo._set.copy()
2941 result._list[:] = yo._list[:]
2942 result.key = yo.key
2943 if key is other.key:
2944 for item in other._list:
2945 if item[2] not in result._set:
2946 result._set.add(item[2])
2947 result._list.append(item)
2948 else:
2949 for rec in other:
2950 value = key(rec)
2951 if value not in result._set:
2952 result._set.add(value)
2953 result._list.append((rec.record_table, rec.record_number, value))
2954 result._current = 0 if result else -1
2955 return result
2956 return NotImplemented
2958 if isinstance(record, tuple):
2959 item = record
2960 else:
2961 item = yo.key(record)
2962 return item in yo._set
2964 if isinstance(key, int):
2965 item = yo._list.pop(key)
2966 yo._set.remove(item[2])
2967 elif isinstance(key, slice):
2968 yo._set.difference_update([item[2] for item in yo._list[key]])
2969 yo._list.__delitem__(key)
2970 elif isinstance(key, _DbfRecord):
2971 index = yo.index(key)
2972 item = yo._list.pop(index)
2973 yo._set.remove(item[2])
2974 else:
2975 raise TypeError
2977 if isinstance(key, int):
2978 count = len(yo._list)
2979 if not -count <= key < count:
2980 raise IndexError("Record %d is not in list." % key)
2981 return yo._get_record(*yo._list[key])
2982 elif isinstance(key, slice):
2983 result = yo.__class__()
2984 result._list[:] = yo._list[key]
2985 result._set = set([item[2] for item in result._list])
2986 result.key = yo.key
2987 result._current = 0 if result else -1
2988 return result
2989 elif isinstance(key, _DbfRecord):
2990 index = yo.index(key)
2991 return yo._get_record(*yo._list[index])
2992 else:
2993 raise TypeError('indices must be integers')
2995 return (table.get_record(recno) for table, recno, value in yo._list)
2997 return len(yo._list)
3003 if yo._desc:
3004 return "%s(key=%s - %s - %d records)" % (yo.__class__, yo.key.__doc__, yo._desc, len(yo._list))
3005 else:
3006 return "%s(key=%s - %d records)" % (yo.__class__, yo.key.__doc__, len(yo._list))
3008 key = yo.key
3009 if isinstance(other, (DbfTable, list)):
3010 other = yo.__class__(other, key=key)
3011 if isinstance(other, yo.__class__):
3012 result = yo.__class__()
3013 result._list[:] = other._list[:]
3014 result._set = other._set.copy()
3015 result.key = key
3016 lost = set()
3017 if key is other.key:
3018 for item in yo._list:
3019 if item[2] in result._set:
3020 result._set.remove(item[2])
3021 lost.add(item)
3022 else:
3023 for rec in other:
3024 value = key(rec)
3025 if value in result._set:
3026 result._set.remove(value)
3027 lost.add((rec.record_table, rec.record_number, value))
3028 result._list = [item for item in result._list if item not in lost]
3029 result._current = 0 if result else -1
3030 return result
3031 return NotImplemented
3033 key = yo.key
3034 if isinstance(other, (DbfTable, list)):
3035 other = yo.__class__(other, key=key)
3036 if isinstance(other, yo.__class__):
3037 result = yo.__class__()
3038 result._list[:] = yo._list[:]
3039 result._set = yo._set.copy()
3040 result.key = key
3041 lost = set()
3042 if key is other.key:
3043 for item in other._list:
3044 if item[2] in result._set:
3045 result._set.remove(item[2])
3046 lost.add(item[2])
3047 else:
3048 for rec in other:
3049 value = key(rec)
3050 if value in result._set:
3051 result._set.remove(value)
3052 lost.add(value)
3053 result._list = [item for item in result._list if item[2] not in lost]
3054 result._current = 0 if result else -1
3055 return result
3056 return NotImplemented
3058 if item[2] not in yo._set:
3059 yo._set.add(item[2])
3060 yo._list.append(item)
3061 - def _get_record(yo, table=None, rec_no=None, value=None):
3062 if table is rec_no is None:
3063 table, rec_no, value = yo._list[yo._current]
3064 return table.get_record(rec_no)
3065 - def _purge(yo, record, old_record_number, offset):
3066 partial = record.record_table, old_record_number
3067 records = sorted(yo._list, key=lambda item: (item[0], item[1]))
3068 for item in records:
3069 if partial == item[:2]:
3070 found = True
3071 break
3072 elif partial[0] is item[0] and partial[1] < item[1]:
3073 found = False
3074 break
3075 else:
3076 found = False
3077 if found:
3078 yo._list.pop(yo._list.index(item))
3079 yo._set.remove(item[2])
3080 start = records.index(item) + found
3081 for item in records[start:]:
3082 if item[0] is not partial[0]:
3083 break
3084 i = yo._list.index(item)
3085 yo._set.remove(item[2])
3086 item = item[0], (item[1] - offset), item[2]
3087 yo._list[i] = item
3088 yo._set.add(item[2])
3089 return found
3094
3096 if yo._list:
3097 yo._current = len(yo._list) - 1
3098 return yo._get_record()
3099 raise DbfError("dbf.List is empty")
3101 yo._list = []
3102 yo._set = set()
3103 yo._current = -1
3105 if yo._current < 0:
3106 raise Bof()
3107 elif yo._current == len(yo._list):
3108 raise Eof()
3109 return yo._get_record()
3110 - def extend(yo, new_records):
3126 - def goto(yo, index_number):
3127 if yo._list:
3128 if 0 <= index_number < len(yo._list):
3129 yo._current = index_number
3130 return yo._get_record()
3131 raise DbfError("index %d not in dbf.List of %d records" % (index_number, len(yo._list)))
3132 raise DbfError("dbf.List is empty")
3133 - def index(yo, sort=None, reverse=False):
3134 "sort= ((field_name, func), (field_name, func),) | 'ORIGINAL'"
3135 if sort is None:
3136 results = []
3137 for field, func in yo._meta.index:
3138 results.append("%s(%s)" % (func.__name__, field))
3139 return ', '.join(results + ['reverse=%s' % yo._meta.index_reversed])
3140 yo._meta.index_reversed = reverse
3141 if sort == 'ORIGINAL':
3142 yo._index = range(yo._meta.header.record_count)
3143 yo._meta.index = 'ORIGINAL'
3144 if reverse:
3145 yo._index.reverse()
3146 return
3147 new_sort = _normalize_tuples(tuples=sort, length=2, filler=[_nop])
3148 yo._meta.index = tuple(new_sort)
3149 yo._meta.orderresults = [''] * len(yo)
3150 for record in yo:
3151 yo._meta.orderresults[record.record_number] = record()
3152 yo._index.sort(key=lambda i: yo._meta.orderresults[i], reverse=reverse)
3153 - def index(yo, record, start=None, stop=None):
3154 item = record.record_table, record.record_number, yo.key(record)
3155 key = yo.key(record)
3156 if start is None:
3157 start = 0
3158 if stop is None:
3159 stop = len(yo._list)
3160 for i in range(start, stop):
3161 if yo._list[i][2] == key:
3162 return i
3163 else:
3164 raise ValueError("dbf.List.index(x): <x=%r> not in list" % (key,))
3170 - def key(yo, record):
3174 if yo._current < len(yo._list):
3175 yo._current += 1
3176 if yo._current < len(yo._list):
3177 return yo._get_record()
3178 raise Eof()
3179 - def pop(yo, index=None):
3180 if index is None:
3181 table, recno, value = yo._list.pop()
3182 else:
3183 table, recno, value = yo._list.pop(index)
3184 yo._set.remove(value)
3185 return yo._get_record(table, recno, value)
3187 if yo._current >= 0:
3188 yo._current -= 1
3189 if yo._current > -1:
3190 return yo._get_record()
3191 raise Bof()
3199 if yo._list:
3200 yo._current = 0
3201 return yo._get_record()
3202 raise DbfError("dbf.List is empty")
3203 - def sort(yo, key=None, reverse=False):
3207
3210 "returns records using this index"
3212 yo.table = table
3213 yo.records = records
3214 yo.index = 0
3226 - def __init__(yo, table, key, field_names=None):
3227 yo._table = table
3228 yo._values = []
3229 yo._rec_by_val = []
3230 yo._records = {}
3231 yo.__doc__ = key.__doc__ or 'unknown'
3232 yo.key = key
3233 yo.field_names = field_names or table.field_names
3234 for record in table:
3235 value = key(record)
3236 if value is DoNotIndex:
3237 continue
3238 rec_num = record.record_number
3239 if not isinstance(value, tuple):
3240 value = (value, )
3241 vindex = bisect_right(yo._values, value)
3242 yo._values.insert(vindex, value)
3243 yo._rec_by_val.insert(vindex, rec_num)
3244 yo._records[rec_num] = value
3245 table._indexen.add(yo)
3247 rec_num = record.record_number
3248 if rec_num in yo._records:
3249 value = yo._records[rec_num]
3250 vindex = bisect_left(yo._values, value)
3251 yo._values.pop(vindex)
3252 yo._rec_by_val.pop(vindex)
3253 value = yo.key(record)
3254 if value is DoNotIndex:
3255 return
3256 if not isinstance(value, tuple):
3257 value = (value, )
3258 vindex = bisect_right(yo._values, value)
3259 yo._values.insert(vindex, value)
3260 yo._rec_by_val.insert(vindex, rec_num)
3261 yo._records[rec_num] = value
3263 if isinstance(match, _DbfRecord):
3264 if match.record_table is yo._table:
3265 return match.record_number in yo._records
3266 match = yo.key(match)
3267 elif not isinstance(match, tuple):
3268 match = (match, )
3269 return yo.find(match) != -1
3271 if isinstance(key, int):
3272 count = len(yo._values)
3273 if not -count <= key < count:
3274 raise IndexError("Record %d is not in list." % key)
3275 rec_num = yo._rec_by_val[key]
3276 return yo._table.get_record(rec_num)
3277 elif isinstance(key, slice):
3278 result = List(field_names=yo._table.field_names)
3279 yo._table._dbflists.add(result)
3280 start, stop, step = key.start, key.stop, key.step
3281 if start is None: start = 0
3282 if stop is None: stop = len(yo._rec_by_val)
3283 if step is None: step = 1
3284 for loc in range(start, stop, step):
3285 record = yo._table.get_record(yo._rec_by_val[loc])
3286 result._maybe_add(item=(yo._table, yo._rec_by_val[loc], result.key(record)))
3287 result._current = 0 if result else -1
3288 return result
3289 elif isinstance (key, (str, unicode, tuple, _DbfRecord)):
3290 if isinstance(key, _DbfRecord):
3291 key = yo.key(key)
3292 elif not isinstance(key, tuple):
3293 key = (key, )
3294 loc = yo.find(key)
3295 if loc == -1:
3296 raise KeyError(key)
3297 return yo._table.get_record(yo._rec_by_val[loc])
3298 else:
3299 raise TypeError('indices must be integers, match objects must be strings or tuples')
3303 yo._table.close()
3304 yo._values[:] = []
3305 yo._rec_by_val[:] = []
3306 yo._records.clear()
3307 return False
3311 return len(yo._records)
3313 target = target[:len(match)]
3314 if isinstance(match[-1], (str, unicode)):
3315 target = list(target)
3316 target[-1] = target[-1][:len(match[-1])]
3317 target = tuple(target)
3318 return target == match
3320 value = yo._records.get(rec_num)
3321 if value is not None:
3322 vindex = bisect_left(yo._values, value)
3323 del yo._records[rec_num]
3324 yo._values.pop(vindex)
3325 yo._rec_by_val.pop(vindex)
3326 - def _search(yo, match, lo=0, hi=None):
3327 if hi is None:
3328 hi = len(yo._values)
3329 return bisect_left(yo._values, match, lo, hi)
3331 "removes all entries from index"
3332 yo._values[:] = []
3333 yo._rec_by_val[:] = []
3334 yo._records.clear()
3337 - def find(yo, match, partial=False):
3338 "returns numeric index of (partial) match, or -1"
3339 if isinstance(match, _DbfRecord):
3340 if match.record_number in yo._records:
3341 return yo._values.index(yo._records[match.record_number])
3342 else:
3343 return -1
3344 if not isinstance(match, tuple):
3345 match = (match, )
3346 loc = yo._search(match)
3347 while loc < len(yo._values) and yo._values[loc] == match:
3348 if not yo._table.use_deleted and yo._table.get_record(yo._rec_by_val[loc]).has_been_deleted:
3349 loc += 1
3350 continue
3351 return loc
3352 if partial:
3353 while loc < len(yo._values) and yo._partial_match(yo._values[loc], match):
3354 if not yo._table.use_deleted and yo._table.get_record(yo._rec_by_val[loc]).has_been_deleted:
3355 loc += 1
3356 continue
3357 return loc
3358 return -1
3360 "returns numeric index of either (partial) match, or position of where match would be"
3361 if isinstance(match, _DbfRecord):
3362 if match.record_number in yo._records:
3363 return yo._values.index(yo._records[match.record_number])
3364 else:
3365 match = yo.key(match)
3366 if not isinstance(match, tuple):
3367 match = (match, )
3368 loc = yo._search(match)
3369 return loc
3370 - def index(yo, match, partial=False):
3371 "returns numeric index of (partial) match, or raises ValueError"
3372 loc = yo.find(match, partial)
3373 if loc == -1:
3374 if isinstance(match, _DbfRecord):
3375 raise ValueError("table <%s> record [%d] not in index <%s>" % (yo._table.filename, match.record_number, yo.__doc__))
3376 else:
3377 raise ValueError("match criteria <%s> not in index" % (match, ))
3378 return loc
3380 "reindexes all records"
3381 for record in yo._table:
3382 yo(record)
3383 - def query(yo, sql_command=None, python=None):
3384 """recognized sql commands are SELECT, UPDATE, REPLACE, INSERT, DELETE, and RECALL"""
3385 if sql_command:
3386 return sql(yo, sql_command)
3387 elif python is None:
3388 raise DbfError("query: python parameter must be specified")
3389 possible = List(desc="%s --> %s" % (yo._table.filename, python), field_names=yo._table.field_names)
3390 yo._table._dbflists.add(possible)
3391 query_result = {}
3392 select = 'query_result["keep"] = %s' % python
3393 g = {}
3394 for record in yo:
3395 query_result['keep'] = False
3396 g['query_result'] = query_result
3397 exec select in g, record
3398 if query_result['keep']:
3399 possible.append(record)
3400 record.write_record()
3401 return possible
3402 - def search(yo, match, partial=False):
3403 "returns dbf.List of all (partially) matching records"
3404 result = List(field_names=yo._table.field_names)
3405 yo._table._dbflists.add(result)
3406 if not isinstance(match, tuple):
3407 match = (match, )
3408 loc = yo._search(match)
3409 if loc == len(yo._values):
3410 return result
3411 while loc < len(yo._values) and yo._values[loc] == match:
3412 record = yo._table.get_record(yo._rec_by_val[loc])
3413 if not yo._table.use_deleted and record.has_been_deleted:
3414 loc += 1
3415 continue
3416 result._maybe_add(item=(yo._table, yo._rec_by_val[loc], result.key(record)))
3417 loc += 1
3418 if partial:
3419 while loc < len(yo._values) and yo._partial_match(yo._values[loc], match):
3420 record = yo._table.get_record(yo._rec_by_val[loc])
3421 if not yo._table.use_deleted and record.has_been_deleted:
3422 loc += 1
3423 continue
3424 result._maybe_add(item=(yo._table, yo._rec_by_val[loc], result.key(record)))
3425 loc += 1
3426 return result
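# Example (sketch): assuming an index has been built over the table (for instance
# via the table's create_index helper) with key=lambda rec: rec.city, search
# returns a dbf.List of matches and find returns the numeric position (or -1).
#     idx = table.create_index(lambda rec: rec.city)
#     hits = idx.search('Spring', partial=True)
#     where = idx.find(('Springfield',))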
3427
3428
3429 table_types = {
3430 'db3' : Db3Table,
3431 'fp' : FpTable,
3432 'vfp' : VfpTable,
3433 'dbf' : DbfTable,
3434 }
3435
3436 version_map = {
3437 '\x02' : 'FoxBASE',
3438 '\x03' : 'dBase III Plus',
3439 '\x04' : 'dBase IV',
3440 '\x05' : 'dBase V',
3441 '\x30' : 'Visual FoxPro',
3442 '\x31' : 'Visual FoxPro (auto increment field)',
3443 '\x43' : 'dBase IV SQL',
3444 '\x7b' : 'dBase IV w/memos',
3445 '\x83' : 'dBase III Plus w/memos',
3446 '\x8b' : 'dBase IV w/memos',
3447 '\x8e' : 'dBase IV w/SQL table',
3448 '\xf5' : 'FoxPro w/memos'}
3449
3450 code_pages = {
3451 '\x00' : ('ascii', "plain ol' ascii"),
3452 '\x01' : ('cp437', 'U.S. MS-DOS'),
3453 '\x02' : ('cp850', 'International MS-DOS'),
3454 '\x03' : ('cp1252', 'Windows ANSI'),
3455 '\x04' : ('mac_roman', 'Standard Macintosh'),
3456 '\x08' : ('cp865', 'Danish OEM'),
3457 '\x09' : ('cp437', 'Dutch OEM'),
3458 '\x0A' : ('cp850', 'Dutch OEM (secondary)'),
3459 '\x0B' : ('cp437', 'Finnish OEM'),
3460 '\x0D' : ('cp437', 'French OEM'),
3461 '\x0E' : ('cp850', 'French OEM (secondary)'),
3462 '\x0F' : ('cp437', 'German OEM'),
3463 '\x10' : ('cp850', 'German OEM (secondary)'),
3464 '\x11' : ('cp437', 'Italian OEM'),
3465 '\x12' : ('cp850', 'Italian OEM (secondary)'),
3466 '\x13' : ('cp932', 'Japanese Shift-JIS'),
3467 '\x14' : ('cp850', 'Spanish OEM (secondary)'),
3468 '\x15' : ('cp437', 'Swedish OEM'),
3469 '\x16' : ('cp850', 'Swedish OEM (secondary)'),
3470 '\x17' : ('cp865', 'Norwegian OEM'),
3471 '\x18' : ('cp437', 'Spanish OEM'),
3472 '\x19' : ('cp437', 'English OEM (Britain)'),
3473 '\x1A' : ('cp850', 'English OEM (Britain) (secondary)'),
3474 '\x1B' : ('cp437', 'English OEM (U.S.)'),
3475 '\x1C' : ('cp863', 'French OEM (Canada)'),
3476 '\x1D' : ('cp850', 'French OEM (secondary)'),
3477 '\x1F' : ('cp852', 'Czech OEM'),
3478 '\x22' : ('cp852', 'Hungarian OEM'),
3479 '\x23' : ('cp852', 'Polish OEM'),
3480 '\x24' : ('cp860', 'Portuguese OEM'),
3481 '\x25' : ('cp850', 'Portuguese OEM (secondary)'),
3482 '\x26' : ('cp866', 'Russian OEM'),
3483 '\x37' : ('cp850', 'English OEM (U.S.) (secondary)'),
3484 '\x40' : ('cp852', 'Romanian OEM'),
3485 '\x4D' : ('cp936', 'Chinese GBK (PRC)'),
3486 '\x4E' : ('cp949', 'Korean (ANSI/OEM)'),
3487 '\x4F' : ('cp950', 'Chinese Big 5 (Taiwan)'),
3488 '\x50' : ('cp874', 'Thai (ANSI/OEM)'),
3489 '\x57' : ('cp1252', 'ANSI'),
3490 '\x58' : ('cp1252', 'Western European ANSI'),
3491 '\x59' : ('cp1252', 'Spanish ANSI'),
3492 '\x64' : ('cp852', 'Eastern European MS-DOS'),
3493 '\x65' : ('cp866', 'Russian MS-DOS'),
3494 '\x66' : ('cp865', 'Nordic MS-DOS'),
3495 '\x67' : ('cp861', 'Icelandic MS-DOS'),
3496 '\x68' : (None, 'Kamenicky (Czech) MS-DOS'),
3497 '\x69' : (None, 'Mazovia (Polish) MS-DOS'),
3498 '\x6a' : ('cp737', 'Greek MS-DOS (437G)'),
3499 '\x6b' : ('cp857', 'Turkish MS-DOS'),
3500 '\x78' : ('cp950', 'Traditional Chinese (Hong Kong SAR, Taiwan) Windows'),
3501 '\x79' : ('cp949', 'Korean Windows'),
3502 '\x7a' : ('cp936', 'Chinese Simplified (PRC, Singapore) Windows'),
3503 '\x7b' : ('cp932', 'Japanese Windows'),
3504 '\x7c' : ('cp874', 'Thai Windows'),
3505 '\x7d' : ('cp1255', 'Hebrew Windows'),
3506 '\x7e' : ('cp1256', 'Arabic Windows'),
3507 '\xc8' : ('cp1250', 'Eastern European Windows'),
3508 '\xc9' : ('cp1251', 'Russian Windows'),
3509 '\xca' : ('cp1254', 'Turkish Windows'),
3510 '\xcb' : ('cp1253', 'Greek Windows'),
3511 '\x96' : ('mac_cyrillic', 'Russian Macintosh'),
3512 '\x97' : ('mac_latin2', 'Macintosh EE'),
3513 '\x98' : ('mac_greek', 'Greek Macintosh') }
3514
3515
3516
3517 -def sql_select(records, chosen_fields, condition, field_names):
3518 if chosen_fields != '*':
3519 field_names = chosen_fields.replace(' ','').split(',')
3520 result = condition(records)
3521 result.modified = 0, 'record' + ('','s')[len(result)>1]
3522 result.field_names = field_names
3523 return result
3524
3525 -def sql_update(records, command, condition, field_names):
3526 possible = condition(records)
3527 modified = sql_cmd(command, field_names)(possible)
3528 possible.modified = modified, 'record' + ('','s')[modified>1]
3529 return possible
3530
3531 -def sql_delete(records, dead_fields, condition, field_names):
3532 deleted = condition(records)
3533 deleted.modified = len(deleted), 'record' + ('','s')[len(deleted)>1]
3534 deleted.field_names = field_names
3535 if dead_fields == '*':
3536 for record in deleted:
3537 record.delete_record()
3538 record.write_record()
3539 else:
3540 keep = [f for f in field_names if f not in dead_fields.replace(' ','').split(',')]
3541 for record in deleted:
3542 record.reset_record(keep_fields=keep)
3543 record.write_record()
3544 return deleted
3545
3546 -def sql_recall(records, all_fields, condition, field_names):
3547 if all_fields != '*':
3548 raise DbfError('SQL RECALL: fields must be * (only able to recover at the record level)')
3549 revivified = List()
3550 tables = set()
3551 for record in records:
3552 tables.add(record.record_table)
3553 old_setting = dict()
3554 for table in tables:
3555 old_setting[table] = table.use_deleted
3556 table.use_deleted = True
3557 for record in condition(records):
3558 if record.has_been_deleted:
3559 revivified.append(record)
3560 record.undelete_record()
3561 record.write_record()
3562 for table in tables:
3563 table.use_deleted = old_setting[table]
3564 revivified.modified = len(revivified), 'record' + ('','s')[len(revivified)>1]
3565 revivified.field_names = field_names
3566 return revivified
3567
3568 -def sql_add(records, new_fields, condition, field_names):
3569 tables = set()
3570 possible = condition(records)
3571 for record in possible:
3572 tables.add(record.record_table)
3573 for table in tables:
3574 table.add_fields(new_fields)
3575 possible.modified = len(tables), 'table' + ('','s')[len(tables)>1]
3576 possible.field_names = field_names
3577 return possible
3578
3579 -def sql_drop(records, dead_fields, condition, field_names):
3580 tables = set()
3581 possible = condition(records)
3582 for record in possible:
3583 tables.add(record.record_table)
3584 for table in tables:
3585 table.delete_fields(dead_fields)
3586 possible.modified = len(tables), 'table' + ('','s')[len(tables)>1]
3587 possible.field_names = field_names
3588 return possible
3589
3590 -def sql_pack(records, command, condition, field_names):
3591 tables = set()
3592 possible = condition(records)
3593 for record in possible:
3594 tables.add(record.record_table)
3595 for table in tables:
3596 table.pack()
3597 possible.modified = len(tables), 'table' + ('','s')[len(tables)>1]
3598 possible.field_names = field_names
3599 return possible
3600
3601 -def sql_resize(records, fieldname_newsize, condition, field_names):
3602 tables = set()
3603 possible = condition(records)
3604 for record in possible:
3605 tables.add(record.record_table)
3606 fieldname, newsize = fieldname_newsize.split()
3607 newsize = int(newsize)
3608 for table in tables:
3609 table.resize_field(fieldname, newsize)
3610 possible.modified = len(tables), 'table' + ('','s')[len(tables)>1]
3611 possible.field_names = field_names
3612 return possible
3613
3615 "creates a function matching the sql criteria"
3616 function = """def func(records):
3617 \"\"\"%s\"\"\"
3618 matched = List(field_names=records[0].field_names)
3619 for rec in records:
3620 %s
3621
3622 if %s:
3623 matched.append(rec)
3624 return matched"""
3625 fields = []
3626 for field in records[0].field_names:
3627 if field in criteria:
3628 fields.append(field)
3629 fields = '\n '.join(['%s = rec.%s' % (field, field) for field in fields])
3630 if 'record_number' in criteria:
3631 fields += '\n record_number = rec.record_number'
3632 g = sql_user_functions.copy()
3633 g['List'] = List
3634 function %= (criteria, fields, criteria)
3635
3636 exec function in g
3637 return g['func']
3638
3639 -def sql_cmd(command, field_names):
3640 "creates a function matching to apply command to each record in records"
3641 function = """def func(records):
3642 \"\"\"%s\"\"\"
3643 changed = 0
3644 for rec in records:
3645 %s
3646
3647 %s
3648
3649 %s
3650 changed += rec.write_record()
3651 return changed"""
3652 fields = []
3653 for field in field_names:
3654 if field in command:
3655 fields.append(field)
3656 pre_fields = '\n '.join(['%s = rec.%s' % (field, field) for field in fields])
3657 post_fields = '\n '.join(['rec.%s = %s' % (field, field) for field in fields])
3658 g = sql_user_functions.copy()
3659 if ' with ' in command.lower():
3660 offset = command.lower().index(' with ')
3661 command = command[:offset] + ' = ' + command[offset+6:]
3662 function %= (command, pre_fields, command, post_fields)
3663
3664 exec function in g
3665 return g['func']
3666
3667 -def sql(records, command):
3668 """recognized sql commands are SELECT, UPDATE | REPLACE, DELETE, RECALL, ADD, DROP"""
3669 close_table = False
3670 if isinstance(records, (str, unicode)):
3671 records = Table(records)
3672 close_table = True
3673 try:
3674 sql_command = command
3675 if ' where ' in command:
3676 command, condition = command.split(' where ', 1)
3677 condition = sql_criteria(records, condition)
3678 else:
3679 def condition(records):
3680 return records[:]
3681 name, command = command.split(' ', 1)
3682 command = command.strip()
3683 name = name.lower()
3684 field_names = records[0].field_names
3685 if sql_functions.get(name) is None:
3686 raise DbfError('unknown SQL command: %s' % name.upper())
3687 result = sql_functions[name](records, command, condition, field_names)
3688 tables = set()
3689 for record in result:
3690 tables.add(record.record_table)
3691 for list_table in tables:
3692 list_table._dbflists.add(result)
3693 finally:
3694 if close_table:
3695 records.close()
3696 return result
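# Example (sketch): the table and field names below are illustrative; sql() also
# accepts a filename in place of an open table and will open/close it itself.
#     hits = dbf.sql(table, "select city, population where population > 50000")
#     dbf.sql(table, "update population with population + 1 where city == 'Springfield'")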
3697
3698 sql_functions = {
3699 'select' : sql_select,
3700 'update' : sql_update,
3701 'replace': sql_update,
3702 'insert' : None,
3703 'delete' : sql_delete,
3704 'recall' : sql_recall,
3705 'add' : sql_add,
3706 'drop' : sql_drop,
3707 'count' : None,
3708 'pack' : sql_pack,
3709 'resize' : sql_resize,
3710 }
3711
3712
3713 -def _nop(value):
3714 "returns parameter unchanged"
3715 return value
3717 "ensures each tuple is the same length, using filler[-missing] for the gaps"
3718 final = []
3719 for t in tuples:
3720 if len(t) < length:
3721 final.append( tuple([item for item in t] + filler[len(t)-length:]) )
3722 else:
3723 final.append(t)
3724 return tuple(final)
3726 if cp not in code_pages:
3727 for code_page in sorted(code_pages.keys()):
3728 sd, ld = code_pages[code_page]
3729 if cp == sd or cp == ld:
3730 if sd is None:
3731 raise DbfError("Unsupported codepage: %s" % ld)
3732 cp = code_page
3733 break
3734 else:
3735 raise DbfError("Unsupported codepage: %s" % cp)
3736 sd, ld = code_pages[cp]
3737 return cp, sd, ld
3738
3739
3740 -def ascii(new_setting=None):
3747 -def codepage(cp=None):
3748 "get/set default codepage for any new tables"
3749 global default_codepage
3750 cp, sd, ld = _codepage_lookup(cp or default_codepage)
3751 default_codepage = sd
3752 return "%s (LDID: 0x%02x - %s)" % (sd, ord(cp), ld)
3760 version = 'dBase IV w/memos (non-functional)'
3761 _versionabbv = 'db4'
3762 _fieldtypes = {
3763 'C' : {'Type':'Character', 'Retrieve':retrieveCharacter, 'Update':updateCharacter, 'Blank':str, 'Init':addCharacter},
3764 'Y' : {'Type':'Currency', 'Retrieve':retrieveCurrency, 'Update':updateCurrency, 'Blank':Decimal(), 'Init':addVfpCurrency},
3765 'B' : {'Type':'Double', 'Retrieve':retrieveDouble, 'Update':updateDouble, 'Blank':float, 'Init':addVfpDouble},
3766 'F' : {'Type':'Float', 'Retrieve':retrieveNumeric, 'Update':updateNumeric, 'Blank':float, 'Init':addVfpNumeric},
3767 'N' : {'Type':'Numeric', 'Retrieve':retrieveNumeric, 'Update':updateNumeric, 'Blank':int, 'Init':addVfpNumeric},
3768 'I' : {'Type':'Integer', 'Retrieve':retrieveInteger, 'Update':updateInteger, 'Blank':int, 'Init':addVfpInteger},
3769 'L' : {'Type':'Logical', 'Retrieve':retrieveLogical, 'Update':updateLogical, 'Blank':bool, 'Init':addLogical},
3770 'D' : {'Type':'Date', 'Retrieve':retrieveDate, 'Update':updateDate, 'Blank':Date.today, 'Init':addDate},
3771 'T' : {'Type':'DateTime', 'Retrieve':retrieveVfpDateTime, 'Update':updateVfpDateTime, 'Blank':DateTime.now, 'Init':addVfpDateTime},
3772 'M' : {'Type':'Memo', 'Retrieve':retrieveMemo, 'Update':updateMemo, 'Blank':str, 'Init':addMemo},
3773 'G' : {'Type':'General', 'Retrieve':retrieveMemo, 'Update':updateMemo, 'Blank':str, 'Init':addMemo},
3774 'P' : {'Type':'Picture', 'Retrieve':retrieveMemo, 'Update':updateMemo, 'Blank':str, 'Init':addMemo},
3775 '0' : {'Type':'_NullFlags', 'Retrieve':unsupportedType, 'Update':unsupportedType, 'Blank':int, 'Init':None} }
3776 _memoext = '.dbt'
3777 _memotypes = ('G','M','P')
3778 _memoClass = _VfpMemo
3779 _yesMemoMask = '\x8b'
3780 _noMemoMask = '\x04'
3781 _fixed_fields = ('B','D','G','I','L','M','P','T','Y')
3782 _variable_fields = ('C','F','N')
3783 _character_fields = ('C','M')
3784 _decimal_fields = ('F','N')
3785 _numeric_fields = ('B','F','I','N','Y')
3786 _currency_fields = ('Y',)
3787 _supported_tables = ('\x04', '\x8b')
3788 _dbfTableHeader = ['\x00'] * 32
3789 _dbfTableHeader[0] = '\x8b'
3790 _dbfTableHeader[10] = '\x01'
3791 _dbfTableHeader[29] = '\x03'
3792 _dbfTableHeader = ''.join(_dbfTableHeader)
3793 _dbfTableHeaderExtra = ''
3794 _use_deleted = True
3796 "dBase III specific"
3797 if yo._meta.header.version == '\x8b':
3798 try:
3799 yo._meta.memo = yo._memoClass(yo._meta)
3800 except:
3801 yo._meta.dfd.close()
3802 yo._meta.dfd = None
3803 raise
3804 if not yo._meta.ignorememos:
3805 for field in yo._meta.fields:
3806 if yo._meta[field]['type'] in yo._memotypes:
3807 if yo._meta.header.version != '\x8b':
3808 yo._meta.dfd.close()
3809 yo._meta.dfd = None
3810 raise DbfError("Table structure corrupt: memo fields exist, header declares no memos")
3811 elif not os.path.exists(yo._meta.memoname):
3812 yo._meta.dfd.close()
3813 yo._meta.dfd = None
3814 raise DbfError("Table structure corrupt: memo fields exist without memo file")
3815 break
3816
3817
3818
3819 -def Table(
3820 filename,
3821 field_specs='',
3822 memo_size=128,
3823 ignore_memos=False,
3824 read_only=False,
3825 keep_memos=False,
3826 meta_only=False,
3827 dbf_type=None,
3828 codepage=None,
3829 numbers='default',
3830 strings=str,
3831 currency=Decimal,
3832 ):
3833 "returns an open table of the correct dbf_type, or creates it if field_specs is given"
3834 if dbf_type is None and isinstance(filename, DbfTable):
3835 return filename
3836 if field_specs and dbf_type is None:
3837 dbf_type = default_type
3838 if dbf_type is not None:
3839 dbf_type = dbf_type.lower()
3840 table = table_types.get(dbf_type)
3841 if table is None:
3842 raise DbfError("Unknown table type: %s" % dbf_type)
3843 return table(filename, field_specs, memo_size, ignore_memos, read_only, keep_memos, meta_only, codepage, numbers, strings, currency)
3844 else:
3845 possibles = guess_table_type(filename)
3846 if len(possibles) == 1:
3847 return possibles[0][2](filename, field_specs, memo_size, ignore_memos, \
3848 read_only, keep_memos, meta_only, codepage, numbers, strings, currency)
3849 else:
3850 for type, desc, cls in possibles:
3851 if type == default_type:
3852 return cls(filename, field_specs, memo_size, ignore_memos, \
3853 read_only, keep_memos, meta_only, codepage, numbers, strings, currency)
3854 else:
3855 types = ', '.join(["%s" % item[1] for item in possibles])
3856 abbrs = '[' + ' | '.join(["%s" % item[0] for item in possibles]) + ']'
3857 raise DbfError("Table could be any of %s. Please specify %s when opening" % (types, abbrs))
3859 "returns integers 0 - len(sequence)"
3860 for i in xrange(len(sequence)):
3861 yield i
3886
3888 "adds fields to an existing table"
3889 table = Table(table_name)
3890 try:
3891 table.add_fields(field_specs)
3892 finally:
3893 table.close()
3901 -def export(table_name, filename='', fields='', format='csv', header=True):
3902 "creates a csv or tab-delimited file from an existing table"
3903 if fields is None:
3904 fields = []
3905 table = Table(table_name)
3906 try:
3907 table.export(filename=filename, field_specs=fields, format=format, header=header)
3908 finally:
3909 table.close()
3911 "prints the first record of a table"
3912 table = Table(table_name)
3913 try:
3914 print str(table[0])
3915 finally:
3916 table.close()
3917 -def from_csv(csvfile, to_disk=False, filename=None, field_names=None, extra_fields=None, dbf_type='db3', memo_size=64, min_field_size=1):
3918 """creates a Character table from a csv file
3919 to_disk will create a table with the same name
3920 filename will be used if provided
3921 field_names default to f0, f1, f2, etc, unless specified (list)
3922 extra_fields can be used to add additional fields -- should be normal field specifiers (list)"""
3923 reader = csv.reader(open(csvfile))
3924 if field_names:
3925 field_names = ['%s M' % fn for fn in field_names]
3926 else:
3927 field_names = ['f0 M']
3928 mtable = Table(':memory:', [field_names[0]], dbf_type=dbf_type, memo_size=memo_size)
3929 fields_so_far = 1
3930 for row in reader:
3931 while fields_so_far < len(row):
3932 if fields_so_far == len(field_names):
3933 field_names.append('f%d M' % fields_so_far)
3934 mtable.add_fields(field_names[fields_so_far])
3935 fields_so_far += 1
3936 mtable.append(tuple(row))
3937 if filename:
3938 to_disk = True
3939 if not to_disk:
3940 if extra_fields:
3941 mtable.add_fields(extra_fields)
3942 else:
3943 if not filename:
3944 filename = os.path.splitext(csvfile)[0]
3945 length = [min_field_size] * len(field_names)
3946 for record in mtable:
3947 for i in index(record.field_names):
3948 length[i] = max(length[i], len(record[i]))
3949 fields = mtable.field_names
3950 fielddef = []
3951 for i in index(length):
3952 if length[i] < 255:
3953 fielddef.append('%s C(%d)' % (fields[i], length[i]))
3954 else:
3955 fielddef.append('%s M' % (fields[i]))
3956 if extra_fields:
3957 fielddef.extend(extra_fields)
3958 csvtable = Table(filename, fielddef, dbf_type=dbf_type)
3959 for record in mtable:
3960 csvtable.append(record.scatter_fields())
3961 return csvtable
3962 return mtable
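# Example (sketch -- 'cities.csv' and the field names are illustrative):
#     temp = dbf.from_csv('cities.csv')                                          # in-memory, fields f0, f1, ...
#     disk = dbf.from_csv('cities.csv', field_names=['city', 'population'], to_disk=True)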
3964 "returns the list of field names of a table"
3965 table = Table(table_name)
3966 return table.field_names
3967 -def info(table_name):
3968 "prints table info"
3969 table = Table(table_name)
3970 print str(table)
3972 "renames a field in a table"
3973 table = Table(table_name)
3974 try:
3975 table.rename_field(oldfield, newfield)
3976 finally:
3977 table.close()
3979 "returns the definition of a field (or all fields)"
3980 table = Table(table_name)
3981 return table.structure(field)
3983 "just what it says ;)"
3984 for index,dummy in enumerate(records):
3985 chars = dummy._data
3986 print "%2d: " % index,
3987 for char in chars[1:]:
3988 print " %2x " % ord(char),
3989 print
3990