1 from __future__ import with_statement
2 """
3 =========
4 Copyright
5 =========
6
7 - Portions copyright: 2008-2012 Ad-Mail, Inc -- All rights reserved.
8 - Portions copyright: 2012-2014 Ethan Furman -- All rights reserved.
9 - Author: Ethan Furman
10 - Contact: ethan@stoneleaf.us
11
12 Redistribution and use in source and binary forms, with or without
13 modification, are permitted provided that the following conditions are met:
14 - Redistributions of source code must retain the above copyright
15 notice, this list of conditions and the following disclaimer.
16 - Redistributions in binary form must reproduce the above copyright
17 notice, this list of conditions and the following disclaimer in the
18 documentation and/or other materials provided with the distribution.
19 - Neither the name of Ad-Mail, Inc nor the
20 names of its contributors may be used to endorse or promote products
21 derived from this software without specific prior written permission.
22
23 THIS SOFTWARE IS PROVIDED ''AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,
24 INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
25 AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
26 ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
27 EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
28 PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
29 OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
30 WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
31 OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
32 ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
33 """
34
35 import codecs
36 import collections
37 import csv
38 import datetime
39 import os
40 import struct
41 import sys
42 import time
43 import weakref
44
45 from array import array
46 from bisect import bisect_left, bisect_right
47 import decimal
48 from decimal import Decimal
49 from enum import Enum, IntEnum
50 from glob import glob
51 from math import floor
52 import types
53 from types import NoneType
54
55 py_ver = sys.version_info[:2]
56
57
58
# treat unparsable values in logical fields as None instead of raising
LOGICAL_BAD_IS_NONE = True

# codec used to interpret text handed in by the application
input_decoding = 'ascii'

# codec used for tables that do not declare their own codepage
default_codepage = 'ascii'

# table type created when none is specified
default_type = 'db3'

# scratch area for backups and other temporary files
temp_dir = os.environ.get("DBF_TEMP") or os.environ.get("TMP") or os.environ.get("TEMP") or ""

# user-defined functions available to pql queries
pql_user_functions = dict()

# cache of blank record templates
_Template_Records = dict()

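These module-level settings are meant to be adjusted before any tables are opened.  A minimal sketch follows; the importable name `dbf` and the codec choices are assumptions here, not defaults shipped by the module:

import dbf

dbf.input_decoding = 'cp1252'        # how to interpret text handed in by the application
dbf.default_codepage = 'cp850'       # fallback for tables that carry no codepage byte
dbf.temp_dir = '/tmp/dbf_scratch'    # hypothetical scratch directory for backups
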
# days in each month; December is repeated at index 0 and January at index 13
# so the month arithmetic in replace() can step one past either boundary
days_per_month = [31, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31, 31]
days_per_leap_month = [31, 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31, 31]

class IsoDay(IntEnum):
    MONDAY = 1
    TUESDAY = 2
    WEDNESDAY = 3
    THURSDAY = 4
    FRIDAY = 5
    SATURDAY = 6
    SUNDAY = 7

    def next_delta(self, day):
        """Return number of days needed to get from self to day."""
        if self == day:
            return 7
        delta = day - self
        if delta < 0:
            delta += 7
        return delta

    def last_delta(self, day):
        """Return number of days needed to get from self to day."""
        if self == day:
            return -7
        delta = day - self
        if delta > 0:
            delta -= 7
        return delta

globals().update(RelativeDay.__members__)

class IsoMonth(IntEnum):
    JANUARY = 1
    FEBRUARY = 2
    MARCH = 3
    APRIL = 4
    MAY = 5
    JUNE = 6
    JULY = 7
    AUGUST = 8
    SEPTEMBER = 9
    OCTOBER = 10
    NOVEMBER = 11
    DECEMBER = 12

    def next_delta(self, month):
        """Return number of months needed to get from self to month."""
        if self == month:
            return 12
        delta = month - self
        if delta < 0:
            delta += 12
        return delta

    def last_delta(self, month):
        """Return number of months needed to get from self to month."""
        if self == month:
            return -12
        delta = month - self
        if delta > 0:
            delta -= 12
        return delta

globals().update(RelativeMonth.__members__)

def is_leapyear(year):
    if year % 400 == 0:
        return True
    elif year % 100 == 0:
        return False
    elif year % 4 == 0:
        return True
    else:
        return False


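A quick sanity check of the calendar helpers above:

assert is_leapyear(2000) and not is_leapyear(1900) and is_leapyear(2012)
assert days_per_month[2] == 28 and days_per_leap_month[2] == 29
assert days_per_month[0] == days_per_month[12]      # December repeated at index 0
assert days_per_month[13] == days_per_month[1]      # January repeated at index 13
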
220 if py_ver < (2, 7):
226
227 bytes = str
228
229
230
231 if py_ver < (2, 6):
232
233
    def next(iterator):
235 return iterator.next()
236
    class property(object):
        """
        2.6 properties for 2.5-
        """

        def __init__(self, fget=None, fset=None, fdel=None, doc=None):
            self.fget = fget
            self.fset = fset
            self.fdel = fdel
            self.__doc__ = doc or fget.__doc__

        def __call__(self, func):
            self.fget = func
            if not self.__doc__:
                self.__doc__ = func.__doc__

        def __get__(self, obj, objtype=None):
            if obj is None:
                return self
            if self.fget is None:
                raise AttributeError("unreadable attribute")
            return self.fget(obj)

        def __set__(self, obj, value):
            if self.fset is None:
                raise AttributeError("can't set attribute")
            self.fset(obj, value)

        def __delete__(self, obj):
            if self.fdel is None:
                raise AttributeError("can't delete attribute")
            self.fdel(obj)

        def setter(self, func):
            self.fset = func
            return self

        def deleter(self, func):
            self.fdel = func
            return self

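For reference, the decorator protocol the backport reproduces (identical to the built-in property from 2.6 onward); Circle is only an illustration, not part of the module:

class Circle(object):

    def __init__(self, radius):
        self._radius = radius

    @property
    def radius(self):
        return self._radius

    @radius.setter
    def radius(self, value):
        if value < 0:
            raise ValueError("radius must be non-negative")
        self._radius = value
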
281
282
283
try:
    all
except NameError:

    def all(iterable):
        for element in iterable:
            if not element:
                return False
        return True

    def any(iterable):
        for element in iterable:
            if element:
                return True
        return False

    SEEK_SET, SEEK_CUR, SEEK_END = range(3)

else:

    SEEK_SET, SEEK_CUR, SEEK_END = os.SEEK_SET, os.SEEK_CUR, os.SEEK_END
305
306
try:
    from collections import defaultdict
except ImportError:

    class defaultdict(dict):

        def __init__(self, default_factory=None, *a, **kw):
            if (default_factory is not None and
                not hasattr(default_factory, '__call__')):
                raise TypeError('first argument must be callable')
            dict.__init__(self, *a, **kw)
            self.default_factory = default_factory

        def __getitem__(self, key):
            try:
                return dict.__getitem__(self, key)
            except KeyError:
                return self.__missing__(key)

        def __missing__(self, key):
            if self.default_factory is None:
                raise KeyError(key)
            self[key] = value = self.default_factory()
            return value

        def __reduce__(self):
            if self.default_factory is None:
                args = tuple()
            else:
                args = self.default_factory,
            return type(self), args, None, None, self.iteritems()

        def copy(self):
            return self.__copy__()

        def __copy__(self):
            return type(self)(self.default_factory, self)

        def __deepcopy__(self, memo):
            import copy
            return type(self)(self.default_factory,
                              copy.deepcopy(self.items()))

        def __repr__(self):
            return 'defaultdict(%s, %s)' % (self.default_factory,
                                            dict.__repr__(self))

class LazyAttr(object):
    """
    doesn't create object until actually accessed
    """

    def __init__(yo, func=None, doc=None):
        yo.fget = func
        yo.__doc__ = doc or func.__doc__

    def __call__(yo, func):
        yo.fget = func

    def __get__(yo, instance, owner):
        if instance is None:
            return yo
        return yo.fget(instance)


class MutableDefault(object):
    """
    Lives in the class, and on first access calls the supplied factory and
    maps the result into the instance it was called on
    """

    def __init__(self, func):
        self._name = func.__name__
        self.func = func

    def __get__(self, instance, owner):
        result = self.func()
        if instance is not None:
            setattr(instance, self._name, result)
        return result

    def __repr__(self):
        result = self.func()
        return "MutableDefault(%r)" % (result, )


def none(*args, **kwargs):
    """
    because we can't do `NoneType()`
    """
    return None
405
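A hypothetical sketch of MutableDefault in use: the factory's __name__ decides which instance attribute the result is stored under, so the descriptor is normally bound to a name that matches its factory:

class Widget(object):

    def state():                        # factory: takes no arguments, returns a fresh dict
        return dict()
    state = MutableDefault(state)       # first access stores a per-instance dict as .state

w = Widget()
w.state['color'] = 'blue'
assert w.state == {'color': 'blue'}
assert Widget().state == {}             # each instance gets its own mapping
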
406 SYSTEM = 0x01
407 NULLABLE = 0x02
408 BINARY = 0x04
409
410
411 TYPE = 0
412 START = 1
413 LENGTH = 2
414 END = 3
415 DECIMALS = 4
416 FLAGS = 5
417 CLASS = 6
418 EMPTY = 7
419 NULL = 8
420
# maps both ways: flag name -> bit value, and bit value -> flag name
FIELD_FLAGS = {
        'null'      : NULLABLE,
        'binary'    : BINARY,
        'nocptrans' : BINARY,
        'system'    : SYSTEM,
        NULLABLE    : 'null',
        BINARY      : 'binary',
        SYSTEM      : 'system',
        }
432
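Because the integer flags double as keys in FIELD_FLAGS, a field's flag byte can be turned back into names; describe_flags below is only an illustration, not part of the module:

def describe_flags(flag_byte):
    return [FIELD_FLAGS[flag] for flag in (NULLABLE, BINARY, SYSTEM) if flag_byte & flag]

assert describe_flags(NULLABLE | BINARY) == ['null', 'binary']
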
433 IN_MEMORY = 0
434 ON_DISK = 1
435
436 CLOSED = 'closed'
437 READ_ONLY = 'read-only'
438 READ_WRITE = 'read-write'
439
440
441
442
class DbfError(Exception):
    """
    Fatal errors elicit this response.
    """

class DataOverflowError(DbfError):
    """
    Data too large for field
    """

    def __init__(self, message, data=None):
        DbfError.__init__(self, message)
        self.data = data

class BadDataError(DbfError):
    """
    bad data in table
    """

    def __init__(self, message, data=None):
        DbfError.__init__(self, message)
        self.data = data

470 """
471 Field does not exist in table
472 """
473
475 KeyError.__init__(self, '%s: no such field in table' % fieldname)
476 DbfError.__init__(self, '%s: no such field in table' % fieldname)
477 self.data = fieldname
478
481 """
482 invalid field specification
483 """
484
488
491 """
492 Data for table not in unicode
493 """
494
497
498
class NotFoundError(DbfError, ValueError, KeyError, IndexError):
500 """
501 record criteria not met
502 """
503
    def __init__(self, message=None, data=None):
510
class DbfWarning(Exception):
    """
    Normal operations elicit this response
    """
516
517
class Eof(DbfWarning, StopIteration):
    """
    End of file reached
    """

    message = 'End of file reached'

    def __init__(self):
        StopIteration.__init__(self, self.message)
        DbfWarning.__init__(self, self.message)

529
class Bof(DbfWarning, StopIteration):
    """
    Beginning of file reached
    """

    message = 'Beginning of file reached'

    def __init__(self):
        StopIteration.__init__(self, self.message)
        DbfWarning.__init__(self, self.message)

class DoNotIndex(DbfWarning):
    """
    Returned by indexing functions to suppress a record from becoming part of the index
    """

    message = 'Not indexing record'

    def __init__(self):
        DbfWarning.__init__(self, self.message)

552
553
554
555
556 Unknown = Other = object()
610
611 NullType.null = object.__new__(NullType)
612 Null = NullType()
613
614
class Vapor(object):
    """
    used in Vapor Records -- compares unequal with everything
    """

    def __eq__(self, other):
        return False

    def __ne__(self, other):
        return True

Vapor = Vapor()
627
628
class Char(unicode):
    """
    Strips trailing whitespace, and ignores trailing whitespace for comparisons
    """

    def __new__(cls, text=''):
        if not isinstance(text, (basestring, cls)):
            raise ValueError("Unable to automatically coerce %r to Char" % text)
        result = unicode.__new__(cls, text.rstrip())
        return result
639
640 __hash__ = unicode.__hash__
641
643 """
644 ignores trailing whitespace
645 """
646 if not isinstance(other, (self.__class__, basestring)):
647 return NotImplemented
648 return unicode(self) == other.rstrip()
649
651 """
652 ignores trailing whitespace
653 """
654 if not isinstance(other, (self.__class__, basestring)):
655 return NotImplemented
656 return unicode(self) >= other.rstrip()
657
659 """
660 ignores trailing whitespace
661 """
662 if not isinstance(other, (self.__class__, basestring)):
663 return NotImplemented
664 return unicode(self) > other.rstrip()
665
667 """
668 ignores trailing whitespace
669 """
670 if not isinstance(other, (self.__class__, basestring)):
671 return NotImplemented
672 return unicode(self) <= other.rstrip()
673
675 """
676 ignores trailing whitespace
677 """
678 if not isinstance(other, (self.__class__, basestring)):
679 return NotImplemented
680 return unicode(self) < other.rstrip()
681
683 """
684 ignores trailing whitespace
685 """
686 if not isinstance(other, (self.__class__, basestring)):
687 return NotImplemented
688 return unicode(self) != other.rstrip()
689
691 """
692 ignores trailing whitespace
693 """
694 return bool(unicode(self))
695
697 result = self.__class__(unicode(self) + other)
698 return result
699
700 basestring = str, unicode, Char
701 baseinteger = int, long
702
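Char in a nutshell: trailing whitespace is stripped on creation and ignored in comparisons, which matches how character data comes back from fixed-width dbf fields:

assert Char('Ethan   ') == 'Ethan'
assert Char('Ethan') == 'Ethan   '           # the other side is rstripped too
assert Char('Ethan   ') != 'Ethan Furman'
assert not Char('   ')                       # all-blank fields are false
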
class Date(object):
704 """
705 adds null capable datetime.date constructs
706 """
707
708 __slots__ = ['_date']
709
    def __new__(cls, year=None, month=0, day=0):
727
729 if self and isinstance(other, (datetime.timedelta)):
730 return Date(self._date + other)
731 else:
732 return NotImplemented
733
735 if isinstance(other, self.__class__):
736 return self._date == other._date
737 if isinstance(other, datetime.date):
738 return self._date == other
739 if isinstance(other, type(None)):
740 return self._date is None
741 return NotImplemented
742
747
749 if name == '_date':
750 raise AttributeError('_date missing!')
751 elif self:
752 return getattr(self._date, name)
753 else:
754 raise AttributeError('NullDate object has no attribute %s' % name)
755
757 if isinstance(other, (datetime.date)):
758 return self._date >= other
759 elif isinstance(other, (Date)):
760 if other:
761 return self._date >= other._date
762 return False
763 return NotImplemented
764
766 if isinstance(other, (datetime.date)):
767 return self._date > other
768 elif isinstance(other, (Date)):
769 if other:
770 return self._date > other._date
771 return True
772 return NotImplemented
773
775 return hash(self._date)
776
778 if self:
779 if isinstance(other, (datetime.date)):
780 return self._date <= other
781 elif isinstance(other, (Date)):
782 if other:
783 return self._date <= other._date
784 return False
785 else:
786 if isinstance(other, (datetime.date)):
787 return True
788 elif isinstance(other, (Date)):
789 if other:
790 return True
791 return True
792 return NotImplemented
793
795 if self:
796 if isinstance(other, (datetime.date)):
797 return self._date < other
798 elif isinstance(other, (Date)):
799 if other:
800 return self._date < other._date
801 return False
802 else:
803 if isinstance(other, (datetime.date)):
804 return True
805 elif isinstance(other, (Date)):
806 if other:
807 return True
808 return False
809 return NotImplemented
810
812 if self:
813 if isinstance(other, (datetime.date)):
814 return self._date != other
815 elif isinstance(other, (Date)):
816 if other:
817 return self._date != other._date
818 return True
819 else:
820 if isinstance(other, (datetime.date)):
821 return True
822 elif isinstance(other, (Date)):
823 if other:
824 return True
825 return False
826 return NotImplemented
827
829 return self._date is not None
830
831 __radd__ = __add__
832
834 if self and isinstance(other, (datetime.date)):
835 return other - self._date
836 elif self and isinstance(other, (Date)):
837 return other._date - self._date
838 elif self and isinstance(other, (datetime.timedelta)):
839 return Date(other - self._date)
840 else:
841 return NotImplemented
842
844 if self:
845 return "Date(%d, %d, %d)" % self.timetuple()[:3]
846 else:
847 return "Date()"
848
850 if self:
851 return str(self._date)
852 return ""
853
855 if self and isinstance(other, (datetime.date)):
856 return self._date - other
857 elif self and isinstance(other, (Date)):
858 return self._date - other._date
859 elif self and isinstance(other, (datetime.timedelta)):
860 return Date(self._date - other)
861 else:
862 return NotImplemented
863
865 if self:
866 return self._date
867 return None
868
869 @classmethod
874
875 @classmethod
878
879 @classmethod
881 if yyyymmdd in ('', ' ', 'no date'):
882 return cls()
883 return cls(datetime.date(int(yyyymmdd[:4]), int(yyyymmdd[4:6]), int(yyyymmdd[6:])))
884
    def replace(self, year=None, month=None, day=None, delta_year=0, delta_month=0, delta_day=0):
886 if not self:
887 return self.__class__._null_date
888 old_year, old_month, old_day = self.timetuple()[:3]
889 if isinstance(month, RelativeMonth):
890 this_month = IsoMonth(old_month)
891 delta_month += month.months_from(this_month)
892 month = None
893 if isinstance(day, RelativeDay):
894 this_day = IsoDay(self.isoweekday())
895 delta_day += day.days_from(this_day)
896 day = None
897 year = (year or old_year) + delta_year
898 month = (month or old_month) + delta_month
899 day = (day or old_day) + delta_day
900 days_in_month = (days_per_month, days_per_leap_month)[is_leapyear(year)]
901 while not(0 < month < 13) or not (0 < day <= days_in_month[month]):
902 while month < 1:
903 year -= 1
904 month = 12 + month
905 while month > 12:
906 year += 1
907 month = month - 12
908 days_in_month = (days_per_month, days_per_leap_month)[is_leapyear(year)]
909 while day < 1:
910 month -= 1
911 day = days_in_month[month] + day
912 if not 0 < month < 13:
913 break
914 while day > days_in_month[month]:
915 day = day - days_in_month[month]
916 month += 1
917 if not 0 < month < 13:
918 break
919 return Date(year, month, day)
920
922 fmt_cls = type(format)
923 if self:
924 return fmt_cls(self._date.strftime(format))
925 return fmt_cls('')
926
927 @classmethod
    def strptime(cls, date_string, format=None):
929 if format is not None:
930 return cls(*(time.strptime(date_string, format)[0:3]))
931 return cls(*(time.strptime(date_string, "%Y-%m-%d")[0:3]))
932
933 @classmethod
936
938 if self:
939 return "%04d%02d%02d" % self.timetuple()[:3]
940 else:
941 return ' '
942
943 Date.max = Date(datetime.date.max)
944 Date.min = Date(datetime.date.min)
945 Date._null_date = object.__new__(Date)
946 Date._null_date._date = None
947 NullDate = Date()
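Null-capable dates in brief: Date() is the falsey NullDate, equality against None holds, and replace() accepts delta_* keywords that roll across month and year boundaries:

assert not Date() and Date() == None
assert Date(2012, 1, 31).replace(delta_day=+1) == Date(2012, 2, 1)
assert Date(2011, 12, 31).replace(delta_month=+2) == Date(2012, 3, 2)
assert Date(2012, 7, 4).strftime('%Y%m%d') == '20120704'
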
class DateTime(object):
    """
952 adds null capable datetime.datetime constructs
953 """
954
955 __slots__ = ['_datetime']
956
    def __new__(cls, year=None, month=0, day=0, hour=0, minute=0, second=0, microsecond=0):
958 """year may be a datetime.datetime"""
959 if year is None or year is Null:
960 return cls._null_datetime
961 ndt = object.__new__(cls)
962 if isinstance(year, basestring):
963 return DateTime.strptime(year)
964 elif isinstance(year, (DateTime)):
965 ndt._datetime = year._datetime
966 elif isinstance(year, (datetime.datetime)):
967 microsecond = year.microsecond // 1000 * 1000
968 hour, minute, second = year.hour, year.minute, year.second
969 year, month, day = year.year, year.month, year.day
970 ndt._datetime = datetime.datetime(year, month, day, hour, minute, second, microsecond)
971 elif year is not None:
972 microsecond = microsecond // 1000 * 1000
973 ndt._datetime = datetime.datetime(year, month, day, hour, minute, second, microsecond)
974 return ndt
975
977 if self and isinstance(other, (datetime.timedelta)):
978 return DateTime(self._datetime + other)
979 else:
980 return NotImplemented
981
983 if isinstance(other, self.__class__):
984 return self._datetime == other._datetime
985 if isinstance(other, datetime.date):
986 return self._datetime == other
987 if isinstance(other, type(None)):
988 return self._datetime is None
989 return NotImplemented
990
995
997 if name == '_datetime':
998 raise AttributeError('_datetime missing!')
999 elif self:
1000 return getattr(self._datetime, name)
1001 else:
1002 raise AttributeError('NullDateTime object has no attribute %s' % name)
1003
1005 if self:
1006 if isinstance(other, (datetime.datetime)):
1007 return self._datetime >= other
1008 elif isinstance(other, (DateTime)):
1009 if other:
1010 return self._datetime >= other._datetime
1011 return False
1012 else:
1013 if isinstance(other, (datetime.datetime)):
1014 return False
1015 elif isinstance(other, (DateTime)):
1016 if other:
1017 return False
1018 return True
1019 return NotImplemented
1020
1022 if self:
1023 if isinstance(other, (datetime.datetime)):
1024 return self._datetime > other
1025 elif isinstance(other, (DateTime)):
1026 if other:
1027 return self._datetime > other._datetime
1028 return True
1029 else:
1030 if isinstance(other, (datetime.datetime)):
1031 return False
1032 elif isinstance(other, (DateTime)):
1033 if other:
1034 return False
1035 return False
1036 return NotImplemented
1037
1040
1042 if self:
1043 if isinstance(other, (datetime.datetime)):
1044 return self._datetime <= other
1045 elif isinstance(other, (DateTime)):
1046 if other:
1047 return self._datetime <= other._datetime
1048 return False
1049 else:
1050 if isinstance(other, (datetime.datetime)):
1051 return True
1052 elif isinstance(other, (DateTime)):
1053 if other:
1054 return True
1055 return True
1056 return NotImplemented
1057
1059 if self:
1060 if isinstance(other, (datetime.datetime)):
1061 return self._datetime < other
1062 elif isinstance(other, (DateTime)):
1063 if other:
1064 return self._datetime < other._datetime
1065 return False
1066 else:
1067 if isinstance(other, (datetime.datetime)):
1068 return True
1069 elif isinstance(other, (DateTime)):
1070 if other:
1071 return True
1072 return False
1073 return NotImplemented
1074
1076 if self:
1077 if isinstance(other, (datetime.datetime)):
1078 return self._datetime != other
1079 elif isinstance(other, (DateTime)):
1080 if other:
1081 return self._datetime != other._datetime
1082 return True
1083 else:
1084 if isinstance(other, (datetime.datetime)):
1085 return True
1086 elif isinstance(other, (DateTime)):
1087 if other:
1088 return True
1089 return False
1090 return NotImplemented
1091
1094
1095 __radd__ = __add__
1096
1106
1108 if self:
1109 return "DateTime(%5d, %2d, %2d, %2d, %2d, %2d, %2d)" % (
1110 self._datetime.timetuple()[:6] + (self._datetime.microsecond, )
1111 )
1112 else:
1113 return "DateTime()"
1114
1116 if self:
1117 return str(self._datetime)
1118 return ""
1119
1129
1130 @classmethod
1135
1137 if self:
1138 return Date(self.year, self.month, self.day)
1139 return Date()
1140
1142 if self:
1143 return self._datetime
1144 return None
1145
1146 @classmethod
1152
1153 @classmethod
1156
1157 @classmethod
1161
    def replace(self, year=None, month=None, day=None, hour=None, minute=None, second=None, microsecond=None,
                delta_year=0, delta_month=0, delta_day=0, delta_hour=0, delta_minute=0, delta_second=0):
1164 if not self:
1165 return self.__class__._null_datetime
        old_year, old_month, old_day, old_hour, old_minute, old_second = self.timetuple()[:6]
        old_micro = self.microsecond
1167 if isinstance(month, RelativeMonth):
1168 this_month = IsoMonth(old_month)
1169 delta_month += month.months_from(this_month)
1170 month = None
1171 if isinstance(day, RelativeDay):
1172 this_day = IsoDay(self.isoweekday())
1173 delta_day += day.days_from(this_day)
1174 day = None
1175 year = (year or old_year) + delta_year
1176 month = (month or old_month) + delta_month
1177 day = (day or old_day) + delta_day
1178 hour = (hour or old_hour) + delta_hour
1179 minute = (minute or old_minute) + delta_minute
1180 second = (second or old_second) + delta_second
1181 microsecond = microsecond or old_micro
1182 days_in_month = (days_per_month, days_per_leap_month)[is_leapyear(year)]
1183 while ( not (0 < month < 13)
1184 or not (0 < day <= days_in_month[month])
1185 or not (0 <= hour < 24)
1186 or not (0 <= minute < 60)
1187 or not (0 <= second < 60)
1188 ):
1189 while month < 1:
1190 year -= 1
1191 month = 12 + month
1192 while month > 12:
1193 year += 1
1194 month = month - 12
1195 days_in_month = (days_per_month, days_per_leap_month)[is_leapyear(year)]
1196 while day < 1:
1197 month -= 1
1198 day = days_in_month[month] + day
1199 if not 0 < month < 13:
1200 break
1201 while day > days_in_month[month]:
1202 day = day - days_in_month[month]
1203 month += 1
1204 if not 0 < month < 13:
1205 break
1206 while hour < 1:
1207 day -= 1
1208 hour = 24 + hour
1209 while hour > 23:
1210 day += 1
1211 hour = hour - 24
1212 while minute < 0:
1213 hour -= 1
1214 minute = 60 + minute
1215 while minute > 59:
1216 hour += 1
1217 minute = minute - 60
1218 while second < 0:
1219 minute -= 1
1220 second = 60 + second
1221 while second > 59:
1222 minute += 1
1223 second = second - 60
1224 return DateTime(year, month, day, hour, minute, second, microsecond)
1225
1227 fmt_cls = type(format)
1228 if self:
1229 return fmt_cls(self._datetime.strftime(format))
1230 return fmt_cls('')
1231
1232 @classmethod
    def strptime(cls, datetime_string, format=None):
1234 if format is not None:
1235 return cls(datetime.datetime.strptime(datetime_string, format))
1236 for format in (
1237 "%Y-%m-%d %H:%M:%S.%f",
1238 "%Y-%m-%d %H:%M:%S",
1239 ):
1240 try:
1241 return cls(datetime.datetime.strptime(datetime_string, format))
1242 except ValueError:
1243 pass
1244 raise ValueError("Unable to convert %r" % datetime_string)
1245
1247 if self:
1248 return Time(self.hour, self.minute, self.second, self.microsecond)
1249 return Time()
1250
1251 @classmethod
1254
1255 @classmethod
1258
1259 DateTime.max = DateTime(datetime.datetime.max)
1260 DateTime.min = DateTime(datetime.datetime.min)
1261 DateTime._null_datetime = object.__new__(DateTime)
1262 DateTime._null_datetime._datetime = None
1263 NullDateTime = DateTime()
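DateTime behaves the same way, and quietly truncates microseconds to whole milliseconds when constructed:

moment = DateTime(2014, 7, 4, 12, 30, 0, 999999)
assert moment.microsecond == 999000
assert moment > DateTime(2014, 7, 4)
assert not DateTime() and DateTime() == None
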
1264
1265
class Time(object):
1267 """
1268 adds null capable datetime.time constructs
1269 """
1270
1271 __slots__ = ['_time']
1272
    def __new__(cls, hour=None, minute=0, second=0, microsecond=0):
1274 """
1275 hour may be a datetime.time or a str(Time)
1276 """
1277 if hour is None or hour is Null:
1278 return cls._null_time
1279 nt = object.__new__(cls)
1280 if isinstance(hour, basestring):
1281 hour = Time.strptime(hour)
1282 if isinstance(hour, (Time)):
1283 nt._time = hour._time
1284 elif isinstance(hour, (datetime.time)):
1285 microsecond = hour.microsecond // 1000 * 1000
1286 hour, minute, second = hour.hour, hour.minute, hour.second
1287 nt._time = datetime.time(hour, minute, second, microsecond)
1288 elif hour is not None:
1289 microsecond = microsecond // 1000 * 1000
1290 nt._time = datetime.time(hour, minute, second, microsecond)
1291 return nt
1292
1294 if self and isinstance(other, (datetime.timedelta)):
1295 t = self._time
1296 t = datetime.datetime(2012, 6, 27, t.hour, t.minute, t.second, t.microsecond)
1297 t += other
1298 return Time(t.hour, t.minute, t.second, t.microsecond)
1299 else:
1300 return NotImplemented
1301
1303 if isinstance(other, self.__class__):
1304 return self._time == other._time
1305 if isinstance(other, datetime.time):
1306 return self._time == other
1307 if isinstance(other, type(None)):
1308 return self._time is None
1309 return NotImplemented
1310
1315
1317 if name == '_time':
1318 raise AttributeError('_time missing!')
1319 elif self:
1320 return getattr(self._time, name)
1321 else:
1322 raise AttributeError('NullTime object has no attribute %s' % name)
1323
1325 if self:
1326 if isinstance(other, (datetime.time)):
1327 return self._time >= other
1328 elif isinstance(other, (Time)):
1329 if other:
1330 return self._time >= other._time
1331 return False
1332 else:
1333 if isinstance(other, (datetime.time)):
1334 return False
1335 elif isinstance(other, (Time)):
1336 if other:
1337 return False
1338 return True
1339 return NotImplemented
1340
1342 if self:
1343 if isinstance(other, (datetime.time)):
1344 return self._time > other
            elif isinstance(other, (Time)):
1346 if other:
1347 return self._time > other._time
1348 return True
1349 else:
1350 if isinstance(other, (datetime.time)):
1351 return False
1352 elif isinstance(other, (Time)):
1353 if other:
1354 return False
1355 return False
1356 return NotImplemented
1357
1360
1362 if self:
1363 if isinstance(other, (datetime.time)):
1364 return self._time <= other
1365 elif isinstance(other, (Time)):
1366 if other:
1367 return self._time <= other._time
1368 return False
1369 else:
1370 if isinstance(other, (datetime.time)):
1371 return True
1372 elif isinstance(other, (Time)):
1373 if other:
1374 return True
1375 return True
1376 return NotImplemented
1377
1379 if self:
1380 if isinstance(other, (datetime.time)):
1381 return self._time < other
1382 elif isinstance(other, (Time)):
1383 if other:
1384 return self._time < other._time
1385 return False
1386 else:
1387 if isinstance(other, (datetime.time)):
1388 return True
1389 elif isinstance(other, (Time)):
1390 if other:
1391 return True
1392 return False
1393 return NotImplemented
1394
1396 if self:
1397 if isinstance(other, (datetime.time)):
1398 return self._time != other
1399 elif isinstance(other, (Time)):
1400 if other:
1401 return self._time != other._time
1402 return True
1403 else:
1404 if isinstance(other, (datetime.time)):
1405 return True
1406 elif isinstance(other, (Time)):
1407 if other:
1408 return True
1409 return False
1410 return NotImplemented
1411
1413 return self._time is not None
1414
1415 __radd__ = __add__
1416
1418 if self and isinstance(other, (Time, datetime.time)):
1419 t = self._time
1420 t = datetime.datetime(2012, 6, 27, t.hour, t.minute, t.second, t.microsecond)
1421 other = datetime.datetime(2012, 6, 27, other.hour, other.minute, other.second, other.microsecond)
1422 other -= t
1423 return other
1424 else:
1425 return NotImplemented
1426
1428 if self:
1429 return "Time(%d, %d, %d, %d)" % (self.hour, self.minute, self.second, self.microsecond)
1430 else:
1431 return "Time()"
1432
1434 if self:
1435 return str(self._time)
1436 return ""
1437
1439 if self and isinstance(other, (Time, datetime.time)):
1440 t = self._time
1441 t = datetime.datetime(2012, 6, 27, t.hour, t.minute, t.second, t.microsecond)
1442 o = datetime.datetime(2012, 6, 27, other.hour, other.minute, other.second, other.microsecond)
1443 return t - o
1444 elif self and isinstance(other, (datetime.timedelta)):
1445 t = self._time
1446 t = datetime.datetime(2012, 6, 27, t.hour, t.minute, t.second, t.microsecond)
1447 t -= other
1448 return Time(t.hour, t.minute, t.second, t.microsecond)
1449 else:
1450 return NotImplemented
1451
1452 @classmethod
1454 "2.5 == 2 hours, 30 minutes, 0 seconds, 0 microseconds"
1455 if num < 0:
1456 raise ValueError("positive value required (got %r)" % num)
1457 if num == 0:
1458 return Time(0)
1459 hours = int(num)
1460 if hours:
1461 num = num % hours
1462 minutes = int(num * 60)
1463 if minutes:
1464 num = num * 60 % minutes
1465 else:
1466 num = num * 60
1467 seconds = int(num * 60)
1468 if seconds:
1469 num = num * 60 % seconds
1470 else:
1471 num = num * 60
1472 microseconds = int(num * 1000)
1473 return Time(hours, minutes, seconds, microseconds)
1474
1475 @staticmethod
1479
    def replace(self, hour=None, minute=None, second=None, microsecond=None, delta_hour=0, delta_minute=0, delta_second=0):
1481 if not self:
1482 return self.__class__._null_time
1483 old_hour, old_minute, old_second, old_micro = self.hour, self.minute, self.second, self.microsecond
1484 hour = (hour or old_hour) + delta_hour
1485 minute = (minute or old_minute) + delta_minute
1486 second = (second or old_second) + delta_second
1487 microsecond = microsecond or old_micro
1488 while not (0 <= hour < 24) or not (0 <= minute < 60) or not (0 <= second < 60):
1489 while second < 0:
1490 minute -= 1
1491 second = 60 + second
1492 while second > 59:
1493 minute += 1
1494 second = second - 60
1495 while minute < 0:
1496 hour -= 1
1497 minute = 60 + minute
1498 while minute > 59:
1499 hour += 1
1500 minute = minute - 60
1501 while hour < 1:
1502 hour = 24 + hour
1503 while hour > 23:
1504 hour = hour - 24
1505 return Time(hour, minute, second, microsecond)
1506
1508 fmt_cls = type(format)
1509 if self:
1510 return fmt_cls(self._time.strftime(format))
1511 return fmt_cls('')
1512
1513 @classmethod
    def strptime(cls, time_string, format=None):
        if format is not None:
            return cls(datetime.datetime.strptime(time_string, format).time())
        for format in (
                "%H:%M:%S.%f",
                "%H:%M:%S",
                ):
            try:
                return cls(datetime.datetime.strptime(time_string, format).time())
            except ValueError:
                pass
        raise ValueError("Unable to convert %r" % time_string)
1526
1528 if self:
1529 return self._time
1530 return None
1531
1533 "returns Time as a float"
1534 hour = self.hour
1535 minute = self.minute * (1.0 / 60)
1536 second = self.second * (1.0 / 3600)
1537 microsecond = self.microsecond * (1.0 / 3600000)
1538 return hour + minute + second + microsecond
1539
1540 Time.max = Time(datetime.time.max)
1541 Time.min = Time(datetime.time.min)
1542 Time._null_time = object.__new__(Time)
1543 Time._null_time._time = None
1544 NullTime = Time()
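Time supports timedelta arithmetic and subtraction of one time from another, and the NullTime singleton is falsey:

quitting = Time(17, 30)
assert quitting + datetime.timedelta(minutes=45) == Time(18, 15)
assert quitting - Time(9, 0) == datetime.timedelta(hours=8, minutes=30)
assert not Time() and Time() == None
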
1545
1546
class Period(object):
1548 "for matching various time ranges"
1549
    def __init__(self, year=None, month=None, day=None, hour=None, minute=None, second=None, microsecond=None):
1551 params = vars()
1552 self._mask = {}
1553 for attr in ('year', 'month', 'day', 'hour', 'minute', 'second', 'microsecond'):
1554 value = params[attr]
1555 if value is not None:
1556 self._mask[attr] = value
1557
1559 if not self._mask:
1560 return True
1561 for attr, value in self._mask.items():
1562 other_value = getattr(other, attr, None)
1563 try:
1564 if other_value == value or other_value in value:
1565 continue
1566 except TypeError:
1567 pass
1568 return False
1569 return True
1570
1572 items = []
1573 for attr in ('year', 'month', 'day', 'hour', 'minute', 'second', 'microsecond'):
1574 if attr in self._mask:
1575 items.append('%s=%s' % (attr, self._mask[attr]))
1576 return "Period(%s)" % ', '.join(items)
1577
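A Period matches any object whose attributes fall inside its mask; each mask value may be a scalar or a container.  This sketch assumes the matching method shown above is Period.__contains__, so the `in` operator works:

summer_2012 = Period(year=2012, month=(6, 7, 8))
assert Date(2012, 7, 4) in summer_2012
assert Date(2012, 12, 25) not in summer_2012
assert DateTime(2012, 6, 1, 8, 0) in summer_2012
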
class Logical(object):
    """
1581 Logical field return type.
1582
1583 Accepts values of True, False, or None/Null
1584 """
1585
1587 if value is None or value is Null or value is Other or value is Unknown:
1588 return cls.unknown
1589 elif isinstance(value, basestring):
1590 if value.lower() in ('t', 'true', 'y', 'yes', 'on'):
1591 return cls.true
1592 elif value.lower() in ('f', 'false', 'n', 'no', 'off'):
1593 return cls.false
1594 elif value.lower() in ('?', 'unknown', 'null', 'none', ' ', ''):
1595 return cls.unknown
1596 else:
1597 raise ValueError('unknown value for Logical: %s' % value)
1598 else:
1599 return (cls.false, cls.true)[bool(value)]
1600
1602 if isinstance(y, type(None)) or y is Unknown or x is Unknown:
1603 return Unknown
1604 try:
1605 i = int(y)
1606 except Exception:
1607 return NotImplemented
1608 return int(x) + i
1609
1610
1611 __radd__ = __iadd__ = __add__
1612
1614 if isinstance(y, type(None)) or y is Unknown or x is Unknown:
1615 return Unknown
1616 try:
1617 i = int(y)
1618 except Exception:
1619 return NotImplemented
1620 return int(x) - i
1621
1622 __isub__ = __sub__
1623
1625 if isinstance(x, type(None)) or x is Unknown or y is Unknown:
1626 return Unknown
1627 try:
1628 i = int(x)
1629 except Exception:
1630 return NotImplemented
1631 return i - int(y)
1632
1634 if x == 0 or y == 0:
1635 return 0
1636 elif isinstance(y, type(None)) or y is Unknown or x is Unknown:
1637 return Unknown
1638 try:
1639 i = int(y)
1640 except Exception:
1641 return NotImplemented
1642 return int(x) * i
1643
1644 __rmul__ = __imul__ = __mul__
1645
1647 if isinstance(y, type(None)) or y == 0 or y is Unknown or x is Unknown:
1648 return Unknown
1649 try:
1650 i = int(y)
1651 except Exception:
1652 return NotImplemented
1653 return int(x).__div__(i)
1654
1655 __idiv__ = __div__
1656
1658 if isinstance(x, type(None)) or y == 0 or x is Unknown or y is Unknown:
1659 return Unknown
1660 try:
1661 i = int(x)
1662 except Exception:
1663 return NotImplemented
1664 return i.__div__(int(y))
1665
1667 if isinstance(y, type(None)) or y == 0 or y is Unknown or x is Unknown:
1668 return Unknown
1669 try:
1670 i = int(y)
1671 except Exception:
1672 return NotImplemented
1673 return int(x).__truediv__(i)
1674
1675 __itruediv__ = __truediv__
1676
1678 if isinstance(x, type(None)) or y == 0 or y is Unknown or x is Unknown:
1679 return Unknown
1680 try:
1681 i = int(x)
1682 except Exception:
1683 return NotImplemented
1684 return i.__truediv__(int(y))
1685
1687 if isinstance(y, type(None)) or y == 0 or y is Unknown or x is Unknown:
1688 return Unknown
1689 try:
1690 i = int(y)
1691 except Exception:
1692 return NotImplemented
1693 return int(x).__floordiv__(i)
1694
1695 __ifloordiv__ = __floordiv__
1696
1698 if isinstance(x, type(None)) or y == 0 or y is Unknown or x is Unknown:
1699 return Unknown
1700 try:
1701 i = int(x)
1702 except Exception:
1703 return NotImplemented
1704 return i.__floordiv__(int(y))
1705
1707 if isinstance(y, type(None)) or y == 0 or y is Unknown or x is Unknown:
1708 return (Unknown, Unknown)
1709 try:
1710 i = int(y)
1711 except Exception:
1712 return NotImplemented
1713 return divmod(int(x), i)
1714
1716 if isinstance(x, type(None)) or y == 0 or y is Unknown or x is Unknown:
1717 return (Unknown, Unknown)
1718 try:
1719 i = int(x)
1720 except Exception:
1721 return NotImplemented
1722 return divmod(i, int(y))
1723
1725 if isinstance(y, type(None)) or y == 0 or y is Unknown or x is Unknown:
1726 return Unknown
1727 try:
1728 i = int(y)
1729 except Exception:
1730 return NotImplemented
1731 return int(x) % i
1732
1733 __imod__ = __mod__
1734
1736 if isinstance(x, type(None)) or y == 0 or x is Unknown or y is Unknown:
1737 return Unknown
1738 try:
1739 i = int(x)
1740 except Exception:
1741 return NotImplemented
1742 return i % int(y)
1743
1745 if not isinstance(y, (x.__class__, bool, type(None), int)):
1746 return NotImplemented
1747 if isinstance(y, type(None)) or y is Unknown:
1748 return Unknown
1749 i = int(y)
1750 if i == 0:
1751 return 1
1752 if x is Unknown:
1753 return Unknown
1754 return int(x) ** i
1755
1756 __ipow__ = __pow__
1757
1759 if not isinstance(x, (y.__class__, bool, type(None), int)):
1760 return NotImplemented
1761 if y is Unknown:
1762 return Unknown
1763 i = int(y)
1764 if i == 0:
1765 return 1
1766 if x is Unknown or isinstance(x, type(None)):
1767 return Unknown
1768 return int(x) ** i
1769
1774
1775 __ilshift__ = __lshift__
1776
1781
1786
1787 __irshift__ = __rshift__
1788
1793
1799
1805
1810
1815
1817 if x.value is None:
1818 raise ValueError("unable to return complex() of %r" % x)
1819 return complex(x.value)
1820
1822 if x.value is None:
1823 raise ValueError("unable to return int() of %r" % x)
1824 return int(x.value)
1825
1827 if x.value is None:
1828 raise ValueError("unable to return long() of %r" % x)
1829 return long(x.value)
1830
1832 if x.value is None:
1833 raise ValueError("unable to return float() of %r" % x)
1834 return float(x.value)
1835
1837 if x.value is None:
1838 raise ValueError("unable to return oct() of %r" % x)
1839 return oct(x.value)
1840
1842 if x.value is None:
1843 raise ValueError("unable to return hex() of %r" % x)
1844 return hex(x.value)
1845
1847 """
1848 AND (conjunction) x & y:
1849 True iff both x, y are True
1850 False iff at least one of x, y is False
1851 Unknown otherwise
1852 """
1853 if (isinstance(x, int) and not isinstance(x, bool)) or (isinstance(y, int) and not isinstance(y, bool)):
1854 if x == 0 or y == 0:
1855 return 0
1856 elif x is Unknown or y is Unknown:
1857 return Unknown
1858 return int(x) & int(y)
1859 elif x in (False, Falsth) or y in (False, Falsth):
1860 return Falsth
1861 elif x in (True, Truth) and y in (True, Truth):
1862 return Truth
1863 elif isinstance(x, type(None)) or isinstance(y, type(None)) or y is Unknown or x is Unknown:
1864 return Unknown
1865 return NotImplemented
1866
1867 __rand__ = __and__
1868
1870 "OR (disjunction): x | y => True iff at least one of x, y is True"
1871 if (isinstance(x, int) and not isinstance(x, bool)) or (isinstance(y, int) and not isinstance(y, bool)):
1872 if x is Unknown or y is Unknown:
1873 return Unknown
1874 return int(x) | int(y)
1875 elif x in (True, Truth) or y in (True, Truth):
1876 return Truth
1877 elif x in (False, Falsth) and y in (False, Falsth):
1878 return Falsth
1879 elif isinstance(x, type(None)) or isinstance(y, type(None)) or y is Unknown or x is Unknown:
1880 return Unknown
1881 return NotImplemented
1882
1883 __ror__ = __or__
1884
1886 "XOR (parity) x ^ y: True iff only one of x,y is True"
1887 if (isinstance(x, int) and not isinstance(x, bool)) or (isinstance(y, int) and not isinstance(y, bool)):
1888 if x is Unknown or y is Unknown:
1889 return Unknown
1890 return int(x) ^ int(y)
1891 elif x in (True, Truth, False, Falsth) and y in (True, Truth, False, Falsth):
1892 return {
1893 (True, True) : Falsth,
1894 (True, False) : Truth,
1895 (False, True) : Truth,
1896 (False, False): Falsth,
1897 }[(x, y)]
1898 elif isinstance(x, type(None)) or isinstance(y, type(None)) or y is Unknown or x is Unknown:
1899 return Unknown
1900 return NotImplemented
1901
1902 __rxor__ = __xor__
1903
1905 if x is Unknown:
1906 raise TypeError('True/False value of %r is unknown' % x)
1907 return x.value is True
1908
1910 if isinstance(y, x.__class__):
1911 return x.value == y.value
1912 elif isinstance(y, (bool, type(None), int)):
1913 return x.value == y
1914 return NotImplemented
1915
1917 if isinstance(y, type(None)) or x is Unknown or y is Unknown:
1918 return x.value == None
1919 elif isinstance(y, x.__class__):
1920 return x.value >= y.value
1921 elif isinstance(y, (bool, int)):
1922 return x.value >= y
1923 return NotImplemented
1924
1926 if isinstance(y, type(None)) or x is Unknown or y is Unknown:
1927 return False
1928 elif isinstance(y, x.__class__):
1929 return x.value > y.value
1930 elif isinstance(y, (bool, int)):
1931 return x.value > y
1932 return NotImplemented
1933
1935 if isinstance(y, type(None)) or x is Unknown or y is Unknown:
1936 return x.value == None
1937 elif isinstance(y, x.__class__):
1938 return x.value <= y.value
1939 elif isinstance(y, (bool, int)):
1940 return x.value <= y
1941 return NotImplemented
1942
1944 if isinstance(y, type(None)) or x is Unknown or y is Unknown:
1945 return False
1946 elif isinstance(y, x.__class__):
1947 return x.value < y.value
1948 elif isinstance(y, (bool, int)):
1949 return x.value < y
1950 return NotImplemented
1951
1953 if isinstance(y, x.__class__):
1954 return x.value != y.value
1955 elif isinstance(y, (bool, type(None), int)):
1956 return x.value != y
1957 return NotImplemented
1958
1960 return hash(x.value)
1961
1963 if x.value is None:
1964 raise ValueError("unable to return index of %r" % x)
1965 return x.value
1966
1968 return "Logical(%r)" % x.string
1969
1972
1973 Logical.true = object.__new__(Logical)
1974 Logical.true.value = True
1975 Logical.true.string = 'T'
1976 Logical.false = object.__new__(Logical)
1977 Logical.false.value = False
1978 Logical.false.string = 'F'
1979 Logical.unknown = object.__new__(Logical)
1980 Logical.unknown.value = None
1981 Logical.unknown.string = '?'
1982 Truth = Logical(True)
1983 Falsth = Logical(False)
1984 Unknown = Logical()
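The three singletons behave like SQL-style three-valued logic: a known operand decides the result where it can, otherwise Unknown propagates:

assert (Truth & Falsth) is Falsth
assert (Falsth & Unknown) is Falsth      # False decides an AND
assert (Truth | Unknown) is Truth        # True decides an OR
assert (Truth & Unknown) is Unknown
assert (Truth ^ Unknown) is Unknown
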
class Quantum(object):
    """
1989 Logical field return type that implements boolean algebra
1990
1991 Accepts values of True/On, False/Off, or None/Null/Unknown/Other
1992 """
1993
1995 if value is None or value is Null or value is Other or value is Unknown:
1996 return cls.unknown
1997 elif isinstance(value, basestring):
1998 if value.lower() in ('t', 'true', 'y', 'yes', 'on'):
1999 return cls.true
2000 elif value.lower() in ('f', 'false', 'n', 'no', 'off'):
2001 return cls.false
2002 elif value.lower() in ('?', 'unknown', 'null', 'none', ' ', ''):
2003 return cls.unknown
2004 else:
2005 raise ValueError('unknown value for Quantum: %s' % value)
2006 else:
2007 return (cls.false, cls.true)[bool(value)]
2008
2010 "OR (disjunction): x | y => True iff at least one of x, y is True"
2011 if not isinstance(y, (x.__class__, bool, NullType, type(None))):
2012 return NotImplemented
2013 if x.value is True or y is not Other and y == True:
2014 return x.true
2015 elif x.value is False and y is not Other and y == False:
2016 return x.false
2017 return Other
2018
2020 "IMP (material implication) x >> y => False iff x == True and y == False"
2021 if not isinstance(y, (x.__class__, bool, NullType, type(None))):
2022 return NotImplemented
2023 if (x.value is False
2024 or (x.value is True and y is not Other and y == True)):
2025 return x.true
2026 elif x.value is True and y is not Other and y == False:
            return x.false
2028 return Other
2029
2031 "IMP (material implication) x >> y => False iff x = True and y = False"
2032 if not isinstance(x, (y.__class__, bool, NullType, type(None))):
2033 return NotImplemented
2034 if (x is not Other and x == False
2035 or (x is not Other and x == True and y.value is True)):
2036 return y.true
2037 elif x is not Other and x == True and y.value is False:
2038 return y.false
2039 return Other
2040
2042 "IMP (relevant implication) x >> y => True iff both x, y are True, False iff x == True and y == False, Other if x is False"
2043 if not isinstance(y, (x.__class__, bool, NullType, type(None))):
2044 return NotImplemented
2045 if x.value is True and y is not Other and y == True:
2046 return x.true
2047 if x.value is True and y is not Other and y == False:
2048 return x.false
2049 return Other
2050
2052 "IMP (relevant implication) x >> y => True iff both x, y are True, False iff x == True and y == False, Other if y is False"
2053 if not isinstance(x, (y.__class__, bool, NullType, type(None))):
2054 return NotImplemented
2055 if x is not Other and x == True and y.value is True:
2056 return y.true
2057 if x is not Other and x == True and y.value is False:
2058 return y.false
2059 return Other
2060
2062 "NAND (negative AND) x.D(y): False iff x and y are both True"
2063 if not isinstance(y, (x.__class__, bool, NullType, type(None))):
2064 return NotImplemented
2065 if x.value is False or y is not Other and y == False:
2066 return x.true
2067 elif x.value is True and y is not Other and y == True:
2068 return x.false
2069 return Other
2070
2072 "EQV (equivalence) x.E(y): True iff x and y are the same"
2073 if not isinstance(y, (x.__class__, bool, NullType, type(None))):
2074 return NotImplemented
2075 elif (
2076 (x.value is True and y is not Other and y == True)
2077 or
2078 (x.value is False and y is not Other and y == False)
2079 ):
2080 return x.true
2081 elif (
2082 (x.value is True and y is not Other and y == False)
2083 or
2084 (x.value is False and y is not Other and y == True)
2085 ):
2086 return x.false
2087 return Other
2088
2090 "XOR (parity) x ^ y: True iff only one of x,y is True"
2091 if not isinstance(y, (x.__class__, bool, NullType, type(None))):
2092 return NotImplemented
2093 if (
2094 (x.value is True and y is not Other and y == False)
2095 or
2096 (x.value is False and y is not Other and y == True)
2097 ):
2098 return x.true
2099 if (
2100 (x.value is False and y is not Other and y == False)
2101 or
2102 (x.value is True and y is not Other and y == True)
2103 ):
2104 return x.false
2105 return Other
2106
2108 "AND (conjunction) x & y: True iff both x, y are True"
2109 if not isinstance(y, (x.__class__, bool, NullType, type(None))):
2110 return NotImplemented
2111 if x.value is True and y is not Other and y == True:
2112 return x.true
2113 elif x.value is False or y is not Other and y == False:
2114 return x.false
2115 return Other
2116
2118 "NEG (negation) -x: True iff x = False"
2119 if x is x.true:
2120 return x.false
2121 elif x is x.false:
2122 return x.true
2123 return Other
2124
2125 @classmethod
2138
2140 if not isinstance(y, (x.__class__, bool, NullType, type(None))):
2141 return NotImplemented
2142 if (
2143 (x.value is True and y is not Other and y == True)
2144 or
2145 (x.value is False and y is not Other and y == False)
2146 ):
2147 return x.true
2148 elif (
2149 (x.value is True and y is not Other and y == False)
2150 or
2151 (x.value is False and y is not Other and y == True)
2152 ):
2153 return x.false
2154 return Other
2155
2157 return hash(x.value)
2158
2160 if not isinstance(y, (x.__class__, bool, NullType, type(None))):
2161 return NotImplemented
2162 if (
2163 (x.value is True and y is not Other and y == False)
2164 or
2165 (x.value is False and y is not Other and y == True)
2166 ):
2167 return x.true
2168 elif (
2169 (x.value is True and y is not Other and y == True)
2170 or
2171 (x.value is False and y is not Other and y == False)
2172 ):
2173 return x.false
2174 return Other
2175
2177 if x is Other:
2178 raise TypeError('True/False value of %r is unknown' % x)
2179 return x.value is True
2180
2182 return "Quantum(%r)" % x.string
2183
2186
2187 __add__ = A
2188 __and__ = K
2189 __mul__ = K
2190 __neg__ = N
2191 __or__ = A
2192 __radd__ = A
2193 __rand__ = K
2194 __rshift__ = None
2195 __rmul__ = K
2196 __ror__ = A
2197 __rrshift__ = None
2198 __rxor__ = J
2199 __xor__ = J
2200
2201 Quantum.true = object.__new__(Quantum)
2202 Quantum.true.value = True
2203 Quantum.true.string = 'Y'
2204 Quantum.false = object.__new__(Quantum)
2205 Quantum.false.value = False
2206 Quantum.false.string = 'N'
2207 Quantum.unknown = object.__new__(Quantum)
2208 Quantum.unknown.value = None
2209 Quantum.unknown.string = '?'
2210 Quantum.set_implication('material')
2211 On = Quantum(True)
2212 Off = Quantum(False)
2213 Other = Quantum()
2214
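Quantum's singletons implement the same algebra through the named truth functions (A, D, E, J, K, N); this sketch assumes set_implication('material') installs D as __rshift__, as the class attributes above suggest:

assert (On & Off) is Off
assert (On & Other) is Other             # an unknown operand stays unknown
assert (On | Other) is On
assert (Off >> On) is On                 # material implication: False implies anything
assert -Other is Other
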
2215
2216
2217 from xmlrpclib import Marshaller
2218 Marshaller.dispatch[Char] = Marshaller.dump_unicode
2219 Marshaller.dispatch[Logical] = Marshaller.dump_bool
2220 Marshaller.dispatch[DateTime] = Marshaller.dump_datetime
2221 del Marshaller
class _Navigation(object):
    """
2228 Navigation base class that provides VPFish movement methods
2229 """
2230
2231 _index = -1
2232
2234 """
2235 implemented by subclass; must return True if underlying structure meets need
2236 """
2237 raise NotImplementedError()
2238
    def _get_index(self, direction, n=1, start=None):
2240 """
2241 returns index of next available record towards direction
2242 """
2243 if start is not None:
2244 index = start
2245 else:
2246 index = self._index
2247 if direction == 'reverse':
2248 move = -1 * n
2249 limit = 0
2250 index += move
2251 if index < limit:
2252 return -1
2253 else:
2254 return index
2255 elif direction == 'forward':
2256 move = +1 * n
2257 limit = len(self) - 1
2258 index += move
2259 if index > limit:
2260 return len(self)
2261 else:
2262 return index
2263 else:
2264 raise ValueError("direction should be 'forward' or 'reverse', not %r" % direction)
2265
2266 @property
2268 """
2269 returns True if no more usable records towards the beginning of the table
2270 """
2271 self._nav_check()
2272 index = self._get_index('reverse')
2273 return index == -1
2274
2276 """
2277 sets record index to bottom of table (end of table)
2278 """
2279 self._nav_check()
2280 self._index = len(self)
2281 return self._index
2282
2283 @property
2295
2296 @property
2298 """
2299 returns current index
2300 """
2301 self._nav_check()
2302 return self._index
2303
2304 @property
2306 """
2307 returns True if no more usable records towards the end of the table
2308 """
2309 self._nav_check()
2310 index = self._get_index('forward')
2311 return index == len(self)
2312
2313 @property
2324
    def goto(self, where):
2326 """
2327 changes the record pointer to the first matching (deleted) record
2328 where should be either an integer, or 'top' or 'bottom'.
2329 top -> before first record
2330 bottom -> after last record
2331 """
2332 self._nav_check()
2333 max = len(self)
2334 if isinstance(where, baseinteger):
2335 if not -max <= where < max:
2336 raise IndexError("Record %d does not exist" % where)
2337 if where < 0:
2338 where += max
2339 self._index = where
2340 return self._index
2341 move = getattr(self, where, None)
2342 if move is None:
2343 raise DbfError("unable to go to %r" % where)
2344 return move()
2345
2346 @property
2357
2358 @property
2369
2370 @property
2381
    def skip(self, n=1):
2383 """
2384 move index to the next nth available record
2385 """
2386 self._nav_check()
2387 if n < 0:
2388 n *= -1
2389 direction = 'reverse'
2390 else:
2391 direction = 'forward'
2392 self._index = index = self._get_index(direction, n)
2393 if index < 0:
2394 raise Bof()
2395 elif index >= len(self):
2396 raise Eof()
2397 else:
2398 return index
2399
2401 """
2402 sets record index to top of table (beginning of table)
2403 """
2404 self._nav_check()
2405 self._index = -1
2406 return self._index
2407
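A hypothetical sketch of the navigation protocol: a subclass only needs __len__ and _nav_check() to pick up goto/skip/top/bottom.  The base-class name _Navigation is an assumption; NavList exists only for illustration:

class NavList(_Navigation):

    def __init__(self, items):
        self._items = list(items)

    def __len__(self):
        return len(self._items)

    def _nav_check(self):
        pass                             # an in-memory list is always usable

nav = NavList(['a', 'b', 'c'])
nav.top()                                # park the index before the first record
assert nav.skip() == 0 and nav.skip() == 1
assert nav.goto('bottom') == 3
try:
    nav.skip()                           # moving past the last record
except Eof:
    pass
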
class Record(object):
    """
2411 Provides routines to extract and save data within the fields of a
2412 dbf record.
2413 """
2414
2415 __slots__ = ('_recnum', '_meta', '_data', '_old_data', '_dirty',
2416 '_memos', '_write_to_disk', '__weakref__')
2417
    def __new__(cls, recnum, layout, kamikaze='', _fromdisk=False):
2419 """
2420 record = ascii array of entire record;
2421 layout=record specification;
2422 memo = memo object for table
2423 """
2424 record = object.__new__(cls)
2425 record._dirty = False
2426 record._recnum = recnum
2427 record._meta = layout
2428 record._memos = {}
2429 record._write_to_disk = True
2430 record._old_data = None
2431 header = layout.header
2432 record._data = layout.blankrecord[:]
2433 if kamikaze and len(record._data) != len(kamikaze):
2434 raise BadDataError("record data is not the correct length (should be %r, not %r)" %
2435 (len(record._data), len(kamikaze)), data=kamikaze[:])
2436 if recnum == -1:
2437 return record
2438 elif type(kamikaze) == array:
2439 record._data = kamikaze[:]
2440 elif type(kamikaze) == str:
2441 if kamikaze:
2442 record._data = array('c', kamikaze)
2443 else:
                raise BadDataError("%r received for record data" % kamikaze)
2445 if record._data[0] == '\x00':
2446 record._data[0] = ' '
2447 if record._data[0] not in (' ', '*', '\x00'):
2448 raise DbfError("record data not correct -- first character should be a ' ' or a '*'.")
2449 if not _fromdisk and layout.location == ON_DISK:
2450 record._update_disk()
2451 return record
2452
2454 for field in self._meta.user_fields:
2455 if self[field] == value:
2456 return True
2457 return False
2458
2464
2466 if not isinstance(other, (Record, RecordTemplate, dict, tuple)):
2467 return NotImplemented
2468 if isinstance(other, (Record, RecordTemplate)):
2469 if field_names(self) != field_names(other):
2470 return False
2471 for field in self._meta.user_fields:
2472 s_value, o_value = self[field], other[field]
2473 if s_value is not o_value and s_value != o_value:
2474 return False
2475 elif isinstance(other, dict):
2476 if sorted(field_names(self)) != sorted(other.keys()):
2477 return False
2478 for field in self._meta.user_fields:
2479 s_value, o_value = self[field], other[field]
2480 if s_value is not o_value and s_value != o_value:
2481 return False
2482 else:
2483 if len(self) != len(other):
2484 return False
2485 for s_value, o_value in zip(self, other):
2486 if s_value is not o_value and s_value != o_value:
2487 return False
2488 return True
2489
2495
2498
2514
2516 if isinstance(item, baseinteger):
2517 fields = self._meta.user_fields
2518 field_count = len(fields)
2519 if not -field_count <= item < field_count:
2520 raise NotFoundError("Field offset %d is not in record" % item)
2521 field = fields[item]
2522 if field in self._memos:
2523 return self._memos[field]
2524 return self[field]
2525 elif isinstance(item, slice):
2526 sequence = []
2527 if isinstance(item.start, basestring) or isinstance(item.stop, basestring):
2528 field_names = dbf.field_names(self)
2529 start, stop, step = item.start, item.stop, item.step
2530 if start not in field_names or stop not in field_names:
2531 raise MissingFieldError("Either %r or %r (or both) are not valid field names" % (start, stop))
2532 if step is not None and not isinstance(step, baseinteger):
2533 raise DbfError("step value must be an int or long, not %r" % type(step))
2534 start = field_names.index(start)
2535 stop = field_names.index(stop) + 1
2536 item = slice(start, stop, step)
2537 for index in self._meta.fields[item]:
2538 sequence.append(self[index])
2539 return sequence
2540 elif isinstance(item, basestring):
2541 return self.__getattr__(item)
2542 else:
2543 raise TypeError("%r is not a field name" % item)
2544
2547
2549 if not isinstance(other, (Record, RecordTemplate, dict, tuple)):
2550 return NotImplemented
2551 return not self == other
2552
2577
2579 if self._meta.status != READ_WRITE:
2580 raise DbfError("%s not in read/write mode" % self._meta.filename)
2581 if self._write_to_disk:
2582 raise DbfError("unable to modify fields individually except in `with` or `Process()`")
2583 if isinstance(name, basestring):
2584 self.__setattr__(name, value)
2585 elif isinstance(name, baseinteger):
2586 self.__setattr__(self._meta.fields[name], value)
2587 elif isinstance(name, slice):
2588 sequence = []
2589 field_names = dbf.field_names(self)
2590 if isinstance(name.start, basestring) or isinstance(name.stop, basestring):
2591 start, stop, step = name.start, name.stop, name.step
2592 if start not in field_names or stop not in field_names:
2593 raise MissingFieldError("Either %r or %r (or both) are not valid field names" % (start, stop))
2594 if step is not None and not isinstance(step, baseinteger):
2595 raise DbfError("step value must be an int or long, not %r" % type(step))
2596 start = field_names.index(start)
2597 stop = field_names.index(stop) + 1
2598 name = slice(start, stop, step)
2599 for field in self._meta.fields[name]:
2600 sequence.append(field)
2601 if len(sequence) != len(value):
2602 raise DbfError("length of slices not equal")
2603 for field, val in zip(sequence, value):
2604 self[field] = val
2605 else:
2606 raise TypeError("%s is not a field name" % name)
2607
2609 result = []
2610 for seq, field in enumerate(field_names(self)):
2611 result.append("%3d - %-10s: %r" % (seq, field, self[field]))
2612 return '\n'.join(result)
2613
2615 return self._data.tostring()
2616
2634
2635 @classmethod
2637 """
2638 creates a blank record data chunk
2639 """
2640 record = object.__new__(cls)
2641 record._dirty = False
2642 record._recnum = -1
2643 record._meta = layout
2644 record._data = array('c', ' ' * layout.header.record_length)
2645 layout.memofields = []
2646 signature = [layout.table().codepage.name]
2647 for index, name in enumerate(layout.fields):
2648 if name == '_nullflags':
2649 record._data[layout['_nullflags'][START]:layout['_nullflags'][END]] = array('c', chr(0) * layout['_nullflags'][LENGTH])
2650 for index, name in enumerate(layout.fields):
2651 signature.append(name)
2652 if name != '_nullflags':
2653 type = layout[name][TYPE]
2654 start = layout[name][START]
2655 size = layout[name][LENGTH]
2656 end = layout[name][END]
2657 record._data[start:end] = array('c', layout.fieldtypes[type]['Blank'](size))
2658 if layout[name][TYPE] in layout.memo_types:
2659 layout.memofields.append(name)
2660 decimals = layout[name][DECIMALS]
2661 signature[-1] = '_'.join([str(x) for x in (signature[-1], type, size, decimals)])
2662 layout.blankrecord = record._data[:]
2663 data_types = []
2664 for fieldtype, defs in sorted(layout.fieldtypes.items()):
2665 if fieldtype != '0':
2666 data_types.append("%s_%s_%s" % (fieldtype, defs['Empty'], defs['Class']))
2667 layout.record_sig = ('___'.join(signature), '___'.join(data_types))
2668
2670 """
2671 rerun all indices with this record
2672 """
2673 if self._meta.status == CLOSED:
2674 raise DbfError("%s is closed; cannot alter indices" % self._meta.filename)
2675 elif not self._write_to_disk:
2676 raise DbfError("unable to reindex record until it is written to disk")
2677 for dbfindex in self._meta.table()._indexen:
2678 dbfindex(self)
2679
2681 """
2682         calls the appropriate routine to convert the value stored in the field's array
2683 """
2684 fielddef = self._meta[name]
2685 flags = fielddef[FLAGS]
2686 nullable = flags & NULLABLE and '_nullflags' in self._meta
2687 binary = flags & BINARY
2688 if nullable:
2689 byte, bit = divmod(index, 8)
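            # e.g. the field at index 10 maps to byte 1, bit 2 of the _nullflags
            # area (10 // 8 == 1, 10 % 8 == 2); if that bit is set the field is Null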
2690 null_def = self._meta['_nullflags']
2691 null_data = self._data[null_def[START]:null_def[END]]
2692 try:
2693 if ord(null_data[byte]) >> bit & 1:
2694 return Null
2695 except IndexError:
2696 print(null_data)
2697 print(index)
2698 print(byte, bit)
2699 print(len(self._data), self._data)
2700 print(null_def)
2701 print(null_data)
2702 raise
2703
2704 record_data = self._data[fielddef[START]:fielddef[END]]
2705 field_type = fielddef[TYPE]
2706 retrieve = self._meta.fieldtypes[field_type]['Retrieve']
2707 datum = retrieve(record_data, fielddef, self._meta.memo, self._meta.decoder)
2708 return datum
2709
2721
2734
2736 """
2737         calls the appropriate routine to convert the value to ascii bytes and saves it in the record
2738 """
2739 fielddef = self._meta[name]
2740 field_type = fielddef[TYPE]
2741 flags = fielddef[FLAGS]
2742 binary = flags & BINARY
2743 nullable = flags & NULLABLE and '_nullflags' in self._meta
2744 update = self._meta.fieldtypes[field_type]['Update']
2745 if nullable:
2746 byte, bit = divmod(index, 8)
2747 null_def = self._meta['_nullflags']
2748 null_data = self._data[null_def[START]:null_def[END]].tostring()
2749 null_data = [ord(c) for c in null_data]
2750 if value is Null:
2751 null_data[byte] |= 1 << bit
2752 value = None
2753 else:
2754 null_data[byte] &= 0xff ^ 1 << bit
2755 null_data = array('c', [chr(n) for n in null_data])
2756 self._data[null_def[START]:null_def[END]] = null_data
2757 if value is not Null:
2758 bytes = array('c', update(value, fielddef, self._meta.memo, self._meta.input_decoder, self._meta.encoder))
2759 size = fielddef[LENGTH]
2760 if len(bytes) > size:
2761 raise DataOverflowError("tried to store %d bytes in %d byte field" % (len(bytes), size))
2762 blank = array('c', ' ' * size)
2763 start = fielddef[START]
2764 end = start + size
2765 blank[:len(bytes)] = bytes[:]
2766 self._data[start:end] = blank[:]
2767 self._dirty = True
2768
2770 layout = self._meta
2771 if self._recnum < 0:
2772 raise DbfError("cannot update a packed record")
2773 if layout.location == ON_DISK:
2774 header = layout.header
2775 if location == '':
2776 location = self._recnum * header.record_length + header.start
2777 if data is None:
2778 data = self._data
2779 layout.dfd.seek(location)
2780 layout.dfd.write(data)
2781 self._dirty = False
2782 table = layout.table()
2783 if table is not None:
2784 for index in table._indexen:
2785 index(self)
2786
2792
2795 """
2796 Provides routines to mimic a dbf record.
2797 """
2798
2799 __slots__ = ('_meta', '_data', '_old_data', '_memos', '_write_to_disk', '__weakref__')
2800
2811
2813 """
2814         Calls the appropriate routine to convert the value stored in the field's array
2816 """
2817 fielddef = self._meta[name]
2818 flags = fielddef[FLAGS]
2819 nullable = flags & NULLABLE and '_nullflags' in self._meta
2820 binary = flags & BINARY
2821 if nullable:
2822 byte, bit = divmod(index, 8)
2823 null_def = self._meta['_nullflags']
2824 null_data = self._data[null_def[START]:null_def[END]]
2825 if ord(null_data[byte]) >> bit & 1:
2826 return Null
2827 record_data = self._data[fielddef[START]:fielddef[END]]
2828 field_type = fielddef[TYPE]
2829 retrieve = self._meta.fieldtypes[field_type]['Retrieve']
2830 datum = retrieve(record_data, fielddef, self._meta.memo, self._meta.decoder)
2831 return datum
2832
2843
2845 """
2846 Allows record.field_name = ... and record[...] = ...; must use ._commit_flux() to commit changes
2847 """
2848 if not self._write_to_disk:
2849 raise DbfError("template already in a state of flux")
2850 self._old_data = self._data[:]
2851 self._write_to_disk = False
2852
2854 """
2855         calls the appropriate routine to convert the value to ascii bytes and saves it in the record
2856 """
2857 fielddef = self._meta[name]
2858 field_type = fielddef[TYPE]
2859 flags = fielddef[FLAGS]
2860 binary = flags & BINARY
2861 nullable = flags & NULLABLE and '_nullflags' in self._meta
2862 update = self._meta.fieldtypes[field_type]['Update']
2863 if nullable:
2864 byte, bit = divmod(index, 8)
2865 null_def = self._meta['_nullflags']
2866 null_data = self._data[null_def[START]:null_def[END]].tostring()
2867 null_data = [ord(c) for c in null_data]
2868 if value is Null:
2869 null_data[byte] |= 1 << bit
2870 value = None
2871 else:
2872 null_data[byte] &= 0xff ^ 1 << bit
2873 null_data = array('c', [chr(n) for n in null_data])
2874 self._data[null_def[START]:null_def[END]] = null_data
2875 if value is not Null:
2876 bytes = array('c', update(value, fielddef, self._meta.memo, self._meta.input_decoder, self._meta.encoder))
2877 size = fielddef[LENGTH]
2878 if len(bytes) > size:
2879 raise DataOverflowError("tried to store %d bytes in %d byte field" % (len(bytes), size))
2880 blank = array('c', ' ' * size)
2881 start = fielddef[START]
2882 end = start + size
2883 blank[:len(bytes)] = bytes[:]
2884 self._data[start:end] = blank[:]
2885
2886 - def __new__(cls, layout, original_record=None, defaults=None):
2917
2920
2922 if not isinstance(other, (Record, RecordTemplate, dict, tuple)):
2923 return NotImplemented
2924 if isinstance(other, (Record, RecordTemplate)):
2925 if field_names(self) != field_names(other):
2926 return False
2927 for field in self._meta.user_fields:
2928 s_value, o_value = self[field], other[field]
2929 if s_value is not o_value and s_value != o_value:
2930 return False
2931 elif isinstance(other, dict):
2932 if sorted(field_names(self)) != sorted(other.keys()):
2933 return False
2934 for field in self._meta.user_fields:
2935 s_value, o_value = self[field], other[field]
2936 if s_value is not o_value and s_value != o_value:
2937 return False
2938 else:
2939 if len(self) != len(other):
2940 return False
2941 for s_value, o_value in zip(self, other):
2942 if s_value is not o_value and s_value != o_value:
2943 return False
2944 return True
2945
2948
2964
2966 fields = self._meta.user_fields
2967 if isinstance(item, baseinteger):
2968 field_count = len(fields)
2969 if not -field_count <= item < field_count:
2970 raise NotFoundError("Field offset %d is not in record" % item)
2971 field = fields[item]
2972 if field in self._memos:
2973 return self._memos[field]
2974 return self[field]
2975 elif isinstance(item, slice):
2976 sequence = []
2977 if isinstance(item.start, basestring) or isinstance(item.stop, basestring):
2978 start, stop, step = item.start, item.stop, item.step
2979 if start not in fields or stop not in fields:
2980 raise MissingFieldError("Either %r or %r (or both) are not valid field names" % (start, stop))
2981 if step is not None and not isinstance(step, baseinteger):
2982 raise DbfError("step value must be an int or long, not %r" % type(step))
2983 start = fields.index(start)
2984 stop = fields.index(stop) + 1
2985 item = slice(start, stop, step)
2986 for index in self._meta.fields[item]:
2987 sequence.append(self[index])
2988 return sequence
2989 elif isinstance(item, basestring):
2990 return self.__getattr__(item)
2991 else:
2992 raise TypeError("%r is not a field name" % item)
2993
2996
2998 if not isinstance(other, (Record, RecordTemplate, dict, tuple)):
2999 return NotImplemented
3000 return not self == other
3001
3021
3023 if isinstance(name, basestring):
3024 self.__setattr__(name, value)
3025 elif isinstance(name, baseinteger):
3026 self.__setattr__(self._meta.fields[name], value)
3027 elif isinstance(name, slice):
3028 sequence = []
3029 field_names = dbf.field_names(self)
3030 if isinstance(name.start, basestring) or isinstance(name.stop, basestring):
3031 start, stop, step = name.start, name.stop, name.step
3032 if start not in field_names or stop not in field_names:
3033 raise MissingFieldError("Either %r or %r (or both) are not valid field names" % (start, stop))
3034 if step is not None and not isinstance(step, baseinteger):
3035 raise DbfError("step value must be an int or long, not %r" % type(step))
3036 start = field_names.index(start)
3037 stop = field_names.index(stop) + 1
3038 name = slice(start, stop, step)
3039 for field in self._meta.fields[name]:
3040 sequence.append(field)
3041 if len(sequence) != len(value):
3042 raise DbfError("length of slices not equal")
3043 for field, val in zip(sequence, value):
3044 self[field] = val
3045 else:
3046 raise TypeError("%s is not a field name" % name)
3047
3048
3050 return self._data.tostring()
3051
3053 result = []
3054 for seq, field in enumerate(field_names(self)):
3055 result.append("%3d - %-10s: %r" % (seq, field, self[field]))
3056 return '\n'.join(result)
3057
3060 """
3061 Provides routines to mimic a dbf record, but all values are non-existent.
3062 """
3063
3064 __slots__ = ('_recno', '_sequence')
3065
3066 - def __new__(cls, position, sequence):
3067 """
3068         position = 'bof' or 'eof' -- which end of the sequence this vapor record represents
3069         sequence = the table or list this vapor record is drawn from
3071 """
3072 if position not in ('bof', 'eof'):
3073 raise ValueError("position should be 'bof' or 'eof', not %r" % position)
3074 vapor = object.__new__(cls)
3075 vapor._recno = (-1, None)[position == 'eof']
3076 vapor._sequence = sequence
3077 return vapor
3078
3081
3086
3087
3089 if name[0:2] == '__' and name[-2:] == '__':
3090 raise AttributeError('Method %s is not implemented.' % name)
3091 else:
3092 return Vapor
3093
3095 if isinstance(item, baseinteger):
3096 return Vapor
3097 elif isinstance(item, slice):
3098 raise TypeError('slice notation not allowed on Vapor records')
3099 elif isinstance(item, basestring):
3100 return self.__getattr__(item)
3101 else:
3102 raise TypeError("%r is not a field name" % item)
3103
3105 raise TypeError("Vapor records have no length")
3106
3111
3113 """
3114 Vapor records are always False
3115 """
3116 return False
3117
3123
3125 if isinstance(name, (basestring, baseinteger)):
3126 raise TypeError("cannot change Vapor record")
3127 elif isinstance(name, slice):
3128 raise TypeError("slice notation not allowed on Vapor records")
3129 else:
3130 raise TypeError("%s is not a field name" % name)
3131
3133 return "RecordVaporWare(position=%r, sequence=%r)" % (('bof', 'eof')[recno(self) is None], self._sequence)
3134
3136 return 'VaporRecord(%r)' % recno(self)
3137
3138 @property
3144
3147 """
3148 Provides access to memo fields as dictionaries
3149 Must override _init, _get_memo, and _put_memo to
3150 store memo contents to disk
3151 """
3152
3154 """
3155 Initialize disk file usage
3156 """
3157
3159 """
3160 Retrieve memo contents from disk
3161 """
3162
3164 """
3165 Store memo contents to disk
3166 """
3167
3169 """
3170 Resets memo structure back to zero memos
3171 """
3172 self.memory.clear()
3173 self.nextmemo = 1
3174
3176 self.meta = meta
3177 self.memory = {}
3178 self.nextmemo = 1
3179 self._init()
3180 self.meta.newmemofile = False
3181
3183 """
3184 Gets the memo in block
3185 """
3186 if self.meta.ignorememos or not block:
3187 return ''
3188 if self.meta.location == ON_DISK:
3189 return self._get_memo(block)
3190 else:
3191 return self.memory[block]
3192
3194 """
3195 Stores data in memo file, returns block number
3196 """
3197 if self.meta.ignorememos or data == '':
3198 return 0
3199 if self.meta.location == IN_MEMORY:
3200 thismemo = self.nextmemo
3201 self.nextmemo += 1
3202 self.memory[thismemo] = data
3203 else:
3204 thismemo = self._put_memo(data)
3205 return thismemo
3206
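
def _example_in_memory_memo():
    # Illustrative sketch (editor's example, not part of the dbf API): exercising
    # the base memo class above with a minimal stand-in for the table metadata.
    # _FakeMeta is hypothetical; it carries only the attributes the memo methods
    # actually read.
    class _FakeMeta(object):
        ignorememos = False
        location = IN_MEMORY
        newmemofile = False
    memo = _DbfMemo(_FakeMeta())
    block = memo.put_memo('text too long to live in the record itself')
    return memo.get_memo(block)        # -> the original text
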
3209 """
3210 dBase III specific
3211 """
3212
3229
3231 block = int(block)
3232 self.meta.mfd.seek(block * self.meta.memo_size)
3233 eom = -1
3234 data = ''
3235 while eom == -1:
3236 newdata = self.meta.mfd.read(self.meta.memo_size)
3237 if not newdata:
3238 return data
3239 data += newdata
3240 eom = data.find('\x1a\x1a')
3241 return data[:eom]
3242
3244 data = data
3245 length = len(data) + self.record_header_length
3246 blocks = length // self.meta.memo_size
3247 if length % self.meta.memo_size:
3248 blocks += 1
3249 thismemo = self.nextmemo
3250 self.nextmemo = thismemo + blocks
3251 self.meta.mfd.seek(0)
3252 self.meta.mfd.write(pack_long_int(self.nextmemo))
3253 self.meta.mfd.seek(thismemo * self.meta.memo_size)
3254 self.meta.mfd.write(data)
3255 self.meta.mfd.write('\x1a\x1a')
3256 double_check = self._get_memo(thismemo)
3257 if len(double_check) != len(data):
3258 uhoh = open('dbf_memo_dump.err', 'wb')
3259 uhoh.write('thismemo: %d' % thismemo)
3260 uhoh.write('nextmemo: %d' % self.nextmemo)
3261 uhoh.write('saved: %d bytes' % len(data))
3262 uhoh.write(data)
3263 uhoh.write('retrieved: %d bytes' % len(double_check))
3264 uhoh.write(double_check)
3265 uhoh.close()
3266 raise DbfError("unknown error: memo not saved")
3267 return thismemo
3268
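
    # Worked example (illustrative): with the default 512-byte blocks, a memo of
    # 1000 characters plus its two-byte '\x1a\x1a' terminator needs 1002 bytes,
    # which rounds up to 2 blocks -- _put_memo writes it starting at `nextmemo`,
    # advances nextmemo by 2, and the record's field stores the starting block.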
3276
3278 """
3279 Visual Foxpro 6 specific
3280 """
3281
3283 if self.meta.location == ON_DISK and not self.meta.ignorememos:
3284 self.record_header_length = 8
3285 if self.meta.newmemofile:
3286 if self.meta.memo_size == 0:
3287 self.meta.memo_size = 1
3288 elif 1 < self.meta.memo_size < 33:
3289 self.meta.memo_size *= 512
3290 self.meta.mfd = open(self.meta.memoname, 'w+b')
3291 nextmemo = 512 // self.meta.memo_size
3292 if nextmemo * self.meta.memo_size < 512:
3293 nextmemo += 1
3294 self.nextmemo = nextmemo
3295 self.meta.mfd.write(pack_long_int(nextmemo, bigendian=True) + '\x00\x00' + \
3296 pack_short_int(self.meta.memo_size, bigendian=True) + '\x00' * 504)
3297 else:
3298 try:
3299 self.meta.mfd = open(self.meta.memoname, 'r+b')
3300 self.meta.mfd.seek(0)
3301 header = self.meta.mfd.read(512)
3302 self.nextmemo = unpack_long_int(header[:4], bigendian=True)
3303 self.meta.memo_size = unpack_short_int(header[6:8], bigendian=True)
3304 except Exception:
3305 exc = sys.exc_info()[1]
3306 raise DbfError("memo file appears to be corrupt: %r" % exc.args)
3307
3309 self.meta.mfd.seek(block * self.meta.memo_size)
3310 header = self.meta.mfd.read(8)
3311 length = unpack_long_int(header[4:], bigendian=True)
3312 return self.meta.mfd.read(length)
3313
3315 data = data
3316 self.meta.mfd.seek(0)
3317 thismemo = unpack_long_int(self.meta.mfd.read(4), bigendian=True)
3318 self.meta.mfd.seek(0)
3319 length = len(data) + self.record_header_length
3320 blocks = length // self.meta.memo_size
3321 if length % self.meta.memo_size:
3322 blocks += 1
3323 self.meta.mfd.write(pack_long_int(thismemo + blocks, bigendian=True))
3324 self.meta.mfd.seek(thismemo * self.meta.memo_size)
3325 self.meta.mfd.write('\x00\x00\x00\x01' + pack_long_int(len(data), bigendian=True) + data)
3326 return thismemo
3327
3329 if self.meta.location == ON_DISK and not self.meta.ignorememos:
3330 mfd = self.meta.mfd
3331 mfd.seek(0)
3332 mfd.truncate(0)
3333 nextmemo = 512 // self.meta.memo_size
3334 if nextmemo * self.meta.memo_size < 512:
3335 nextmemo += 1
3336 self.nextmemo = nextmemo
3337 mfd.write(pack_long_int(nextmemo, bigendian=True) + '\x00\x00' + \
3338 pack_short_int(self.meta.memo_size, bigendian=True) + '\x00' * 504)
3339 mfd.flush()
3340
3341
3342 -class DbfCsv(csv.Dialect):
3353 csv.register_dialect('dbf', DbfCsv)
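
def _example_dbf_csv_dialect():
    # Illustrative sketch (editor's example): the 'dbf' dialect registered above
    # can be handed to the standard csv module like any other named dialect.
    from StringIO import StringIO          # py2 stdlib, matching this module
    buf = StringIO()
    csv.writer(buf, dialect='dbf').writerow(['name', 'paid', 'qty'])
    return buf.getvalue()
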
3357 """
3358 used because you cannot weakref None
3359 """
3360
3363
3364 _DeadObject = _DeadObject()
3365
3366
3367
3368
3369 VFPTIME = 1721425
3372 """
3373     Returns a two-byte integer from the value, or raises DataOverflowError
3374 """
3375
3376 if value > 65535:
3377 raise DataOverflowError("Maximum Integer size exceeded. Possible: 65535. Attempted: %d" % value)
3378 if bigendian:
3379 return struct.pack('>H', value)
3380 else:
3381 return struct.pack('<H', value)
3382
3384 """
3385     Returns a four-byte integer from the value, or raises DataOverflowError
3386 """
3387
3388 if value > 4294967295:
3389 raise DataOverflowError("Maximum Integer size exceeded. Possible: 4294967295. Attempted: %d" % value)
3390 if bigendian:
3391 return struct.pack('>L', value)
3392 else:
3393 return struct.pack('<L', value)
3394
3396 """
3397 Returns an 11 byte, upper-cased, null padded string suitable for field names;
3398 raises DbfError if the string is bigger than 10 bytes
3399 """
3400 if len(string) > 10:
3401 raise DbfError("Maximum string size is ten characters -- %s has %d characters" % (string, len(string)))
3402 return struct.pack('11s', string.upper())
3403
3405 """
3406 Returns the value in the two-byte integer passed in
3407 """
3408 if bigendian:
3409 return struct.unpack('>H', bytes)[0]
3410 else:
3411 return struct.unpack('<H', bytes)[0]
3412
3414 """
3415 Returns the value in the four-byte integer passed in
3416 """
3417 if bigendian:
3418 return int(struct.unpack('>L', bytes)[0])
3419 else:
3420 return int(struct.unpack('<L', bytes)[0])
3421
3423 """
3424 Returns a normal, lower-cased string from a null-padded byte string
3425 """
3426 field = struct.unpack('%ds' % len(chars), chars)[0]
3427 name = []
3428 for ch in field:
3429 if ch == '\x00':
3430 break
3431 name.append(ch.lower())
3432 return ''.join(name)
3433
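
def _example_pack_helpers():
    # Illustrative sketch (editor's example) of the packing helpers above: counts
    # and offsets are stored as little-endian integers (big-endian when
    # bigendian=True is passed), and field names as 11-byte upper-cased,
    # null-padded strings.
    assert unpack_short_int(pack_short_int(513)) == 513
    assert unpack_long_int(pack_long_int(70000)) == 70000
    return pack_str('name')               # -> 'NAME' followed by seven null bytes
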
3435 """
3436 return scientific notation with not more than decimals-1 decimal places
3437 """
3438 value = str(value)
3439 sign = ''
3440     if value[0] in '+-':
3441 sign = value[0]
3442 if sign == '+':
3443 sign = ''
3444 value = value[1:]
3445 if 'e' in value:
3446 e = value.find('e')
3447 if e - 1 <= decimals:
3448 return sign + value
3449 integer, mantissa, power = value[0], value[1:e], value[e+1:]
3450 mantissa = mantissa[:decimals]
3451 value = sign + integer + mantissa + 'e' + power
3452 return value
3453 integer, mantissa = value[0], value[1:]
3454 if integer == '0':
3455 for e, integer in enumerate(mantissa):
3456             if integer not in '.0':
3457 break
3458 mantissa = '.' + mantissa[e+1:]
3459 mantissa = mantissa[:decimals]
3460 value = sign + integer + mantissa + 'e-%03d' % e
3461 return value
3462 e = mantissa.find('.')
3463 mantissa = '.' + mantissa.replace('.','')
3464 mantissa = mantissa[:decimals]
3465 value = sign + integer + mantissa + 'e+%03d' % e
3466 return value
3467
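
def _example_scinot():
    # Illustrative sketch (editor's example): the second argument is the number
    # of mantissa characters kept, counting the decimal point.
    return scinot(1234567.0, 5), scinot(0.0001234, 5)
    # -> ('1.2345e+006', '1.234e-004')
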
3469 """
3470 called if a data type is not supported for that style of table
3471 """
3472 return something
3473
3475 """
3476     Returns the byte string converted to fielddef[CLASS], or fielddef[EMPTY] when blank
3477 """
3478 data = bytes.tostring()
3479 if not data.strip():
3480 cls = fielddef[EMPTY]
3481 if cls is NoneType:
3482 return None
3483 return cls(data)
3484 if fielddef[FLAGS] & BINARY:
3485 return data
3486 return fielddef[CLASS](decoder(data)[0])
3487
3489 """
3490     returns the value as a byte string (not unicode), encoded and trimmed to the field length
3491 """
3492 length = fielddef[LENGTH]
3493 if string == None:
3494 return length * ' '
3495 if fielddef[FLAGS] & BINARY:
3496 if not isinstance(string, str):
3497 raise ValueError('binary field: %r not in bytes format' % string)
3498 string = str(string)
3499 return string
3500 else:
3501 if not isinstance(string, unicode):
3502 if not isinstance(string, str):
3503 raise ValueError("unable to coerce %r(%r) to string" % (type(string), string))
3504 string = decoder(string)[0]
3505 string = encoder(string)[0]
3506 if not string[length:].strip():
3507 string = string[:length]
3508 return string
3509
3511 """
3512 Returns the currency value in bytes
3513 """
3514 value = struct.unpack('<q', bytes)[0]
3515 return fielddef[CLASS](("%de-4" % value).strip())
3516
3518 """
3519 Returns the value to be stored in the record's disk data
3520 """
3521 if value == None:
3522 value = 0
3523 currency = int(value * 10000)
3524     if not -9223372036854775808 <= currency <= 9223372036854775807:
3525 raise DataOverflowError("value %s is out of bounds" % value)
3526 return struct.pack('<q', currency)
3527
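
def _example_currency_encoding():
    # Illustrative sketch (editor's example) mirroring the currency routines
    # above: the value is scaled by 10,000 and packed as a little-endian signed
    # 64-bit integer, then unscaled on the way back out.
    packed = struct.pack('<q', int(Decimal('1.2345') * 10000))
    return Decimal("%de-4" % struct.unpack('<q', packed)[0])      # Decimal('1.2345')
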
3529 """
3530 Returns the ascii coded date as fielddef[CLASS] or fielddef[EMPTY]
3531 """
3532 text = bytes.tostring()
3533 if text == ' ':
3534 cls = fielddef[EMPTY]
3535 if cls is NoneType:
3536 return None
3537 return cls()
3538 year = int(text[0:4])
3539 month = int(text[4:6])
3540 day = int(text[6:8])
3541 return fielddef[CLASS](year, month, day)
3542
3544 """
3545 Returns the Date or datetime.date object ascii-encoded (yyyymmdd)
3546 """
3547 if moment == None:
3548 return ' '
3549 return "%04d%02d%02d" % moment.timetuple()[:3]
3550
3552 """
3553 Returns the double in bytes as fielddef[CLASS] ('default' == float)
3554 """
3555 typ = fielddef[CLASS]
3556 if typ == 'default':
3557 typ = float
3558 return typ(struct.unpack('<d', bytes)[0])
3559
3561 """
3562 returns the value to be stored in the record's disk data
3563 """
3564 if value == None:
3565 value = 0
3566 return struct.pack('<d', float(value))
3567
3569 """
3570 Returns the binary number stored in bytes in little-endian
3571 format as fielddef[CLASS]
3572 """
3573 typ = fielddef[CLASS]
3574 if typ == 'default':
3575 typ = int
3576 return typ(struct.unpack('<i', bytes)[0])
3577
3579 """
3580 Returns value in little-endian binary format
3581 """
3582 if value == None:
3583 value = 0
3584 try:
3585 value = int(value)
3586 except Exception:
3587 raise DbfError("incompatible type: %s(%s)" % (type(value), value))
3588     if not -2147483648 <= value <= 2147483647:
3589 raise DataOverflowError("Integer size exceeded. Possible: -2,147,483,648..+2,147,483,647. Attempted: %d" % value)
3590 return struct.pack('<i', int(value))
3591
3593 """
3594     Returns True if bytes is 't', 'T', 'y', or 'Y'; False if 'f', 'F', 'n', or 'N';
3595     fielddef[EMPTY] (or None) if '?' or ' '; otherwise None when LOGICAL_BAD_IS_NONE
3596     is True, else raises BadDataError
3597 """
3598 cls = fielddef[CLASS]
3599 empty = fielddef[EMPTY]
3600 bytes = bytes.tostring()
3601 if bytes in 'tTyY':
3602 return cls(True)
3603 elif bytes in 'fFnN':
3604 return cls(False)
3605 elif bytes in '? ':
3606 if empty is NoneType:
3607 return None
3608 return empty()
3609 elif LOGICAL_BAD_IS_NONE:
3610 return None
3611 else:
3612 raise BadDataError('Logical field contained %r' % bytes)
3614
3616 """
3617 Returns 'T' if logical is True, 'F' if False, '?' otherwise
3618 """
3619 if data is Unknown or data is None or data is Null or data is Other:
3620 return '?'
3621 if data == True:
3622 return 'T'
3623 if data == False:
3624 return 'F'
3625 raise ValueError("unable to automatically coerce %r to Logical" % data)
3626
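
def _example_retrieve_logical():
    # Illustrative sketch (editor's example): a hand-built field definition fed
    # to retrieve_logical.  fake_fielddef is hypothetical -- an 8-slot layout
    # like the tuples add_fields() stores -- filling only TYPE (for clarity)
    # and the CLASS/EMPTY slots that retrieve_logical actually reads.
    fake_fielddef = [None] * 8
    fake_fielddef[TYPE] = 'L'
    fake_fielddef[CLASS] = bool
    fake_fielddef[EMPTY] = NoneType
    true_value = retrieve_logical(array('c', 'T'), fake_fielddef, None, None)
    empty_value = retrieve_logical(array('c', '?'), fake_fielddef, None, None)
    return true_value, empty_value        # -> (True, None)
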
3628 """
3629 Returns the block of data from a memo file
3630 """
3631 stringval = bytes.tostring().strip()
3632 if not stringval or memo is None:
3633 cls = fielddef[EMPTY]
3634 if cls is NoneType:
3635 return None
3636 return cls()
3637 block = int(stringval)
3638 data = memo.get_memo(block)
3639 if fielddef[FLAGS] & BINARY:
3640 return data
3641 return fielddef[CLASS](decoder(data)[0])
3642
3643 -def update_memo(string, fielddef, memo, decoder, encoder):
3644 """
3645 Writes string as a memo, returns the block number it was saved into
3646 """
3647 if memo is None:
3648 raise DbfError('Memos are being ignored, unable to update')
3649 if string == None:
3650 string = ''
3651 if fielddef[FLAGS] & BINARY:
3652 if not isinstance(string, str):
3653 raise ValueError('binary field: %r not in bytes format' % string)
3654 string = str(string)
3655 else:
3656 if not isinstance(string, unicode):
3657 if not isinstance(string, str):
3658 raise ValueError("unable to coerce %r(%r) to string" % (type(string), string))
3659 string = decoder(string)[0]
3660 string = encoder(string)[0]
3661 block = memo.put_memo(string)
3662 if block == 0:
3663 block = ''
3664 return "%*s" % (fielddef[LENGTH], block)
3665
3667 """
3668 Returns the number stored in bytes as integer if field spec for
3669 decimals is 0, float otherwise
3670 """
3671 string = bytes.tostring().replace('\x00', '').strip()
3672 cls = fielddef[CLASS]
3673 if not string or string[0:1] == '*':
3674 cls = fielddef[EMPTY]
3675 if cls is NoneType:
3676 return None
3677 return cls()
3678 if cls == 'default':
3679 if fielddef[DECIMALS] == 0:
3680 return int(string)
3681 else:
3682 return float(string)
3683 else:
3684 return cls(string.strip())
3685
3687 """
3688 returns value as ascii representation, rounding decimal
3689 portion as necessary
3690 """
3691 if value == None:
3692 return fielddef[LENGTH] * ' '
3693 try:
3694 value = float(value)
3695 except Exception:
3696 raise DbfError("incompatible type: %s(%s)" % (type(value), value))
3697 decimalsize = fielddef[DECIMALS]
3698 totalsize = fielddef[LENGTH]
3699 if decimalsize:
3700 decimalsize += 1
3701 maxintegersize = totalsize - decimalsize
3702 integersize = len("%.0f" % floor(value))
3703 if integersize > maxintegersize:
3704 if integersize != 1:
3705 raise DataOverflowError('Integer portion too big')
3706 string = scinot(value, decimalsize)
3707 if len(string) > totalsize:
3708 raise DataOverflowError('Value representation too long for field')
3709 return "%*.*f" % (fielddef[LENGTH], fielddef[DECIMALS], value)
3710
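
def _example_numeric_ascii():
    # Illustrative sketch (editor's example): a Numeric field is stored as
    # right-justified ascii text, exactly as the final format expression in
    # update_numeric above produces it.
    return "%*.*f" % (10, 2, 1234.567)        # -> '   1234.57' for an N(10,2) field
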
3712 """
3713 returns the date/time stored in bytes; dates <= 01/01/1981 00:00:00
3714 may not be accurate; BC dates are nulled.
3715 """
3716
3717
3718 if bytes == array('c', '\x00' * 8):
3719 cls = fielddef[EMPTY]
3720 if cls is NoneType:
3721 return None
3722 return cls()
3723 cls = fielddef[CLASS]
3724 time = unpack_long_int(bytes[4:])
3725 microseconds = (time % 1000) * 1000
3726 time = time // 1000
3727 hours = time // 3600
3728 mins = time % 3600 // 60
3729 secs = time % 3600 % 60
3730 time = datetime.time(hours, mins, secs, microseconds)
3731 possible = unpack_long_int(bytes[:4])
3732 possible -= VFPTIME
3733 possible = max(0, possible)
3734 date = datetime.date.fromordinal(possible)
3735 return cls(date.year, date.month, date.day, time.hour, time.minute, time.second, time.microsecond)
3736
3738 """
3739 Sets the date/time stored in moment
3740 moment must have fields:
3741 year, month, day, hour, minute, second, microsecond
3742 """
3743 bytes = ['\x00'] * 8
3744 if moment:
3745 hour = moment.hour
3746 minute = moment.minute
3747 second = moment.second
3748 millisecond = moment.microsecond // 1000
3749 time = ((hour * 3600) + (minute * 60) + second) * 1000 + millisecond
3750 bytes[4:] = update_integer(time)
3751 bytes[:4] = update_integer(moment.toordinal() + VFPTIME)
3752 return ''.join(bytes)
3753
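
def _example_vfp_datetime_layout():
    # Illustrative sketch (editor's example) of the 8-byte Foxpro datetime layout
    # written above: four bytes of little-endian day number (ordinal + VFPTIME)
    # followed by four bytes of little-endian milliseconds since midnight.
    moment = datetime.datetime(2014, 7, 4, 12, 30, 15, 250000)
    day_part = struct.pack('<i', moment.toordinal() + VFPTIME)
    msecs = (moment.hour * 3600 + moment.minute * 60 + moment.second) * 1000 + moment.microsecond // 1000
    return day_part + struct.pack('<i', msecs)
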
3755 """
3756 Returns the block of data from a memo file
3757 """
3758 if memo is None:
3759 block = 0
3760 else:
3761 block = struct.unpack('<i', bytes)[0]
3762 if not block:
3763 cls = fielddef[EMPTY]
3764 if cls is NoneType:
3765 return None
3766 return cls()
3767 data = memo.get_memo(block)
3768 if fielddef[FLAGS] & BINARY:
3769 return data
3770 return fielddef[CLASS](decoder(data)[0])
3771
3773 """
3774 Writes string as a memo, returns the block number it was saved into
3775 """
3776 if memo is None:
3777 raise DbfError('Memos are being ignored, unable to update')
3778 if string == None:
3779 string = ''
3780 if fielddef[FLAGS] & BINARY:
3781 if not isinstance(string, str):
3782 raise ValueError('binary field: %r not in bytes format' % string)
3783 string = str(string)
3784 else:
3785 if not isinstance(string, unicode):
3786 if not isinstance(string, str):
3787 raise ValueError("unable to coerce %r(%r) to string" % (type(string), string))
3788 string = decoder(string)[0]
3789 string = encoder(string)[0]
3790 block = memo.put_memo(string)
3791 return struct.pack('<i', block)
3792
3794 if format[0][0] != '(' or format[0][-1] != ')' or any([f not in flags for f in format[1:]]):
3795 raise FieldSpecError("Format for Character field creation is 'C(n)%s', not 'C%s'" % field_spec_error_text(format, flags))
3796 length = int(format[0][1:-1])
3797 if not 0 < length < 256:
3798 raise FieldSpecError("Character fields must be between 1 and 255, not %d" % length)
3799 decimals = 0
3800 flag = 0
3801 for f in format[1:]:
3802 flag |= FIELD_FLAGS[f]
3803 return length, decimals, flag
3804
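
def _example_field_spec_parsers():
    # Illustrative sketch (editor's example): add_fields() splits a spec such as
    # 'amount N(12,2)' into the name, the type letter, and the remaining pieces,
    # then hands the pieces plus the type's allowed flags to the matching parser
    # (the 'Init' entries in Table._field_types).
    c_len, c_dec, c_flag = add_character(['(25)'], ())       # -> (25, 0, 0)
    n_len, n_dec, n_flag = add_numeric(['(12,2)'], ())       # -> (12, 2, 0)
    return (c_len, c_dec, c_flag), (n_len, n_dec, n_flag)
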
3814
3824
3834
3836 if len(format) > 1 or format[0][0] != '(' or format[0][-1] != ')' or any(f not in flags for f in format[1:]):
3837 raise FieldSpecError("Format for Numeric field creation is 'N(s,d)%s', not 'N%s'" % field_spec_error_text(format, flags))
3838 length, decimals = format[0][1:-1].split(',')
3839 length = int(length)
3840 decimals = int(decimals)
3841 flag = 0
3842 for f in format[1:]:
3843 flag |= FIELD_FLAGS[f]
3844 if not 0 < length < 20:
3845 raise FieldSpecError("Numeric fields must be between 1 and 19 digits, not %d" % length)
3846 if decimals and not 0 < decimals <= length - 2:
3847 raise FieldSpecError("Decimals must be between 0 and Length-2 (Length: %d, Decimals: %d)" % (length, decimals))
3848 return length, decimals, flag
3849
3851 if format[0][0] != '(' or format[0][-1] != ')' or any([f not in flags for f in format[1:]]):
3852 raise FieldSpecError("Format for Character field creation is 'C(n)%s', not 'C%s'" % field_spec_error_text(format, flags))
3853 length = int(format[0][1:-1])
3854 if not 0 < length < 65519:
3855         raise FieldSpecError("Character fields must be between 1 and 65,518, not %d" % length)
3856 decimals = 0
3857 flag = 0
3858 for f in format[1:]:
3859 flag |= FIELD_FLAGS[f]
3860 return length, decimals, flag
3861
3863 if format[0][0] != '(' or format[0][-1] != ')' or any([f not in flags for f in format[1:]]):
3864 raise FieldSpecError("Format for Character field creation is 'C(n)%s', not 'C%s'" % field_spec_error_text(format, flags))
3865 length = int(format[0][1:-1])
3866 if not 0 < length < 255:
3867         raise FieldSpecError("Character fields must be between 1 and 254, not %d" % length)
3868 decimals = 0
3869 flag = 0
3870 for f in format[1:]:
3871 flag |= FIELD_FLAGS[f]
3872 return length, decimals, flag
3873
3883
3893
3903
3913
3915 if any(f not in flags for f in format[1:]):
3916 raise FieldSpecError("Format for Memo field creation is 'M%s', not 'M%s'" % field_spec_error_text(format, flags))
3917 length = 4
3918 decimals = 0
3919 flag = 0
3920 for f in format:
3921 flag |= FIELD_FLAGS[f]
3922 if 'binary' not in flags:
3923 flag |= FIELD_FLAGS['binary']
3924 return length, decimals, flag
3925
3927 if format[0][0] != '(' or format[0][-1] != ')' or any(f not in flags for f in format[1:]):
3928 raise FieldSpecError("Format for Numeric field creation is 'N(s,d)%s', not 'N%s'" % field_spec_error_text(format, flags))
3929 length, decimals = format[0][1:-1].split(',')
3930 length = int(length)
3931 decimals = int(decimals)
3932 flag = 0
3933 for f in format[1:]:
3934 flag |= FIELD_FLAGS[f]
3935 if not 0 < length < 21:
3936 raise FieldSpecError("Numeric fields must be between 1 and 20 digits, not %d" % length)
3937 if decimals and not 0 < decimals <= length - 2:
3938 raise FieldSpecError("Decimals must be between 0 and Length-2 (Length: %d, Decimals: %d)" % (length, decimals))
3939 return length, decimals, flag
3940
3941 -def field_spec_error_text(format, flags):
3942 """
3943 generic routine for error text for the add...() functions
3944 """
3945 flg = ''
3946 if flags:
3947 flg = ' [ ' + ' | '.join(flags) + ' ]'
3948 frmt = ''
3949 if format:
3950 frmt = ' ' + ' '.join(format)
3951 return flg, frmt
3952
3954 """
3955 extends all iters to longest one, using last value from each as necessary
3956 """
3957 iters = [iter(x) for x in iters]
3958 last = [None] * len(iters)
3959 while "any iters have items left":
3960 alive = len(iters)
3961 for i, iterator in enumerate(iters):
3962 try:
3963 value = next(iterator)
3964 last[i] = value
3965 except StopIteration:
3966 alive -= 1
3967 if alive:
3968 yield tuple(last)
3969 alive = len(iters)
3970 continue
3971 break
3972
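
def _example_ezip():
    # Illustrative sketch (editor's example): the shorter iterable keeps
    # repeating its last value until the longest one is exhausted.
    return list(ezip('ab', [1, 2, 3]))        # -> [('a', 1), ('b', 2), ('b', 3)]
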
3973
3974
3975
3976 -class Tables(object):
3977 """
3978 context manager for multiple tables and/or indices
3979 """
3981 if len(tables) == 1 and not isinstance(tables[0], (Table, basestring)):
3982 tables = tables[0]
3983 yo._tables = []
3984 yo._entered = []
3985 for table in tables:
3986 if isinstance(table, basestring):
3987 table = Table(table)
3988 yo._tables.append(table)
3990 for table in yo._tables:
3991 table.__enter__()
3992 yo._entered.append(table)
3993 return tuple(yo._tables)
3995 while yo._entered:
3996 table = yo._entered.pop()
3997 try:
3998 table.__exit__()
3999 except Exception:
4000 pass
4001
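
def _example_tables_usage():
    # Illustrative sketch (editor's example); 'employees.dbf' and 'departments.dbf'
    # are hypothetical names of existing tables.  Both are opened on entry and
    # closed (best effort) when the block exits, even if the body raises.
    with Tables('employees.dbf', 'departments.dbf') as (employees, departments):
        return len(employees), len(departments)
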
4003 """
4004     Represents the index at which the match criteria were found (when the
4005     found attribute is True), or at which they would be inserted (when False)
4006
4007 Used by Index.index_search
4008 """
4009
4011 "value is the number, found is True/False"
4012 result = long.__new__(cls, value)
4013 result.found = found
4014 return result
4015
4018
4021 """
4022 tuple with named attributes for representing a field's dbf type,
4023 length, decimal portion, and python class
4024 """
4025
4026 __slots__= ()
4027
4029 if len(args) != 4:
4030 raise TypeError("%s should be called with Type, Length, Decimal size, and Class" % cls.__name__)
4031 return tuple.__new__(cls, args)
4032
4033 @property
4036
4037 @property
4040
4041 @property
4044
4045 @property
4048
4049
4050 -class CodePage(tuple):
4051 """
4052     tuple with named attributes for representing a table's codepage
4053 """
4054
4055 __slots__= ()
4056
4057 - def __new__(cls, name):
4058 "call with name of codepage (e.g. 'cp1252')"
4059 code, name, desc = _codepage_lookup(name)
4060 return tuple.__new__(cls, (name, desc, code))
4061
4062 - def __repr__(self):
4063 return "CodePage(%r, %r, %r)" % (self[0], self[1], self[2])
4064
4065 - def __str__(self):
4066 return "%s (%s)" % (self[0], self[1])
4067
4068 @property
4071
4072 @property
4075
4076 @property
4079
4080
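
def _example_codepage():
    # Illustrative sketch (editor's example): CodePage('cp1252') builds the
    # (name, description, code byte) tuple shown in __repr__ above.
    cp = CodePage('cp1252')
    return cp[0], str(cp)          # -> ('cp1252', 'cp1252 (<description>)')
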
4081 -class Iter(_Navigation):
4082 """
4083 Provides iterable behavior for a table
4084 """
4085
4086 - def __init__(self, table, include_vapor=False):
4087 """
4088 Return a Vapor record as the last record in the iteration
4089 if include_vapor is True
4090 """
4091 self._table = table
4092 self._record = None
4093 self._include_vapor = include_vapor
4094 self._exhausted = False
4095
4098
4100 while not self._exhausted:
4101 if self._index == len(self._table):
4102 break
4103 if self._index >= (len(self._table) - 1):
4104 self._index = max(self._index, len(self._table))
4105 if self._include_vapor:
4106 return RecordVaporWare('eof', self._table)
4107 break
4108 self._index += 1
4109 record = self._table[self._index]
4110 return record
4111 self._exhausted = True
4112 raise StopIteration
4113
4114
4115 -class Table(_Navigation):
4116 """
4117 Base class for dbf style tables
4118 """
4119
4120 _version = 'basic memory table'
4121 _versionabbr = 'dbf'
4122 _max_fields = 255
4123 _max_records = 4294967296
4124
4125 @MutableDefault
4127 return {
4128 'C' : {
4129 'Type':'Character', 'Init':add_character, 'Blank':lambda x: ' ' * x, 'Retrieve':retrieve_character, 'Update':update_character,
4130 'Class':unicode, 'Empty':unicode, 'flags':tuple(),
4131 },
4132 'D' : {
4133 'Type':'Date', 'Init':add_date, 'Blank':lambda x: ' ', 'Retrieve':retrieve_date, 'Update':update_date,
4134 'Class':datetime.date, 'Empty':none, 'flags':tuple(),
4135 },
4136 'F' : {
4137 'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: ' ' * x, 'Init':add_numeric,
4138 'Class':'default', 'Empty':none, 'flags':tuple(),
4139 },
4140 'L' : {
4141 'Type':'Logical', 'Init':add_logical, 'Blank':lambda x: '?', 'Retrieve':retrieve_logical, 'Update':update_logical,
4142 'Class':bool, 'Empty':none, 'flags':tuple(),
4143 },
4144 'M' : {
4145 'Type':'Memo', 'Init':add_memo, 'Blank':lambda x: ' ', 'Retrieve':retrieve_memo, 'Update':update_memo,
4146 'Class':unicode, 'Empty':unicode, 'flags':tuple(),
4147 },
4148 'N' : {
4149 'Type':'Numeric', 'Init':add_numeric, 'Blank':lambda x: ' ' * x, 'Retrieve':retrieve_numeric, 'Update':update_numeric,
4150 'Class':'default', 'Empty':none, 'flags':tuple(),
4151 },
4152 }
4153 @MutableDefault
4156 _memoext = ''
4157 _memoClass = _DbfMemo
4158 _yesMemoMask = ''
4159 _noMemoMask = ''
4160 _binary_types = tuple()
4161 _character_types = ('C', 'D', 'F', 'L', 'M', 'N')
4162 _currency_types = tuple()
4163 _date_types = ('D', )
4164 _datetime_types = tuple()
4165 _decimal_types = ('N', 'F')
4166 _fixed_types = ('M', 'D', 'L')
4167 _logical_types = ('L', )
4168     _memo_types = ('M', )
4169 _numeric_types = ('N', 'F')
4170 _variable_types = ('C', 'N', 'F')
4171 _dbfTableHeader = array('c', '\x00' * 32)
4172 _dbfTableHeader[0] = '\x00'
4173 _dbfTableHeader[8:10] = array('c', pack_short_int(33))
4174 _dbfTableHeader[10] = '\x01'
4175 _dbfTableHeader[29] = '\x00'
4176 _dbfTableHeader = _dbfTableHeader.tostring()
4177 _dbfTableHeaderExtra = ''
4178 _supported_tables = []
4179 _pack_count = 0
4180 backup = None
4181
4183 """
4184     implements the weakref structure for separate indexes
4185 """
4186
4188 self._indexen = set()
4189
4191 self._indexen = set([s for s in self._indexen if s() is not None])
4192 return (s() for s in self._indexen if s() is not None)
4193
4195 self._indexen = set([s for s in self._indexen if s() is not None])
4196 return len(self._indexen)
4197
4198 - def add(self, new_index):
4199 self._indexen.add(weakref.ref(new_index))
4200 self._indexen = set([s for s in self._indexen if s() is not None])
4201
4221
4223 """
4224     represents the data block that defines a table's type and layout
4225 """
4226
4228 if len(data) != 32:
4229 raise BadDataError('table header should be 32 bytes, but is %d bytes' % len(data))
4230 self.packDate = pack_date
4231 self.unpackDate = unpack_date
4232 self._data = array('c', data + '\x0d')
4233
4235 """
4236 get/set code page of table
4237 """
4238 if cp is None:
4239 return self._data[29]
4240 else:
4241 cp, sd, ld = _codepage_lookup(cp)
4242 self._data[29] = cp
4243 return cp
4244
4245 @property
4247 """
4248 main data structure
4249 """
4250 date = self.packDate(Date.today())
4251 self._data[1:4] = array('c', date)
4252 return self._data.tostring()
4253
4254 @data.setter
4256 if len(bytes) < 32:
4257 raise BadDataError("length for data of %d is less than 32" % len(bytes))
4258 self._data[:] = array('c', bytes)
4259
4260 @property
4262 "extra dbf info (located after headers, before data records)"
4263 fieldblock = self._data[32:]
4264 for i in range(len(fieldblock) // 32 + 1):
4265 cr = i * 32
4266 if fieldblock[cr] == '\x0d':
4267 break
4268 else:
4269 raise BadDataError("corrupt field structure")
4270 cr += 33
4271 return self._data[cr:].tostring()
4272
4273 @extra.setter
4275 fieldblock = self._data[32:]
4276 for i in range(len(fieldblock) // 32 + 1):
4277 cr = i * 32
4278 if fieldblock[cr] == '\x0d':
4279 break
4280 else:
4281 raise BadDataError("corrupt field structure")
4282 cr += 33
4283 self._data[cr:] = array('c', data)
4284 self._data[8:10] = array('c', pack_short_int(len(self._data)))
4285
4286 @property
4288 "number of fields (read-only)"
4289 fieldblock = self._data[32:]
4290 for i in range(len(fieldblock) // 32 + 1):
4291 cr = i * 32
4292 if fieldblock[cr] == '\x0d':
4293 break
4294 else:
4295 raise BadDataError("corrupt field structure")
4296 return len(fieldblock[:cr]) // 32
4297
4298 @property
4300 """
4301 field block structure
4302 """
4303 fieldblock = self._data[32:]
4304 for i in range(len(fieldblock) // 32 + 1):
4305 cr = i * 32
4306 if fieldblock[cr] == '\x0d':
4307 break
4308 else:
4309 raise BadDataError("corrupt field structure")
4310 return fieldblock[:cr].tostring()
4311
4312 @fields.setter
4314 fieldblock = self._data[32:]
4315 for i in range(len(fieldblock) // 32 + 1):
4316 cr = i * 32
4317 if fieldblock[cr] == '\x0d':
4318 break
4319 else:
4320 raise BadDataError("corrupt field structure")
4321 cr += 32
4322 fieldlen = len(block)
4323 if fieldlen % 32 != 0:
4324 raise BadDataError("fields structure corrupt: %d is not a multiple of 32" % fieldlen)
4325 self._data[32:cr] = array('c', block)
4326 self._data[8:10] = array('c', pack_short_int(len(self._data)))
4327 fieldlen = fieldlen // 32
4328 recordlen = 1
4329 for i in range(fieldlen):
4330 recordlen += ord(block[i*32+16])
4331 self._data[10:12] = array('c', pack_short_int(recordlen))
4332
4333 @property
4335 """
4336 number of records (maximum 16,777,215)
4337 """
4338 return unpack_long_int(self._data[4:8].tostring())
4339
4340 @record_count.setter
4343
4344 @property
4346 """
4347         length of a record (read-only) (max of 65,535)
4348 """
4349 return unpack_short_int(self._data[10:12].tostring())
4350
4351 @record_length.setter
4357
4358 @property
4360 """
4361 starting position of first record in file (must be within first 64K)
4362 """
4363 return unpack_short_int(self._data[8:10].tostring())
4364
4365 @start.setter
4368
4369 @property
4371 """
4372 date of last table modification (read-only)
4373 """
4374 return self.unpackDate(self._data[1:4].tostring())
4375
4376 @property
4378 """
4379 dbf version
4380 """
4381 return self._data[0]
4382
4383 @version.setter
4386
4388 """
4389 implements the weakref table for records
4390 """
4391
4393 self._meta = meta
4394 self._max_count = count
4395 self._weakref_list = {}
4396 self._accesses = 0
4397 self._dead_check = 1024
4398
4400
4401 if index < 0:
4402 if self._max_count + index < 0:
4403 raise IndexError('index %d smaller than available records' % index)
4404 index = self._max_count + index
4405 if index >= self._max_count:
4406 raise IndexError('index %d greater than available records' % index)
4407 maybe = self._weakref_list.get(index)
4408 if maybe:
4409 maybe = maybe()
4410 self._accesses += 1
4411 if self._accesses >= self._dead_check:
4412 for key, value in self._weakref_list.items():
4413 if value() is None:
4414 del self._weakref_list[key]
4415 if not maybe:
4416 meta = self._meta
4417 if meta.status == CLOSED:
4418 raise DbfError("%s is closed; record %d is unavailable" % (meta.filename, index))
4419 header = meta.header
4420 if index < 0:
4421 index += header.record_count
4422 size = header.record_length
4423 location = index * size + header.start
4424 meta.dfd.seek(location)
4425 if meta.dfd.tell() != location:
4426 raise ValueError("unable to seek to offset %d in file" % location)
4427 bytes = meta.dfd.read(size)
4428 if not bytes:
4429 raise ValueError("unable to read record data from %s at location %d" % (meta.filename, location))
4430 maybe = Record(recnum=index, layout=meta, kamikaze=bytes, _fromdisk=True)
4431 self._weakref_list[index] = weakref.ref(maybe)
4432 return maybe
4433
4435 self._weakref_list[self._max_count] = weakref.ref(record)
4436 self._max_count += 1
4437
4439 for key in self._weakref_list.keys():
4440 del self._weakref_list[key]
4441 self._max_count = 0
4442
4444 for maybe in self._weakref_list.values():
4445 maybe = maybe()
4446 if maybe and not maybe._write_to_disk:
4447 raise DbfError("some records have not been written to disk")
4448
4450 if not self._max_count:
4451 raise IndexError('no records exist')
4452 self._max_count -= 1
4453 return self[self._max_count-1]
4454
4456 """
4457 constructs fieldblock for disk table
4458 """
4459 fieldblock = array('c', '')
4460 memo = False
4461 nulls = False
4462 meta = self._meta
4463 header = meta.header
4464 header.version = chr(ord(header.version) & ord(self._noMemoMask))
4465 meta.fields = [f for f in meta.fields if f != '_nullflags']
4466 for field in meta.fields:
4467 layout = meta[field]
4468 if meta.fields.count(field) > 1:
4469 raise BadDataError("corrupted field structure (noticed in _build_header_fields)")
4470 fielddef = array('c', '\x00' * 32)
4471 fielddef[:11] = array('c', pack_str(meta.encoder(field)[0]))
4472 fielddef[11] = layout[TYPE]
4473 fielddef[12:16] = array('c', pack_long_int(layout[START]))
4474 fielddef[16] = chr(layout[LENGTH])
4475 fielddef[17] = chr(layout[DECIMALS])
4476 fielddef[18] = chr(layout[FLAGS])
4477 fieldblock.extend(fielddef)
4478 if layout[TYPE] in meta.memo_types:
4479 memo = True
4480 if layout[FLAGS] & NULLABLE:
4481 nulls = True
4482 if memo:
4483 header.version = chr(ord(header.version) | ord(self._yesMemoMask))
4484 if meta.memo is None:
4485 meta.memo = self._memoClass(meta)
4486 else:
4487 if os.path.exists(meta.memoname):
4488 if meta.mfd is not None:
4489 meta.mfd.close()
4490
4491 os.remove(meta.memoname)
4492 meta.memo = None
4493 if nulls:
4494 start = layout[START] + layout[LENGTH]
4495 length, one_more = divmod(len(meta.fields), 8)
4496 if one_more:
4497 length += 1
4498 fielddef = array('c', '\x00' * 32)
4499 fielddef[:11] = array('c', pack_str('_nullflags'))
4500 fielddef[11] = '0'
4501 fielddef[12:16] = array('c', pack_long_int(start))
4502 fielddef[16] = chr(length)
4503 fielddef[17] = chr(0)
4504 fielddef[18] = chr(BINARY | SYSTEM)
4505 fieldblock.extend(fielddef)
4506 meta.fields.append('_nullflags')
4507 nullflags = (
4508 '0',
4509 start,
4510 length,
4511 start + length,
4512 0,
4513 BINARY | SYSTEM,
4514 none,
4515 none,
4516 )
4517 meta['_nullflags'] = nullflags
4518 header.fields = fieldblock.tostring()
4519 meta.user_fields = [f for f in meta.fields if not meta[f][FLAGS] & SYSTEM]
4520 meta.user_field_count = len(meta.user_fields)
4521 Record._create_blank_data(meta)
4522
4524 """
4525 checks memo file for problems
4526 """
4527 raise NotImplementedError("_check_memo_integrity must be implemented by subclass")
4528
4530 """
4531 builds the FieldList of names, types, and descriptions from the disk file
4532 """
4533 raise NotImplementedError("_initialize_fields must be implemented by subclass")
4534
4536 """
4537 Returns field information Name Type(Length[, Decimals])
4538 """
4539 name = self._meta.fields[i]
4540 fielddef = self._meta[name]
4541 type = fielddef[TYPE]
4542 length = fielddef[LENGTH]
4543 decimals = fielddef[DECIMALS]
4544 set_flags = fielddef[FLAGS]
4545 flags = []
4546 if type in ('G', 'P'):
4547 printable_flags = NULLABLE, SYSTEM
4548 else:
4549 printable_flags = BINARY, NULLABLE, SYSTEM
4550 for flg in printable_flags:
4551 if flg & set_flags == flg:
4552 flags.append(FIELD_FLAGS[flg])
4553 set_flags &= 255 ^ flg
4554 if flags:
4555 flags = ' ' + ' '.join(flags)
4556 else:
4557 flags = ''
4558 if type in self._fixed_types:
4559 description = "%s %s%s" % (name, type, flags)
4560 elif type in self._numeric_types:
4561 description = "%s %s(%d,%d)%s" % (name, type, length, decimals, flags)
4562 else:
4563 description = "%s %s(%d)%s" % (name, type, length, flags)
4564 return description
4565
4567 """
4568 standardizes field specs
4569 """
4570 if specs is None:
4571 specs = self.field_names
4572 elif isinstance(specs, str):
4573 specs = specs.strip(sep).split(sep)
4574 else:
4575 specs = list(specs)
4576 specs = [s.strip() for s in specs]
4577 return specs
4578
4585
4586 @staticmethod
4588 """
4589         Returns the date packed into three bytes: year - 1900, month, day
4590 """
4591 return "%c%c%c" % (date.year - 1900, date.month, date.day)
4592
4593 @staticmethod
4595 """
4596 Returns a Date() of the packed three-byte date passed in
4597 """
4598 year, month, day = struct.unpack('<BBB', bytestr)
4599 year += 1900
4600 return Date(year, month, day)
4601
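
    # Worked example (illustrative): 2014-07-04 is packed by the helper above as
    # the three bytes chr(2014 - 1900) + chr(7) + chr(4) == '\x72\x07\x04', and
    # the unpack helper turns those same three bytes back into Date(2014, 7, 4).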
4624
4626 """
4627 data can be a record, template, dict, or tuple
4628 """
4629 if not isinstance(data, (Record, RecordTemplate, dict, tuple)):
4630 raise TypeError("x should be a record, template, dict, or tuple, not %r" % type(data))
4631 for record in Iter(self):
4632 if data == record:
4633 return True
4634 return False
4635
4640
4644
4646 if name in (
4647 'binary_types',
4648 'character_types',
4649 'currency_types',
4650 'date_types',
4651 'datetime_types',
4652 'decimal_types',
4653 'fixed_types',
4654 'logical_types',
4655 'memo_types',
4656 'numeric_types',
4657 'variable_types',
4658 ):
4659 return getattr(self, '_'+name)
4660 if name in ('_table', ):
4661 if self._meta.location == ON_DISK:
4662 self._table = self._Table(len(self), self._meta)
4663 else:
4664 self._table = []
4665 return object.__getattribute__(self, name)
4666
4680
4681 - def __init__(self, filename, field_specs=None, memo_size=128, ignore_memos=False,
4682 codepage=None, default_data_types=None, field_data_types=None,
4683 dbf_type=None, on_disk=True,
4684 ):
4685 """
4686 open/create dbf file
4687 filename should include path if needed
4688 field_specs can be either a ;-delimited string or a list of strings
4689 memo_size is always 512 for db3 memos
4690 ignore_memos is useful if the memo file is missing or corrupt
4694 codepage will override whatever is set in the table itself
4695 """
4696
4697 if not on_disk:
4698 if field_specs is None:
4699 raise DbfError("field list must be specified for memory tables")
4700 self._indexen = self._Indexen()
4701 self._meta = meta = self._MetaData()
4702 meta.max_fields = self._max_fields
4703 meta.max_records = self._max_records
4704 meta.table = weakref.ref(self)
4705 meta.filename = filename
4706 meta.fields = []
4707 meta.user_fields = []
4708 meta.user_field_count = 0
4709 meta.fieldtypes = fieldtypes = self._field_types
4710 meta.fixed_types = self._fixed_types
4711 meta.variable_types = self._variable_types
4712 meta.character_types = self._character_types
4713 meta.currency_types = self._currency_types
4714 meta.decimal_types = self._decimal_types
4715 meta.numeric_types = self._numeric_types
4716 meta.memo_types = self._memo_types
4717 meta.ignorememos = meta.original_ignorememos = ignore_memos
4718 meta.memo_size = memo_size
4719 meta.input_decoder = codecs.getdecoder(input_decoding)
4720 meta.output_encoder = codecs.getencoder(input_decoding)
4721 meta.header = header = self._TableHeader(self._dbfTableHeader, self._pack_date, self._unpack_date)
4722 header.extra = self._dbfTableHeaderExtra
4723 if default_data_types is None:
4724 default_data_types = dict()
4725 elif default_data_types == 'enhanced':
4726 default_data_types = {
4727 'C' : dbf.Char,
4728 'L' : dbf.Logical,
4729 'D' : dbf.Date,
4730 'T' : dbf.DateTime,
4731 }
4732
4733 self._meta._default_data_types = default_data_types
4734 if field_data_types is None:
4735 field_data_types = dict()
4736 self._meta._field_data_types = field_data_types
4737 for field, types in default_data_types.items():
4738 if not isinstance(types, tuple):
4739 types = (types, )
4740 for result_name, result_type in ezip(('Class', 'Empty', 'Null'), types):
4741 fieldtypes[field][result_name] = result_type
4742 if not on_disk:
4743 self._table = []
4744 meta.location = IN_MEMORY
4745 meta.memoname = filename
4746             meta.header.data     # property access stamps today's date into the header
4747 else:
4748 base, ext = os.path.splitext(filename)
4749 if ext.lower() != '.dbf':
4750 meta.filename = filename + '.dbf'
4751 searchname = filename + '.[Db][Bb][Ff]'
4752 else:
4753 meta.filename = filename
4754 searchname = filename
4755 matches = glob(searchname)
4756 if len(matches) == 1:
4757 meta.filename = matches[0]
4758 elif matches:
4759 raise DbfError("please specify exactly which of %r you want" % (matches, ))
4760 case = [('l','u')[c.isupper()] for c in meta.filename[-4:]]
4761 if case == ['l','l','l','l']:
4762 meta.memoname = base + self._memoext.lower()
4763 elif case == ['l','u','u','u']:
4764 meta.memoname = base + self._memoext.upper()
4765 else:
4766 meta.memoname = base + ''.join([c.lower() if case[i] == 'l' else c.upper() for i, c in enumerate(self._memoext)])
4767 meta.location = ON_DISK
4768 if codepage is not None:
4769 header.codepage(codepage)
4770 cp, sd, ld = _codepage_lookup(codepage)
4771 self._meta.decoder = codecs.getdecoder(sd)
4772 self._meta.encoder = codecs.getencoder(sd)
4773 if field_specs:
4774 if meta.location == ON_DISK:
4775 meta.dfd = open(meta.filename, 'w+b')
4776 meta.newmemofile = True
4777 if codepage is None:
4778 header.codepage(default_codepage)
4779 cp, sd, ld = _codepage_lookup(header.codepage())
4780 meta.decoder = codecs.getdecoder(sd)
4781 meta.encoder = codecs.getencoder(sd)
4782 meta.status = READ_WRITE
4783 self.add_fields(field_specs)
4784 else:
4785 try:
4786 dfd = meta.dfd = open(meta.filename, 'r+b')
4787 except IOError:
4788 e= sys.exc_info()[1]
4789 raise DbfError(str(e))
4790 dfd.seek(0)
4791 meta.header = header = self._TableHeader(dfd.read(32), self._pack_date, self._unpack_date)
4792 if not header.version in self._supported_tables:
4793 dfd.close()
4794 dfd = None
4795 raise DbfError(
4796 "%s does not support %s [%x]" %
4797 (self._version,
4798 version_map.get(header.version, 'Unknown: %s' % header.version),
4799 ord(header.version)))
4800 if codepage is None:
4801 cp, sd, ld = _codepage_lookup(header.codepage())
4802 self._meta.decoder = codecs.getdecoder(sd)
4803 self._meta.encoder = codecs.getencoder(sd)
4804 fieldblock = dfd.read(header.start - 32)
4805 for i in range(len(fieldblock) // 32 + 1):
4806 fieldend = i * 32
4807 if fieldblock[fieldend] == '\x0d':
4808 break
4809 else:
4810 raise BadDataError("corrupt field structure in header")
4811 if len(fieldblock[:fieldend]) % 32 != 0:
4812 raise BadDataError("corrupt field structure in header")
4813 old_length = header.data[10:12]
4814 header.fields = fieldblock[:fieldend]
4815 header.data = header.data[:10] + old_length + header.data[12:]
4816 header.extra = fieldblock[fieldend + 1:]
4817 self._initialize_fields()
4818 self._check_memo_integrity()
4819 dfd.seek(0)
4820
4821 for field in meta.fields:
4822 field_type = meta[field][TYPE]
4823 default_field_type = (
4824 fieldtypes[field_type]['Class'],
4825 fieldtypes[field_type]['Empty'],
4826 )
4827 specific_field_type = field_data_types.get(field)
4828 if specific_field_type is not None and not isinstance(specific_field_type, tuple):
4829 specific_field_type = (specific_field_type, )
4830 classes = []
4831 for result_name, result_type in ezip(
4832 ('class', 'empty'),
4833 specific_field_type or default_field_type,
4834 ):
4835 classes.append(result_type)
4836 meta[field] = meta[field][:-2] + tuple(classes)
4837 meta.status = READ_ONLY
4838 self.close()
4839
4841 """
4842 iterates over the table's records
4843 """
4844 return Iter(self)
4845
4847 """
4848 returns number of records in table
4849 """
4850 return self._meta.header.record_count
4851
4852 - def __new__(cls, filename, field_specs=None, memo_size=128, ignore_memos=False,
4853 codepage=None, default_data_types=None, field_data_types=None,
4854 dbf_type=None, on_disk=True,
4855 ):
4856 if dbf_type is None and isinstance(filename, Table):
4857 return filename
4858 if field_specs and dbf_type is None:
4859 dbf_type = default_type
4860 if dbf_type is not None:
4861 dbf_type = dbf_type.lower()
4862 table = table_types.get(dbf_type)
4863 if table is None:
4864 raise DbfError("Unknown table type: %s" % dbf_type)
4865 return object.__new__(table)
4866 else:
4867 base, ext = os.path.splitext(filename)
4868 if ext.lower() != '.dbf':
4869 filename = filename + '.dbf'
4870 possibles = guess_table_type(filename)
4871 if len(possibles) == 1:
4872 return object.__new__(possibles[0][2])
4873 else:
4874 for type, desc, cls in possibles:
4875 if type == default_type:
4876 return object.__new__(cls)
4877 else:
4878 types = ', '.join(["%s" % item[1] for item in possibles])
4879 abbrs = '[' + ' | '.join(["%s" % item[0] for item in possibles]) + ']'
4880 raise DbfError("Table could be any of %s. Please specify %s when opening" % (types, abbrs))
4881
4883 """
4884 True if table has any records
4885 """
4886 return self._meta.header.record_count != 0
4887
4890
4913
4914 @property
4915 - def codepage(self):
4916 """
4917 code page used for text translation
4918 """
4919 return CodePage(code_pages[self._meta.header.codepage()][0])
4920
4921 @codepage.setter
4922 - def codepage(self, codepage):
4923 if not isinstance(codepage, CodePage):
4924 raise TypeError("codepage should be a CodePage, not a %r" % type(codepage))
4925 meta = self._meta
4926 if meta.status != READ_WRITE:
4927 raise DbfError('%s not in read/write mode, unable to change codepage' % meta.filename)
4928 meta.header.codepage(codepage.code)
4929 meta.decoder = codecs.getdecoder(codepage.name)
4930 meta.encoder = codecs.getencoder(codepage.name)
4931 self._update_disk(headeronly=True)
4932
4933 @property
4939
4940 @property
4942 """
4943 a list of the user fields in the table
4944 """
4945 return self._meta.user_fields[:]
4946
4947 @property
4949 """
4950 table's file name, including path (if specified on open)
4951 """
4952 return self._meta.filename
4953
4954 @property
4956 """
4957 date of last update
4958 """
4959 return self._meta.header.update
4960
4961 @property
4963 """
4964 table's memo name (if path included in filename on open)
4965 """
4966 return self._meta.memoname
4967
4968 @property
4970 """
4971         number of bytes in a record (including deleted flag and null field size)
4972 """
4973 return self._meta.header.record_length
4974
4975 @property
4981
4982 @property
4984 """
4985 CLOSED, READ_ONLY, or READ_WRITE
4986 """
4987 return self._meta.status
4988
4989 @property
4991 """
4992 returns the dbf type of the table
4993 """
4994 return self._version
4995
4997 """
4998 adds field(s) to the table layout; format is Name Type(Length,Decimals)[; Name Type(Length,Decimals)[...]]
4999 backup table is created with _backup appended to name
5000 then zaps table, recreates current structure, and copies records back from the backup
5001 """
5002 meta = self._meta
5003 if meta.status != READ_WRITE:
5004 raise DbfError('%s not in read/write mode, unable to add fields (%s)' % (meta.filename, meta.status))
5005 header = meta.header
5006 fields = self.structure() + self._list_fields(field_specs, sep=';')
5007 if (len(fields) + ('_nullflags' in meta)) > meta.max_fields:
5008 raise DbfError(
5009 "Adding %d more field%s would exceed the limit of %d"
5010 % (len(fields), ('s','')[len(fields)==1], meta.max_fields)
5011 )
5012 old_table = None
5013 if self:
5014 old_table = self.create_backup()
5015 self.zap()
5016 if meta.mfd is not None and not meta.ignorememos:
5017 meta.mfd.close()
5018 meta.mfd = None
5019 meta.memo = None
5020 if not meta.ignorememos:
5021 meta.newmemofile = True
5022 offset = 1
5023 for name in meta.fields:
5024 del meta[name]
5025 meta.fields[:] = []
5026
5027 meta.blankrecord = None
5028 for field in fields:
5029 field = field.lower()
5030 pieces = field.split()
5031 name = pieces.pop(0)
5032 if '(' in pieces[0]:
5033 loc = pieces[0].index('(')
5034 pieces.insert(0, pieces[0][:loc])
5035 pieces[1] = pieces[1][loc:]
5036 format = pieces.pop(0).upper()
5037 if pieces and '(' in pieces[0]:
5038 for i, p in enumerate(pieces):
5039 if ')' in p:
5040 pieces[0:i+1] = [''.join(pieces[0:i+1])]
5041 break
5042 if name[0] == '_' or name[0].isdigit() or not name.replace('_', '').isalnum():
5043 raise FieldSpecError("%s invalid: field names must start with a letter, and can only contain letters, digits, and _" % name)
5044 name = unicode(name)
5045 if name in meta.fields:
5046 raise DbfError("Field '%s' already exists" % name)
5047 field_type = format.encode('ascii')
5048 if len(name) > 10:
5049 raise FieldSpecError("Maximum field name length is 10. '%s' is %d characters long." % (name, len(name)))
5050 if not field_type in meta.fieldtypes.keys():
5051 raise FieldSpecError("Unknown field type: %s" % field_type)
5052 init = self._meta.fieldtypes[field_type]['Init']
5053 flags = self._meta.fieldtypes[field_type]['flags']
5054 try:
5055 length, decimals, flags = init(pieces, flags)
5056 except FieldSpecError:
5057 exc = sys.exc_info()[1]
5058 raise FieldSpecError(exc.message + ' (%s:%s)' % (meta.filename, name))
5059 start = offset
5060 end = offset + length
5061 offset = end
5062 meta.fields.append(name)
5063 cls = meta.fieldtypes[field_type]['Class']
5064 empty = meta.fieldtypes[field_type]['Empty']
5065 meta[name] = (
5066 field_type,
5067 start,
5068 length,
5069 end,
5070 decimals,
5071 flags,
5072 cls,
5073 empty,
5074 )
5075 self._build_header_fields()
5076 self._update_disk()
5077 if old_table is not None:
5078 old_table.open()
5079 for record in old_table:
5080 self.append(scatter(record))
5081 old_table.close()
5082
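# Usage sketch (field names and types are illustrative); as the docstring
# notes, the table is backed up, zapped, rebuilt, and repopulated:
#
#     table.add_fields('email C(50); active L')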
5124
5125 - def append(self, data='', drop=False, multiple=1):
5126 """
5127 adds <multiple> blank records, and fills fields with dict/tuple values if present
5128 """
5129 meta = self._meta
5130 if meta.status != READ_WRITE:
5131 raise DbfError('%s not in read/write mode, unable to append records' % meta.filename)
5132 if not self.field_count:
5133 raise DbfError("No fields defined, cannot append")
5134 empty_table = len(self) == 0
5135 dictdata = False
5136 tupledata = False
5137 header = meta.header
5138 kamikaze = ''
5139 if header.record_count == meta.max_records:
5140 raise DbfError("table %r is full; unable to add any more records" % self)
5141 if isinstance(data, (Record, RecordTemplate)):
5142 if data._meta.record_sig[0] == self._meta.record_sig[0]:
5143 kamikaze = data._data
5144 else:
5145 if isinstance(data, dict):
5146 dictdata = data
5147 data = ''
5148 elif isinstance(data, tuple):
5149 if len(data) > self.field_count:
5150 raise DbfError("incoming data has too many values")
5151 tupledata = data
5152 data = ''
5153 elif data:
5154 raise TypeError("data to append must be a tuple, dict, record, or template; not a %r" % type(data))
5155 newrecord = Record(recnum=header.record_count, layout=meta, kamikaze=kamikaze)
5156 if kamikaze and meta.memofields:
5157 newrecord._start_flux()
5158 for field in meta.memofields:
5159 newrecord[field] = data[field]
5160 newrecord._commit_flux()
5161
5162 self._table.append(newrecord)
5163 header.record_count += 1
5164 if not kamikaze:
5165 try:
5166 if dictdata:
5167 gather(newrecord, dictdata, drop=drop)
5168 elif tupledata:
5169 newrecord._start_flux()
5170 for index, item in enumerate(tupledata):
5171 newrecord[index] = item
5172 newrecord._commit_flux()
5173 elif data:
5174 newrecord._start_flux()
5175 data_fields = field_names(data)
5176 my_fields = self.field_names
5177 for field in data_fields:
5178 if field not in my_fields:
5179 if not drop:
5180 raise DbfError("field %r not in table %r" % (field, self))
5181 else:
5182 newrecord[field] = data[field]
5183 newrecord._commit_flux()
5184 except Exception:
5185 self._table.pop()
5186 header.record_count = header.record_count - 1
5187 self._update_disk()
5188 raise
5189 multiple -= 1
5190 if multiple:
5191 data = newrecord._data
5192 single = header.record_count
5193 total = single + multiple
5194 while single < total:
5195 multi_record = Record(single, meta, kamikaze=data)
5196 multi_record._start_flux()
5197 self._table.append(multi_record)
5198 for field in meta.memofields:
5199 multi_record[field] = newrecord[field]
5200 single += 1
5201 multi_record._commit_flux()
5202 header.record_count = total
5203 newrecord = multi_record
5204 self._update_disk(headeronly=True)
5205
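# Usage sketch (field names are assumptions; the table must be open
# read/write):
#
#     table.append()                              # one blank record
#     table.append({'name': 'Ada', 'age': 36})    # dict keyed by field name
#     table.append(('Grace', 45))                 # tuple in field order
#     table.append(multiple=10)                   # ten blank records at once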
5220
- def create_backup(self, new_name=None, on_disk=None):
5222 """
5223 creates a backup table
5224 """
5225 meta = self._meta
5226 already_open = meta.status != CLOSED
5227 if not already_open:
5228 self.open()
5229 if on_disk is None:
5230 on_disk = meta.location
5231 if not on_disk and new_name is None:
5232 new_name = self.filename + '_backup'
5233 if new_name is None:
5234 upper = self.filename.isupper()
5235 directory, filename = os.path.split(self.filename)
5236 name, ext = os.path.splitext(filename)
5237 extra = ('_backup', '_BACKUP')[upper]
5238 new_name = os.path.join(temp_dir or directory, name + extra + ext)
5239 bkup = Table(new_name, self.structure(), codepage=self.codepage.name, dbf_type=self._versionabbr, on_disk=on_disk)
5240
5241 bkup._meta.encoder = self._meta.encoder
5242 bkup._meta.decoder = self._meta.decoder
5243 bkup.open()
5244 for record in self:
5245 bkup.append(record)
5246 bkup.close()
5247 self.backup = new_name
5248 if not already_open:
5249 self.close()
5250 return bkup
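# Usage sketch: the default backup name is <table>_backup<ext>, written to
# DBF_TEMP (if set) or alongside the original:
#
#     backup = table.create_backup()
#     backup = table.create_backup('people_safe.dbf', on_disk=True)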
5251
- def create_index(self, key):
5253 """
5254 creates an in-memory index using the function key
5255 """
5256 meta = self._meta
5257 if meta.status == CLOSED:
5258 raise DbfError('%s is closed' % meta.filename)
5259 return Index(self, key)
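# Usage sketch (the 'last_name' field is an assumption); the key function is
# called with each record and may return DoNotIndex to skip one:
#
#     by_name = table.create_index(lambda rec: rec.last_name)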
5260
- def create_template(self, record=None, defaults=None):
5262 """
5263 returns a record template that can be used like a record
5264 """
5265 return RecordTemplate(self._meta, original_record=record, defaults=defaults)
5266
- def delete_fields(self, doomed):
5268 """
5269 removes field(s) from the table
5270 creates backup files with _backup appended to the file name,
5271 then modifies current structure
5272 """
5273 meta = self._meta
5274 if meta.status != READ_WRITE:
5275 raise DbfError('%s not in read/write mode, unable to delete fields' % meta.filename)
5276 doomed = self._list_fields(doomed)
5277 header = meta.header
5278 for victim in doomed:
5279 if victim not in meta.user_fields:
5280 raise DbfError("field %s not in table -- delete aborted" % victim)
5281 old_table = None
5282 if self:
5283 old_table = self.create_backup()
5284 self.zap()
5285 if meta.mfd is not None and not meta.ignorememos:
5286 meta.mfd.close()
5287 meta.mfd = None
5288 meta.memo = None
5289 if not meta.ignorememos:
5290 meta.newmemofile = True
5291 if '_nullflags' in meta.fields:
5292 doomed.append('_nullflags')
5293 for victim in doomed:
5294 layout = meta[victim]
5295 meta.fields.pop(meta.fields.index(victim))
5296 start = layout[START]
5297 end = layout[END]
5298 for field in meta.fields:
5299 if meta[field][START] == end:
5300 specs = list(meta[field])
5301 end = specs[END]
5302 specs[START] = start
5303 specs[END] = start + specs[LENGTH]
5304 start = specs[END]
5305 meta[field] = tuple(specs)
5306 self._build_header_fields()
5307 self._update_disk()
5308 for name in list(meta):
5309 if name not in meta.fields:
5310 del meta[name]
5311 if old_table is not None:
5312 old_table.open()
5313 for record in old_table:
5314 self.append(scatter(record), drop=True)
5315 old_table.close()
5316
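# Usage sketch (field names are assumptions); several fields can be dropped
# at once using the same 'name; name' format as add_fields:
#
#     table.delete_fields('email')
#     table.delete_fields('email; active')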
5350
5359
5360 - def index(self, record, start=None, stop=None):
5361 """
5362 returns the index of record between start and stop
5363 start and stop default to the first and last record
5364 """
5365 if not isinstance(record, (Record, RecordTemplate, dict, tuple)):
5366 raise TypeError("x should be a record, template, dict, or tuple, not %r" % type(record))
5367 meta = self._meta
5368 if meta.status == CLOSED:
5369 raise DbfError('%s is closed' % meta.filename)
5370 if start is None:
5371 start = 0
5372 if stop is None:
5373 stop = len(self)
5374 for i in range(start, stop):
5375 if record == (self[i]):
5376 return i
5377 else:
5378 raise NotFoundError("dbf.Table.index(x): x not in table", data=record)
5379
5380 - def new(self, filename, field_specs=None, memo_size=None, ignore_memos=None, codepage=None, default_data_types=None, field_data_types=None, on_disk=True):
5381 """
5382 returns a new table of the same type
5383 """
5384 if field_specs is None:
5385 field_specs = self.structure()
5386 if on_disk:
5387 path, name = os.path.split(filename)
5388 if path == "":
5389 filename = os.path.join(os.path.split(self.filename)[0], filename)
5390 elif name == "":
5391 filename = os.path.join(path, os.path.split(self.filename)[1])
5392 if memo_size is None:
5393 memo_size = self._meta.memo_size
5394 if ignore_memos is None:
5395 ignore_memos = self._meta.ignorememos
5396 if codepage is None:
5397 codepage = self._meta.header.codepage()[0]
5398 if default_data_types is None:
5399 default_data_types = self._meta._default_data_types
5400 if field_data_types is None:
5401 field_data_types = self._meta._field_data_types
5402 return Table(filename, field_specs, memo_size, ignore_memos, codepage, default_data_types, field_data_types, dbf_type=self._versionabbr, on_disk=on_disk)
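# Usage sketch: clone this table's layout into a new, empty table of the same
# dbf type (the file name is an assumption):
#
#     empty_copy = table.new('people_2014.dbf')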
5403
- def nullable_field(self, field):
5405 """
5406 returns True if field allows Nulls
5407 """
5408 if field not in self.field_names:
5409 raise MissingField(field)
5410 return bool(self._meta[field][FLAGS] & NULLABLE)
5411
5450
5480
5481 - def query(self, criteria):
5482 """
5483 criteria is a string that will be converted into a function that returns
5484 a List of all matching records
5485 """
5486 meta = self._meta
5487 if meta.status == CLOSED:
5488 raise DbfError('%s is closed' % meta.filename)
5489 return pql(self, criteria)
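# Usage sketch (illustrative only -- the exact criteria syntax is defined by
# the pql() helper, and the field name is an assumption):
#
#     hits = table.query("select * where age > 30")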
5490
- def reindex(self):
5492 """
5493 reprocess all indices for this table
5494 """
5495 meta = self._meta
5496 if meta.status == CLOSED:
5497 raise DbfError('%s is closed' % meta.filename)
5498 for dbfindex in self._indexen:
5499 dbfindex._reindex()
5500
- def rename_field(self, oldname, newname):
5502 """
5503 renames an existing field
5504 """
5505 meta = self._meta
5506 if meta.status != READ_WRITE:
5507 raise DbfError('%s not in read/write mode, unable to change field names' % meta.filename)
5508 if self:
5509 self.create_backup()
5510 if not oldname in self._meta.user_fields:
5511 raise FieldMissingError("field --%s-- does not exist -- cannot rename it." % oldname)
5512 if newname[0] == '_' or newname[0].isdigit() or not newname.replace('_', '').isalnum():
5513 raise FieldSpecError("field names cannot start with _ or a digit, and can only contain letters, digits, and _")
5514 newname = newname.lower()
5515 if newname in self._meta.fields:
5516 raise DbfError("field --%s-- already exists" % newname)
5517 if len(newname) > 10:
5518 raise FieldSpecError("maximum field name length is 10. '%s' is %d characters long." % (newname, len(newname)))
5519 self._meta[newname] = self._meta[oldname]
5520 self._meta.fields[self._meta.fields.index(oldname)] = newname
5521 self._build_header_fields()
5522 self._update_disk(headeronly=True)
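# Usage sketch (field names are assumptions):
#
#     table.rename_field('name', 'full_name')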
5523
- def resize_field(self, chosen, new_size):
5525 """
5526 resizes field (C only at this time)
5527 creates backup file, then modifies current structure
5528 """
5529 meta = self._meta
5530 if meta.status != READ_WRITE:
5531 raise DbfError('%s not in read/write mode, unable to change field size' % meta.filename)
5532 if not 0 < new_size < 256:
5533 raise DbfError("new_size must be between 1 and 255 (use delete_fields to remove a field)")
5534 chosen = self._list_fields(chosen)
5535 for candidate in chosen:
5536 if candidate not in self._meta.user_fields:
5537 raise DbfError("field %s not in table -- resize aborted" % candidate)
5538 elif self.field_info(candidate).field_type != 'C':
5539 raise DbfError("field %s is not Character -- resize aborted" % candidate)
old_table = None
5540 if self:
5541 old_table = self.create_backup()
5542 self.zap()
5543 if meta.mfd is not None and not meta.ignorememos:
5544 meta.mfd.close()
5545 meta.mfd = None
5546 meta.memo = None
5547 if not meta.ignorememos:
5548 meta.newmemofile = True
5549 struct = self.structure()
5550 meta.user_fields[:] = []
5551 new_struct = []
5552 for field_spec in struct:
5553 name, spec = field_spec.split(' ', 1)
5554 if name in chosen:
5555 spec = "C(%d)" % new_size
5556 new_struct.append(' '.join([name, spec]))
5557 self.add_fields(';'.join(new_struct))
5558 if old_table is not None:
5559 old_table.open()
5560 for record in old_table:
5561 self.append(scatter(record), drop=True)
5562 old_table.close()
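# Usage sketch: widen a Character field to 50 bytes (the field name is an
# assumption):
#
#     table.resize_field('name', 50)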
5563
- def structure(self, fields=None):
5565 """
5566 return field specification list suitable for creating same table layout
5567 fields should be a list of fields or None for all fields in table
5568 """
5569 field_specs = []
5570 fields = self._list_fields(fields)
5571 try:
5572 for name in fields:
5573 field_specs.append(self._field_layout(self.field_names.index(name)))
5574 except ValueError:
5575 raise DbfError("field %s does not exist" % name)
5576 return field_specs
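# Usage sketch: the returned specs can be passed back to Table() or new() to
# recreate the same layout:
#
#     specs = table.structure()     # e.g. ['name C(25)', 'age N(3,0)']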
5577
5594
-class Db3Table(Table):
5597 """
5598 Provides an interface for working with dBase III tables.
5599 """
5600
5601 _version = 'dBase III Plus'
5602 _versionabbr = 'db3'
5603
5604 @MutableDefault
5606 return {
5607 'C' : {
5608 'Type':'Character', 'Retrieve':retrieve_character, 'Update':update_character, 'Blank':lambda x: ' ' * x, 'Init':add_character,
5609 'Class':unicode, 'Empty':unicode, 'flags':tuple(),
5610 },
5611 'D' : {
5612 'Type':'Date', 'Retrieve':retrieve_date, 'Update':update_date, 'Blank':lambda x: ' ', 'Init':add_date,
5613 'Class':datetime.date, 'Empty':none, 'flags':tuple(),
5614 },
5615 'F' : {
5616 'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: ' ' * x, 'Init':add_numeric,
5617 'Class':'default', 'Empty':none, 'flags':tuple(),
5618 },
5619 'L' : {
5620 'Type':'Logical', 'Retrieve':retrieve_logical, 'Update':update_logical, 'Blank':lambda x: '?', 'Init':add_logical,
5621 'Class':bool, 'Empty':none, 'flags':tuple(),
5622 },
5623 'M' : {
5624 'Type':'Memo', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: ' ', 'Init':add_memo,
5625 'Class':unicode, 'Empty':unicode, 'flags':tuple(),
5626 },
5627 'N' : {
5628 'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: ' ' * x, 'Init':add_numeric,
5629 'Class':'default', 'Empty':none, 'flags':tuple(),
5630 } }
5631
5632 _memoext = '.dbt'
5633 _memoClass = _Db3Memo
5634 _yesMemoMask = '\x80'
5635 _noMemoMask = '\x7f'
5636 _binary_types = ()
5637 _character_types = ('C', 'M')
5638 _currency_types = tuple()
5639 _date_types = ('D',)
5640 _datetime_types = tuple()
5641 _decimal_types = ('N', 'F')
5642 _fixed_types = ('D', 'L', 'M')
5643 _logical_types = ('L',)
5644 _memo_types = ('M',)
5645 _numeric_types = ('N', 'F')
5646 _variable_types = ('C', 'N')
5647 _dbfTableHeader = array('c', '\x00' * 32)
5648 _dbfTableHeader[0] = '\x03'
5649 _dbfTableHeader[8:10] = array('c', pack_short_int(33))
5650 _dbfTableHeader[10] = '\x01'
5651 _dbfTableHeader[29] = '\x03'
5652 _dbfTableHeader = _dbfTableHeader.tostring()
5653 _dbfTableHeaderExtra = ''
5654 _supported_tables = ['\x03', '\x83']
5655
- def _check_memo_integrity(self):
5657 """
5658 dBase III and Clipper
5659 """
5660 if not self._meta.ignorememos:
5661 memo_fields = False
5662 for field in self._meta.fields:
5663 if self._meta[field][TYPE] in self._memo_types:
5664 memo_fields = True
5665 break
5666 if memo_fields and self._meta.header.version != '\x83':
5667 self._meta.dfd.close()
5668 self._meta.dfd = None
5669 raise BadDataError("Table structure corrupt: memo fields exist, header declares no memos")
5670 elif memo_fields and not os.path.exists(self._meta.memoname):
5671 self._meta.dfd.close()
5672 self._meta.dfd = None
5673 raise BadDataError("Table structure corrupt: memo fields exist without memo file")
5674 if memo_fields:
5675 try:
5676 self._meta.memo = self._memoClass(self._meta)
5677 except Exception:
5678 exc = sys.exc_info()[1]
5679 self._meta.dfd.close()
5680 self._meta.dfd = None
5681 raise BadDataError("Table structure corrupt: unable to use memo file (%s)" % exc.args[-1])
5682
- def _initialize_fields(self):
5684 """
5685 builds the FieldList of names, types, and descriptions
5686 """
5687 old_fields = defaultdict(dict)
5688 meta = self._meta
5689 for name in meta.fields:
5690 old_fields[name]['type'] = meta[name][TYPE]
5691 old_fields[name]['empty'] = meta[name][EMPTY]
5692 old_fields[name]['class'] = meta[name][CLASS]
5693 meta.fields[:] = []
5694 offset = 1
5695 fieldsdef = meta.header.fields
5696 if len(fieldsdef) % 32 != 0:
5697 raise BadDataError("field definition block corrupt: %d bytes in size" % len(fieldsdef))
5698 if len(fieldsdef) // 32 != meta.header.field_count:
5699 raise BadDataError("Header shows %d fields, but field definition block has %d fields" % (meta.header.field_count, len(fieldsdef) // 32))
5700 total_length = meta.header.record_length
5701 for i in range(meta.header.field_count):
5702 fieldblock = fieldsdef[i*32:(i+1)*32]
5703 name = unpack_str(fieldblock[:11])
5704 type = fieldblock[11]
5705 if not type in meta.fieldtypes:
5706 raise BadDataError("Unknown field type: %s" % type)
5707 start = offset
5708 length = ord(fieldblock[16])
5709 offset += length
5710 end = start + length
5711 decimals = ord(fieldblock[17])
5712 flags = ord(fieldblock[18])
5713 if name in meta.fields:
5714 raise BadDataError('Duplicate field name found: %s' % name)
5715 meta.fields.append(name)
5716 if name in old_fields and old_fields[name]['type'] == type:
5717 cls = old_fields[name]['class']
5718 empty = old_fields[name]['empty']
5719 else:
5720 cls = meta.fieldtypes[type]['Class']
5721 empty = meta.fieldtypes[type]['Empty']
5722 meta[name] = (
5723 type,
5724 start,
5725 length,
5726 end,
5727 decimals,
5728 flags,
5729 cls,
5730 empty,
5731 )
5732 if offset != total_length:
5733 raise BadDataError("Header shows record length of %d, but calculated record length is %d" % (total_length, offset))
5734 meta.user_fields = [f for f in meta.fields if not meta[f][FLAGS] & SYSTEM]
5735 meta.user_field_count = len(meta.user_fields)
5736 Record._create_blank_data(meta)
5737
-class ClpTable(Db3Table):
5740 """
5741 Provides an interface for working with Clipper tables.
5742 """
5743
5744 _version = 'Clipper 5'
5745 _versionabbr = 'clp'
5746
5747 @MutableDefault
5749 return {
5750 'C' : {
5751 'Type':'Character', 'Retrieve':retrieve_character, 'Update':update_character, 'Blank':lambda x: ' ' * x, 'Init':add_clp_character,
5752 'Class':unicode, 'Empty':unicode, 'flags':tuple(),
5753 },
5754 'D' : {
5755 'Type':'Date', 'Retrieve':retrieve_date, 'Update':update_date, 'Blank':lambda x: ' ', 'Init':add_date,
5756 'Class':datetime.date, 'Empty':none, 'flags':tuple(),
5757 },
5758 'F' : {
5759 'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: ' ' * x, 'Init':add_numeric,
5760 'Class':'default', 'Empty':none, 'flags':tuple(),
5761 },
5762 'L' : {
5763 'Type':'Logical', 'Retrieve':retrieve_logical, 'Update':update_logical, 'Blank':lambda x: '?', 'Init':add_logical,
5764 'Class':bool, 'Empty':none, 'flags':tuple(),
5765 },
5766 'M' : {
5767 'Type':'Memo', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: ' ', 'Init':add_memo,
5768 'Class':unicode, 'Empty':unicode, 'flags':tuple(),
5769 },
5770 'N' : {
5771 'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: ' ' * x, 'Init':add_numeric,
5772 'Class':'default', 'Empty':none, 'flags':tuple(),
5773 } }
5774
5775 _memoext = '.dbt'
5776 _memoClass = _Db3Memo
5777 _yesMemoMask = '\x80'
5778 _noMemoMask = '\x7f'
5779 _binary_types = ()
5780 _character_types = ('C', 'M')
5781 _currency_types = tuple()
5782 _date_types = ('D',)
5783 _datetime_types = tuple()
5784 _decimal_types = ('N', 'F')
5785 _fixed_types = ('D', 'L', 'M')
5786 _logical_types = ('L',)
5787 _memo_types = ('M',)
5788 _numeric_types = ('N', 'F')
5789 _variable_types = ('C', 'N')
5790 _dbfTableHeader = array('c', '\x00' * 32)
5791 _dbfTableHeader[0] = '\x03'
5792 _dbfTableHeader[8:10] = array('c', pack_short_int(33))
5793 _dbfTableHeader[10] = '\x01'
5794 _dbfTableHeader[29] = '\x03'
5795 _dbfTableHeader = _dbfTableHeader.tostring()
5796 _dbfTableHeaderExtra = ''
5797 _supported_tables = ['\x03', '\x83']
5798
5800 """
5801 represents the data block that defines a table's type and layout
5802 """
5803
5804 @property
- def fields(self):
5806 "field block structure"
5807 fieldblock = self._data[32:]
5808 for i in range(len(fieldblock)//32+1):
5809 cr = i * 32
5810 if fieldblock[cr] == '\x0d':
5811 break
5812 else:
5813 raise BadDataError("corrupt field structure")
5814 return fieldblock[:cr].tostring()
5815
5816 @fields.setter
- def fields(self, block):
5818 fieldblock = self._data[32:]
5819 for i in range(len(fieldblock)//32+1):
5820 cr = i * 32
5821 if fieldblock[cr] == '\x0d':
5822 break
5823 else:
5824 raise BadDataError("corrupt field structure")
5825 cr += 32
5826 fieldlen = len(block)
5827 if fieldlen % 32 != 0:
5828 raise BadDataError("fields structure corrupt: %d is not a multiple of 32" % fieldlen)
5829 self._data[32:cr] = array('c', block)
5830 self._data[8:10] = array('c', pack_short_int(len(self._data)))
5831 fieldlen = fieldlen // 32
5832 recordlen = 1
5833 for i in range(fieldlen):
5834 recordlen += ord(block[i*32+16])
5835 if block[i*32+11] == 'C':
5836 recordlen += ord(block[i*32+17]) * 256
5837 self._data[10:12] = array('c', pack_short_int(recordlen))
5838
5839
- def _build_header_fields(self):
5841 """
5842 constructs fieldblock for disk table
5843 """
5844 fieldblock = array('c', '')
5845 memo = False
5846 nulls = False
5847 meta = self._meta
5848 header = meta.header
5849 header.version = chr(ord(header.version) & ord(self._noMemoMask))
5850 meta.fields = [f for f in meta.fields if f != '_nullflags']
5851 total_length = 1
5852 for field in meta.fields:
5853 layout = meta[field]
5854 if meta.fields.count(field) > 1:
5855 raise BadDataError("corrupted field structure (noticed in _build_header_fields)")
5856 fielddef = array('c', '\x00' * 32)
5857 fielddef[:11] = array('c', pack_str(meta.encoder(field)[0]))
5858 fielddef[11] = layout[TYPE]
5859 fielddef[12:16] = array('c', pack_long_int(layout[START]))
5860 total_length += layout[LENGTH]
5861 if layout[TYPE] == 'C':
5862 fielddef[16] = chr(layout[LENGTH] % 256)
5863 fielddef[17] = chr(layout[LENGTH] // 256)
5864 else:
5865 fielddef[16] = chr(layout[LENGTH])
5866 fielddef[17] = chr(layout[DECIMALS])
5867 fielddef[18] = chr(layout[FLAGS])
5868 fieldblock.extend(fielddef)
5869 if layout[TYPE] in meta.memo_types:
5870 memo = True
5871 if layout[FLAGS] & NULLABLE:
5872 nulls = True
5873 if memo:
5874 header.version = chr(ord(header.version) | ord(self._yesMemoMask))
5875 if meta.memo is None:
5876 meta.memo = self._memoClass(meta)
5877 else:
5878 if os.path.exists(meta.memoname):
5879 if meta.mfd is not None:
5880 meta.mfd.close()
5881
5882 os.remove(meta.memoname)
5883 meta.memo = None
5884 if nulls:
5885 start = layout[START] + layout[LENGTH]
5886 length, one_more = divmod(len(meta.fields), 8)
5887 if one_more:
5888 length += 1
5889 fielddef = array('c', '\x00' * 32)
5890 fielddef[:11] = array('c', pack_str('_nullflags'))
5891 fielddef[11] = '0'
5892 fielddef[12:16] = array('c', pack_long_int(start))
5893 fielddef[16] = chr(length)
5894 fielddef[17] = chr(0)
5895 fielddef[18] = chr(BINARY | SYSTEM)
5896 fieldblock.extend(fielddef)
5897 meta.fields.append('_nullflags')
5898 nullflags = (
5899 '0',
5900 start,
5901 length,
5902 start + length,
5903 0,
5904 BINARY | SYSTEM,
5905 none,
5906 none,
5907 )
5908 meta['_nullflags'] = nullflags
5909 header.fields = fieldblock.tostring()
5910 header.record_length = total_length
5911 meta.user_fields = [f for f in meta.fields if not meta[f][FLAGS] & SYSTEM]
5912 meta.user_field_count = len(meta.user_fields)
5913 Record._create_blank_data(meta)
5914
- def _initialize_fields(self):
5916 """
5917 builds the FieldList of names, types, and descriptions
5918 """
5919 meta = self._meta
5920 old_fields = defaultdict(dict)
5921 for name in meta.fields:
5922 old_fields[name]['type'] = meta[name][TYPE]
5923 old_fields[name]['empty'] = meta[name][EMPTY]
5924 old_fields[name]['class'] = meta[name][CLASS]
5925 meta.fields[:] = []
5926 offset = 1
5927 fieldsdef = meta.header.fields
5928 if len(fieldsdef) % 32 != 0:
5929 raise BadDataError("field definition block corrupt: %d bytes in size" % len(fieldsdef))
5930 if len(fieldsdef) // 32 != meta.header.field_count:
5931 raise BadDataError("Header shows %d fields, but field definition block has %d fields"
5932 % (meta.header.field_count, len(fieldsdef) // 32))
5933 total_length = meta.header.record_length
5934 for i in range(meta.header.field_count):
5935 fieldblock = fieldsdef[i*32:(i+1)*32]
5936 name = unpack_str(fieldblock[:11])
5937 type = fieldblock[11]
5938 if not type in meta.fieldtypes:
5939 raise BadDataError("Unknown field type: %s" % type)
5940 start = offset
5941 length = ord(fieldblock[16])
5942 decimals = ord(fieldblock[17])
5943 if type == 'C':
5944 length += decimals * 256
5945 offset += length
5946 end = start + length
5947 flags = ord(fieldblock[18])
5948 if name in meta.fields:
5949 raise BadDataError('Duplicate field name found: %s' % name)
5950 meta.fields.append(name)
5951 if name in old_fields and old_fields[name]['type'] == type:
5952 cls = old_fields[name]['class']
5953 empty = old_fields[name]['empty']
5954 else:
5955 cls = meta.fieldtypes[type]['Class']
5956 empty = meta.fieldtypes[type]['Empty']
5957 meta[name] = (
5958 type,
5959 start,
5960 length,
5961 end,
5962 decimals,
5963 flags,
5964 cls,
5965 empty,
5966 )
5967 if offset != total_length:
5968 raise BadDataError("Header shows record length of %d, but calculated record length is %d"
5969 % (total_length, offset))
5970 meta.user_fields = [f for f in meta.fields if not meta[f][FLAGS] & SYSTEM]
5971 meta.user_field_count = len(meta.user_fields)
5972 Record._create_blank_data(meta)
5973
-class FpTable(Table):
5976 """
5977 Provides an interface for working with FoxPro 2 tables
5978 """
5979
5980 _version = 'Foxpro'
5981 _versionabbr = 'fp'
5982
5983 @MutableDefault
5985 return {
5986 'C' : {
5987 'Type':'Character', 'Retrieve':retrieve_character, 'Update':update_character, 'Blank':lambda x: ' ' * x, 'Init':add_vfp_character,
5988 'Class':unicode, 'Empty':unicode, 'flags':('binary', 'nocptrans', 'null', ),
5989 },
5990 'F' : {
5991 'Type':'Float', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: ' ' * x, 'Init':add_vfp_numeric,
5992 'Class':'default', 'Empty':none, 'flags':('null', ),
5993 },
5994 'N' : {
5995 'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: ' ' * x, 'Init':add_vfp_numeric,
5996 'Class':'default', 'Empty':none, 'flags':('null', ),
5997 },
5998 'L' : {
5999 'Type':'Logical', 'Retrieve':retrieve_logical, 'Update':update_logical, 'Blank':lambda x: '?', 'Init':add_logical,
6000 'Class':bool, 'Empty':none, 'flags':('null', ),
6001 },
6002 'D' : {
6003 'Type':'Date', 'Retrieve':retrieve_date, 'Update':update_date, 'Blank':lambda x: ' ', 'Init':add_date,
6004 'Class':datetime.date, 'Empty':none, 'flags':('null', ),
6005 },
6006 'M' : {
6007 'Type':'Memo', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: ' ', 'Init':add_memo,
6008 'Class':unicode, 'Empty':unicode, 'flags':('binary', 'nocptrans', 'null', ),
6009 },
6010 'G' : {
6011 'Type':'General', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: ' ', 'Init':add_memo,
6012 'Class':bytes, 'Empty':bytes, 'flags':('null', ),
6013 },
6014 'P' : {
6015 'Type':'Picture', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: ' ', 'Init':add_memo,
6016 'Class':bytes, 'Empty':bytes, 'flags':('null', ),
6017 },
6018 '0' : {
6019 'Type':'_NullFlags', 'Retrieve':unsupported_type, 'Update':unsupported_type, 'Blank':lambda x: '\x00' * x, 'Init':None,
6020 'Class':none, 'Empty':none, 'flags':('binary', 'system', ),
6021 } }
6022
6023 _memoext = '.fpt'
6024 _memoClass = _VfpMemo
6025 _yesMemoMask = '\xf5'
6026 _noMemoMask = '\x03'
6027 _binary_types = ('G', 'P')
6028 _character_types = ('C', 'D', 'F', 'L', 'M', 'N')
6029 _currency_types = tuple()
6030 _date_types = ('D',)
6031 _datetime_types = tuple()
6032 _fixed_types = ('D', 'G', 'L', 'M', 'P')
6033 _logical_types = ('L',)
6034 _memo_types = ('G', 'M', 'P')
6035 _numeric_types = ('F', 'N')
6036 _text_types = ('C', 'M')
6037 _variable_types = ('C', 'F', 'N')
6038 _supported_tables = ('\x03', '\xf5')
6039 _dbfTableHeader = array('c', '\x00' * 32)
6040 _dbfTableHeader[0] = '\x30'
6041 _dbfTableHeader[8:10] = array('c', pack_short_int(33 + 263))
6042 _dbfTableHeader[10] = '\x01'
6043 _dbfTableHeader[29] = '\x03'
6044 _dbfTableHeader = _dbfTableHeader.tostring()
6045 _dbfTableHeaderExtra = '\x00' * 263
6046
6070
- def _initialize_fields(self):
6072 """
6073 builds the FieldList of names, types, and descriptions
6074 """
6075 meta = self._meta
6076 old_fields = defaultdict(dict)
6077 for name in meta.fields:
6078 old_fields[name]['type'] = meta[name][TYPE]
6079 old_fields[name]['class'] = meta[name][CLASS]
6080 old_fields[name]['empty'] = meta[name][EMPTY]
6081 meta.fields[:] = []
6082 offset = 1
6083 fieldsdef = meta.header.fields
6084 if len(fieldsdef) % 32 != 0:
6085 raise BadDataError("field definition block corrupt: %d bytes in size" % len(fieldsdef))
6086 if len(fieldsdef) // 32 != meta.header.field_count:
6087 raise BadDataError("Header shows %d fields, but field definition block has %d fields"
6088 % (meta.header.field_count, len(fieldsdef) // 32))
6089 total_length = meta.header.record_length
6090 for i in range(meta.header.field_count):
6091 fieldblock = fieldsdef[i*32:(i+1)*32]
6092 name = unpack_str(fieldblock[:11])
6093 type = fieldblock[11]
6094 if not type in meta.fieldtypes:
6095 raise BadDataError("Unknown field type: %s" % type)
6096 start = offset
6097 length = ord(fieldblock[16])
6098 offset += length
6099 end = start + length
6100 decimals = ord(fieldblock[17])
6101 flags = ord(fieldblock[18])
6102 if name in meta.fields:
6103 raise BadDataError('Duplicate field name found: %s' % name)
6104 meta.fields.append(name)
6105 if name in old_fields and old_fields[name]['type'] == type:
6106 cls = old_fields[name]['class']
6107 empty = old_fields[name]['empty']
6108 else:
6109 cls = meta.fieldtypes[type]['Class']
6110 empty = meta.fieldtypes[type]['Empty']
6111 meta[name] = (
6112 type,
6113 start,
6114 length,
6115 end,
6116 decimals,
6117 flags,
6118 cls,
6119 empty,
6120 )
6121 if offset != total_length:
6122 raise BadDataError("Header shows record length of %d, but calculated record length is %d" % (total_length, offset))
6123 meta.user_fields = [f for f in meta.fields if not meta[f][FLAGS] & SYSTEM]
6124 meta.user_field_count = len(meta.user_fields)
6125 Record._create_blank_data(meta)
6126
6127 @staticmethod
- def _pack_date(date):
6129 """
6130 Returns a group of three bytes, in integer form, of the date
6131 """
6132 return "%c%c%c" % (date.year - 2000, date.month, date.day)
6133
6134 @staticmethod
- def _unpack_date(bytestr):
6136 """
6137 Returns a Date() of the packed three-byte date passed in
6138 """
6139 year, month, day = struct.unpack('<BBB', bytestr)
6140 year += 2000
6141 return Date(year, month, day)
6142
-class VfpTable(FpTable):
6144 """
6145 Provides an interface for working with Visual FoxPro 6 tables
6146 """
6147
6148 _version = 'Visual Foxpro'
6149 _versionabbr = 'vfp'
6150
6151 @MutableDefault
6153 return {
6154 'C' : {
6155 'Type':'Character', 'Retrieve':retrieve_character, 'Update':update_character, 'Blank':lambda x: ' ' * x, 'Init':add_vfp_character,
6156 'Class':unicode, 'Empty':unicode, 'flags':('binary', 'nocptrans', 'null', ),
6157 },
6158 'Y' : {
6159 'Type':'Currency', 'Retrieve':retrieve_currency, 'Update':update_currency, 'Blank':lambda x: '\x00' * 8, 'Init':add_vfp_currency,
6160 'Class':Decimal, 'Empty':none, 'flags':('null', ),
6161 },
6162 'B' : {
6163 'Type':'Double', 'Retrieve':retrieve_double, 'Update':update_double, 'Blank':lambda x: '\x00' * 8, 'Init':add_vfp_double,
6164 'Class':float, 'Empty':none, 'flags':('null', ),
6165 },
6166 'F' : {
6167 'Type':'Float', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: ' ' * x, 'Init':add_vfp_numeric,
6168 'Class':'default', 'Empty':none, 'flags':('null', ),
6169 },
6170 'N' : {
6171 'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: ' ' * x, 'Init':add_vfp_numeric,
6172 'Class':'default', 'Empty':none, 'flags':('null', ),
6173 },
6174 'I' : {
6175 'Type':'Integer', 'Retrieve':retrieve_integer, 'Update':update_integer, 'Blank':lambda x: '\x00' * 4, 'Init':add_vfp_integer,
6176 'Class':int, 'Empty':none, 'flags':('null', ),
6177 },
6178 'L' : {
6179 'Type':'Logical', 'Retrieve':retrieve_logical, 'Update':update_logical, 'Blank':lambda x: '?', 'Init':add_logical,
6180 'Class':bool, 'Empty':none, 'flags':('null', ),
6181 },
6182 'D' : {
6183 'Type':'Date', 'Retrieve':retrieve_date, 'Update':update_date, 'Blank':lambda x: ' ', 'Init':add_date,
6184 'Class':datetime.date, 'Empty':none, 'flags':('null', ),
6185 },
6186 'T' : {
6187 'Type':'DateTime', 'Retrieve':retrieve_vfp_datetime, 'Update':update_vfp_datetime, 'Blank':lambda x: '\x00' * 8, 'Init':add_vfp_datetime,
6188 'Class':datetime.datetime, 'Empty':none, 'flags':('null', ),
6189 },
6190 'M' : {
6191 'Type':'Memo', 'Retrieve':retrieve_vfp_memo, 'Update':update_vfp_memo, 'Blank':lambda x: '\x00\x00\x00\x00', 'Init':add_vfp_memo,
6192 'Class':unicode, 'Empty':unicode, 'flags':('binary', 'nocptrans', 'null', ),
6193 },
6194 'G' : {
6195 'Type':'General', 'Retrieve':retrieve_vfp_memo, 'Update':update_vfp_memo, 'Blank':lambda x: '\x00\x00\x00\x00', 'Init':add_vfp_memo,
6196 'Class':bytes, 'Empty':bytes, 'flags':('null', ),
6197 },
6198 'P' : {
6199 'Type':'Picture', 'Retrieve':retrieve_vfp_memo, 'Update':update_vfp_memo, 'Blank':lambda x: '\x00\x00\x00\x00', 'Init':add_vfp_memo,
6200 'Class':bytes, 'Empty':bytes, 'flags':('null', ),
6201 },
6202 '0' : {
6203 'Type':'_NullFlags', 'Retrieve':unsupported_type, 'Update':unsupported_type, 'Blank':lambda x: '\x00' * x, 'Init':int,
6204 'Class':none, 'Empty':none, 'flags':('binary', 'system',),
6205 } }
6206
6207 _memoext = '.fpt'
6208 _memoClass = _VfpMemo
6209 _yesMemoMask = '\x30'
6210 _noMemoMask = '\x30'
6211 _binary_types = ('B', 'G', 'I', 'P', 'T', 'Y')
6212 _character_types = ('C', 'D', 'F', 'L', 'M', 'N')
6213 _currency_types = ('Y',)
6214 _date_types = ('D', 'T')
6215 _datetime_types = ('T',)
6216 _fixed_types = ('B', 'D', 'G', 'I', 'L', 'M', 'P', 'T', 'Y')
6217 _logical_types = ('L',)
6218 _memo_types = ('G', 'M', 'P')
6219 _numeric_types = ('B', 'F', 'I', 'N', 'Y')
6220 _variable_types = ('C', 'F', 'N')
6221 _supported_tables = ('\x30', '\x31')
6222 _dbfTableHeader = array('c', '\x00' * 32)
6223 _dbfTableHeader[0] = '\x30'
6224 _dbfTableHeader[8:10] = array('c', pack_short_int(33 + 263))
6225 _dbfTableHeader[10] = '\x01'
6226 _dbfTableHeader[29] = '\x03'
6227 _dbfTableHeader = _dbfTableHeader.tostring()
6228 _dbfTableHeaderExtra = '\x00' * 263
6229
- def _initialize_fields(self):
6231 """
6232 builds the FieldList of names, types, and descriptions
6233 """
6234 meta = self._meta
6235 old_fields = defaultdict(dict)
6236 for name in meta.fields:
6237 old_fields[name]['type'] = meta[name][TYPE]
6238 old_fields[name]['class'] = meta[name][CLASS]
6239 old_fields[name]['empty'] = meta[name][EMPTY]
6240 meta.fields[:] = []
6241 offset = 1
6242 fieldsdef = meta.header.fields
6243 meta.nullflags = None
6244 total_length = meta.header.record_length
6245 for i in range(meta.header.field_count):
6246 fieldblock = fieldsdef[i*32:(i+1)*32]
6247 name = unpack_str(fieldblock[:11])
6248 type = fieldblock[11]
6249 if not type in meta.fieldtypes:
6250 raise BadDataError("Unknown field type: %s" % type)
6251 start = unpack_long_int(fieldblock[12:16])
6252 length = ord(fieldblock[16])
6253 offset += length
6254 end = start + length
6255 decimals = ord(fieldblock[17])
6256 flags = ord(fieldblock[18])
6257 if name in meta.fields:
6258 raise BadDataError('Duplicate field name found: %s' % name)
6259 meta.fields.append(name)
6260 if name in old_fields and old_fields[name]['type'] == type:
6261 cls = old_fields[name]['class']
6262 empty = old_fields[name]['empty']
6263 else:
6264 cls = meta.fieldtypes[type]['Class']
6265 empty = meta.fieldtypes[type]['Empty']
6266 meta[name] = (
6267 type,
6268 start,
6269 length,
6270 end,
6271 decimals,
6272 flags,
6273 cls,
6274 empty,
6275 )
6276 if offset != total_length:
6277 raise BadDataError("Header shows record length of %d, but calculated record length is %d" % (total_length, offset))
6278 meta.user_fields = [f for f in meta.fields if not meta[f][FLAGS] & SYSTEM]
6279 meta.user_field_count = len(meta.user_fields)
6280 Record._create_blank_data(meta)
6281
6282
6283 -class List(_Navigation):
6284 """
6285 list of Dbf records, with set-like behavior
6286 """
6287
6288 _desc = ''
6289
6290 - def __init__(self, records=None, desc=None, key=None):
6291 self._list = []
6292 self._set = set()
6293 self._tables = dict()
6294 if key is not None:
6295 self.key = key
6296 if key.__doc__ is None:
6297 key.__doc__ = 'unknown'
6298 key = self.key
6299 self._current = -1
6300 if isinstance(records, self.__class__) and key is records.key:
6301 self._list = records._list[:]
6302 self._set = records._set.copy()
6303 self._current = 0
6304 elif records is not None:
6305 for record in records:
6306 value = key(record)
6307 item = (source_table(record), recno(record), value)
6308 if value not in self._set:
6309 self._set.add(value)
6310 self._list.append(item)
6311 self._current = 0
6312 if desc is not None:
6313 self._desc = desc
6314
6316 self._still_valid_check()
6317 key = self.key
6318 if isinstance(other, (Table, list)):
6319 other = self.__class__(other, key=key)
6320 if isinstance(other, self.__class__):
6321 other._still_valid_check()
6322 result = self.__class__()
6323 result._set = self._set.copy()
6324 result._list[:] = self._list[:]
6325 result._tables = {}
6326 result._tables.update(self._tables)
6327 result.key = self.key
6328 if key is other.key:
6329 for item in other._list:
6330 result._maybe_add(item)
6331 else:
6332 for rec in other:
6333 result._maybe_add((source_table(rec), recno(rec), key(rec)))
6334 return result
6335 return NotImplemented
6336
6338 self._still_valid_check()
6339 if not isinstance(data, (Record, RecordTemplate, tuple, dict)):
6340 raise TypeError("%r is not a record, template, tuple, nor dict" % (data, ))
6341 try:
6342 item = self.key(data)
6343 if not isinstance(item, tuple):
6344 item = (item, )
6345 return item in self._set
6346 except Exception:
6347 for record in self:
6348 if record == data:
6349 return True
6350 return False
6351
6353 self._still_valid_check()
6354 if isinstance(key, int):
6355 item = self._list.pop(key)
6356 self._set.remove(item[2])
6357 elif isinstance(key, slice):
6358 self._set.difference_update([item[2] for item in self._list[key]])
6359 self._list.__delitem__(key)
6360 elif isinstance(key, (Record, RecordTemplate, dict, tuple)):
6361 index = self.index(key)
6362 item = self._list.pop(index)
6363 self._set.remove(item[2])
6364 else:
6365 raise TypeError('%r should be an int, slice, record, template, tuple, or dict -- not a %r' % (key, type(key)))
6366
6368 self._still_valid_check()
6369 if isinstance(key, int):
6370 count = len(self._list)
6371 if not -count <= key < count:
6372 raise NotFoundError("Record %d is not in list." % key)
6373 return self._get_record(*self._list[key])
6374 elif isinstance(key, slice):
6375 result = self.__class__()
6376 result._list[:] = self._list[key]
6377 result._set = set([item[2] for item in result._list])
6378 result.key = self.key
6379 return result
6380 elif isinstance(key, (Record, RecordTemplate, dict, tuple)):
6381 index = self.index(key)
6382 return self._get_record(*self._list[index])
6383 else:
6384 raise TypeError('%r should be an int, slice, record, record template, tuple, or dict -- not a %r' % (key, type(key)))
6385
6389
6393
6397
6399 self._still_valid_check()
6400 key = self.key
6401 if isinstance(other, (Table, list)):
6402 other = self.__class__(other, key=key)
6403 if isinstance(other, self.__class__):
6404 other._still_valid_check()
6405 result = other.__class__()
6406 result._set = other._set.copy()
6407 result._list[:] = other._list[:]
6408 result._tables = {}
6409 result._tables.update(self._tables)
6410 result.key = other.key
6411 if key is other.key:
6412 for item in self._list:
6413 result._maybe_add(item)
6414 else:
6415 for rec in self:
6416 result._maybe_add((source_table(rec), recno(rec), key(rec)))
6417 return result
6418 return NotImplemented
6419
6421 self._still_valid_check()
6422 if self._desc:
6423 return "%s(key=(%s), desc=%s)" % (self.__class__, self.key.__doc__, self._desc)
6424 else:
6425 return "%s(key=(%s))" % (self.__class__, self.key.__doc__)
6426
6428 self._still_valid_check()
6429 key = self.key
6430 if isinstance(other, (Table, list)):
6431 other = self.__class__(other, key=key)
6432 if isinstance(other, self.__class__):
6433 other._still_valid_check()
6434 result = other.__class__()
6435 result._list[:] = other._list[:]
6436 result._set = other._set.copy()
6437 result._tables = {}
6438 result._tables.update(other._tables)
6439 result.key = key
6440 lost = set()
6441 if key is other.key:
6442 for item in self._list:
6443 if item[2] in result._set:
6444 result._set.remove(item[2])
6445 lost.add(item[2])
6446 else:
6447 for rec in self:
6448 value = key(rec)
6449 if value in result._set:
6450 result._set.remove(value)
6451 lost.add(value)
6452 result._list = [item for item in result._list if item[2] not in lost]
6453 lost = set(result._tables.keys())
6454 for table, _1, _2 in result._list:
6455 if table in result._tables:
6456 lost.remove(table)
6457 if not lost:
6458 break
6459 for table in lost:
6460 del result._tables[table]
6461 return result
6462 return NotImplemented
6463
6465 self._still_valid_check()
6466 key = self.key
6467 if isinstance(other, (Table, list)):
6468 other = self.__class__(other, key=key)
6469 if isinstance(other, self.__class__):
6470 other._still_valid_check()
6471 result = self.__class__()
6472 result._list[:] = self._list[:]
6473 result._set = self._set.copy()
6474 result._tables = {}
6475 result._tables.update(self._tables)
6476 result.key = key
6477 lost = set()
6478 if key is other.key:
6479 for item in other._list:
6480 if item[2] in result._set:
6481 result._set.remove(item[2])
6482 lost.add(item[2])
6483 else:
6484 for rec in other:
6485 value = key(rec)
6486 if value in result._set:
6487 result._set.remove(value)
6488 lost.add(value)
6489 result._list = [item for item in result._list if item[2] not in lost]
6490 lost = set(result._tables.keys())
6491 for table, _1, _2 in result._list:
6492 if table in result._tables:
6493 lost.remove(table)
6494 if not lost:
6495 break
6496 for table in lost:
6497 del result._tables[table]
6498 return result
6499 return NotImplemented
6500
6508
6509 - def _get_record(self, table=None, rec_no=None, value=None):
6510 if table is rec_no is None:
6511 table, rec_no, value = self._list[self._index]
6512 return table[rec_no]
6513
6514 - def _purge(self, record, old_record_number, offset):
6515 partial = source_table(record), old_record_number
6516 records = sorted(self._list, key=lambda item: (item[0], item[1]))
6517 for item in records:
6518 if partial == item[:2]:
6519 found = True
6520 break
6521 elif partial[0] is item[0] and partial[1] < item[1]:
6522 found = False
6523 break
6524 else:
6525 found = False
6526 if found:
6527 self._list.pop(self._list.index(item))
6528 self._set.remove(item[2])
6529 start = records.index(item) + found
6530 for item in records[start:]:
6531 if item[0] is not partial[0]:
6532 break
6533 i = self._list.index(item)
6534 self._set.remove(item[2])
6535 item = item[0], (item[1] - offset), item[2]
6536 self._list[i] = item
6537 self._set.add(item[2])
6538 return found
6539
6541 for table, last_pack in self._tables.items():
6542 if last_pack != getattr(table, '_pack_count'):
6543 raise DbfError("table has been packed; list is invalid")
6544
6545 _nav_check = _still_valid_check
6546
6550
6552 self._list = []
6553 self._set = set()
6554 self._index = -1
6555 self._tables.clear()
6556
6572
6573 - def index(self, record, start=None, stop=None):
6574 """
6575 returns the index of record between start and stop
6576 start and stop default to the first and last record
6577 """
6578 if not isinstance(record, (Record, RecordTemplate, dict, tuple)):
6579 raise TypeError("x should be a record, template, dict, or tuple, not %r" % type(record))
6580 self._still_valid_check()
6581 if start is None:
6582 start = 0
6583 if stop is None:
6584 stop = len(self)
6585 for i in range(start, stop):
6586 if record == (self[i]):
6587 return i
6588 else:
6589 raise NotFoundError("dbf.List.index(x): x not in List", data=record)
6590
6591 - def insert(self, i, record):
6597
6598 - def key(self, record):
6604
6605 - def pop(self, index=None):
6613
6614 - def query(self, criteria):
6615 """
6616 criteria is a callback that returns a truthy value for each matching record
6617 """
6618 return pql(self, criteria)
6619
6621 self._still_valid_check()
6622 if not isinstance(data, (Record, RecordTemplate, dict, tuple)):
6623 raise TypeError("%r(%r) is not a record, template, tuple, nor dict" % (type(data), data))
6624 index = self.index(data)
6625 record = self[index]
6626 item = source_table(record), recno(record), self.key(record)
6627 self._list.remove(item)
6628 self._set.remove(item[2])
6629
6633
6634 - def sort(self, key=None, reverse=False):
6639
6640
6641 -class Index(_Navigation):
6642 """
6643 non-persistent index for a table
6644 """
6645
6647 self._table = table
6648 self._values = []
6649 self._rec_by_val = []
6650 self._records = {}
6651 self.__doc__ = key.__doc__ or 'unknown'
6652 self._key = key
6653 self._previous_status = []
6654 for record in table:
6655 value = key(record)
6656 if value is DoNotIndex:
6657 continue
6658 rec_num = recno(record)
6659 if not isinstance(value, tuple):
6660 value = (value, )
6661 vindex = bisect_right(self._values, value)
6662 self._values.insert(vindex, value)
6663 self._rec_by_val.insert(vindex, rec_num)
6664 self._records[rec_num] = value
6665 table._indexen.add(self)
6666
6668 rec_num = recno(record)
6669 key = self.key(record)
6670 if rec_num in self._records:
6671 if self._records[rec_num] == key:
6672 return
6673 old_key = self._records[rec_num]
6674 vindex = bisect_left(self._values, old_key)
6675 self._values.pop(vindex)
6676 self._rec_by_val.pop(vindex)
6677 del self._records[rec_num]
6678 assert rec_num not in self._records
6679 if key == (DoNotIndex, ):
6680 return
6681 vindex = bisect_right(self._values, key)
6682 self._values.insert(vindex, key)
6683 self._rec_by_val.insert(vindex, rec_num)
6684 self._records[rec_num] = key
6685
6687 if not isinstance(data, (Record, RecordTemplate, tuple, dict)):
6688 raise TypeError("%r is not a record, template, tuple, nor dict" % (data, ))
6689 try:
6690 value = self.key(data)
6691 return value in self._values
6692 except Exception:
6693 for record in self:
6694 if record == data:
6695 return True
6696 return False
6697
6699 '''if key is an integer, returns the matching record;
6700 if key is a [slice | string | tuple | record] returns a List;
6701 raises NotFoundError on failure'''
6702 if isinstance(key, int):
6703 count = len(self._values)
6704 if not -count <= key < count:
6705 raise NotFoundError("Record %d is not in list." % key)
6706 rec_num = self._rec_by_val[key]
6707 return self._table[rec_num]
6708 elif isinstance(key, slice):
6709 result = List()
6710 start, stop, step = key.start, key.stop, key.step
6711 if start is None: start = 0
6712 if stop is None: stop = len(self._rec_by_val)
6713 if step is None: step = 1
6714 if step < 0:
6715 start, stop = stop - 1, -(stop - start + 1)
6716 for loc in range(start, stop, step):
6717 record = self._table[self._rec_by_val[loc]]
6718 result._maybe_add(item=(self._table, self._rec_by_val[loc], result.key(record)))
6719 return result
6720 elif isinstance (key, (basestring, tuple, Record, RecordTemplate)):
6721 if isinstance(key, (Record, RecordTemplate)):
6722 key = self.key(key)
6723 elif isinstance(key, basestring):
6724 key = (key, )
6725 lo = self._search(key, where='left')
6726 hi = self._search(key, where='right')
6727 if lo == hi:
6728 raise NotFoundError(key)
6729 result = List(desc='match = %r' % (key, ))
6730 for loc in range(lo, hi):
6731 record = self._table[self._rec_by_val[loc]]
6732 result._maybe_add(item=(self._table, self._rec_by_val[loc], result.key(record)))
6733 return result
6734 else:
6735 raise TypeError('indices must be integers, match objects must be strings or tuples')
6736
6738 self._table.__enter__()
6739 return self
6740
6742 self._table.__exit__()
6743 return False
6744
6747
6749 return len(self._records)
6750
6752 """
6753 removes all entries from index
6754 """
6755 self._values[:] = []
6756 self._rec_by_val[:] = []
6757 self._records.clear()
6758
6759 - def _key(self, record):
6765
6772
6774 target = target[:len(match)]
6775 if isinstance(match[-1], basestring):
6776 target = list(target)
6777 target[-1] = target[-1][:len(match[-1])]
6778 target = tuple(target)
6779 return target == match
6780
6782 value = self._records.get(rec_num)
6783 if value is not None:
6784 vindex = bisect_left(self._values, value)
6785 del self._records[rec_num]
6786 self._values.pop(vindex)
6787 self._rec_by_val.pop(vindex)
6788
6790 """
6791 reindexes all records
6792 """
6793 for record in self._table:
6794 self(record)
6795
6796 - def _search(self, match, lo=0, hi=None, where=None):
6797 if hi is None:
6798 hi = len(self._values)
6799 if where == 'left':
6800 return bisect_left(self._values, match, lo, hi)
6801 elif where == 'right':
6802 return bisect_right(self._values, match, lo, hi)
6803
6804 - def index(self, record, start=None, stop=None):
6805 """
6806 returns the index of record between start and stop
6807 start and stop default to the first and last record
6808 """
6809 if not isinstance(record, (Record, RecordTemplate, dict, tuple)):
6810 raise TypeError("x should be a record, template, dict, or tuple, not %r" % type(record))
6811 self._nav_check()
6812 if start is None:
6813 start = 0
6814 if stop is None:
6815 stop = len(self)
6816 for i in range(start, stop):
6817 if record == (self[i]):
6818 return i
6819 else:
6820 raise NotFoundError("dbf.Index.index(x): x not in Index", data=record)
6821
6822 - def index_search(self, match, start=None, stop=None, nearest=False, partial=False):
6823 """
6824 returns the index of match between start and stop
6825 start and stop default to the first and last record.
6826 if nearest is true returns the location of where the match should be
6827 otherwise raises NotFoundError
6828 """
6829 self._nav_check()
6830 if not isinstance(match, tuple):
6831 match = (match, )
6832 if start is None:
6833 start = 0
6834 if stop is None:
6835 stop = len(self)
6836 loc = self._search(match, start, stop, where='left')
6837 if loc == len(self._values):
6838 if nearest:
6839 return IndexLocation(loc, False)
6840 raise NotFoundError("dbf.Index.index_search(x): x not in index", data=match)
6841 if self._values[loc] == match \
6842 or partial and self._partial_match(self._values[loc], match):
6843 return IndexLocation(loc, True)
6844 elif nearest:
6845 return IndexLocation(loc, False)
6846 else:
6847 raise NotFoundError("dbf.Index.index_search(x): x not in Index", data=match)
6848
6849 - def key(self, record):
6850 result = self._key(record)
6851 if not isinstance(result, tuple):
6852 result = (result, )
6853 return result
6854
6855 - def query(self, criteria):
6856 """
6857 criteria is a callback that returns a truthy value for each matching record
6858 """
6859 self._nav_check()
6860 return pql(self, criteria)
6861
6862 - def search(self, match, partial=False):
6863 """
6864 returns dbf.List of all (partially) matching records
6865 """
6866 self._nav_check()
6867 result = List()
6868 if not isinstance(match, tuple):
6869 match = (match, )
6870 loc = self._search(match, where='left')
6871 if loc == len(self._values):
6872 return result
6873 while loc < len(self._values) and self._values[loc] == match:
6874 record = self._table[self._rec_by_val[loc]]
6875 result._maybe_add(item=(self._table, self._rec_by_val[loc], result.key(record)))
6876 loc += 1
6877 if partial:
6878 while loc < len(self._values) and self._partial_match(self._values[loc], match):
6879 record = self._table[self._rec_by_val[loc]]
6880 result._maybe_add(item=(self._table, self._rec_by_val[loc], result.key(record)))
6881 loc += 1
6882 return result
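# Usage sketch (the 'last_name' field is an assumption); an Index supports
# ordered access, membership tests, and (partial) searches:
#
#     by_name = table.create_index(lambda rec: rec.last_name)
#     smiths = by_name.search('Smith')         # dbf.List of matching records
#     first = by_name[0]                       # record with the smallest key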
6883
6886 """
6887 establishes a relation between two dbf tables (not persistent)
6888 """
6889
6890 relations = {}
6891
6892 - def __new__(cls, src, tgt, src_names=None, tgt_names=None):
6893 if (len(src) != 2 or len(tgt) != 2):
6894 raise DbfError("Relation should be called with ((src_table, src_field), (tgt_table, tgt_field))")
6895 if (src_names and len(src_names) != 2) or (tgt_names and len(tgt_names) != 2):
6896 raise DbfError('src_names and tgt_names, if specified, must be ("table","field")')
6897 src_table, src_field = src
6898 tgt_table, tgt_field = tgt
6899 try:
6900 if isinstance(src_field, baseinteger):
6901 table, field = src_table, src_field
6902 src_field = table.field_names[field]
6903 else:
6904 src_table.field_names.index(src_field)
6905 if isinstance(tgt_field, baseinteger):
6906 table, field = tgt_table, tgt_field
6907 tgt_field = table.field_names[field]
6908 else:
6909 tgt_table.field_names.index(tgt_field)
6910 except (IndexError, ValueError):
6911 raise DbfError('%r not in %r' % (field, table))
6912 if src_names:
6913 src_table_name, src_field_name = src_names
6914 else:
6915 src_table_name, src_field_name = src_table.filename, src_field
6916 if src_table_name[-4:].lower() == '.dbf':
6917 src_table_name = src_table_name[:-4]
6918 if tgt_names:
6919 tgt_table_name, tgt_field_name = tgt_names
6920 else:
6921 tgt_table_name, tgt_field_name = tgt_table.filename, tgt_field
6922 if tgt_table_name[-4:].lower() == '.dbf':
6923 tgt_table_name = tgt_table_name[:-4]
6924 relation = cls.relations.get(((src_table, src_field), (tgt_table, tgt_field)))
6925 if relation is not None:
6926 return relation
6927 obj = object.__new__(cls)
6928 obj._src_table, obj._src_field = src_table, src_field
6929 obj._tgt_table, obj._tgt_field = tgt_table, tgt_field
6930 obj._src_table_name, obj._src_field_name = src_table_name, src_field_name
6931 obj._tgt_table_name, obj._tgt_field_name = tgt_table_name, tgt_field_name
6932 obj._tables = dict()
6933 cls.relations[((src_table, src_field), (tgt_table, tgt_field))] = obj
6934 return obj
6935
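# Usage sketch (tables and field names are assumptions); relating the same
# pair of fields twice returns the cached Relation:
#
#     rel = Relation((orders, 'cust_id'), (customers, 'cust_id'))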
6943
6953
6956
6964
6967
6970
6971 @property
6973 "name of source table"
6974 return yo._src_table
6975
6976 @property
6978 "name of source field"
6979 return yo._src_field
6980
6981 @property
6983 return yo._src_table_name
6984
6985 @property
6987 return yo._src_field_name
6988
6989 @property
6991 "name of target table"
6992 return yo._tgt_table
6993
6994 @property
6996 "name of target field"
6997 return yo._tgt_field
6998
6999 @property
7001 return yo._tgt_table_name
7002
7003 @property
7005 return yo._tgt_field_name
7006
    @LazyAttr
    def index(yo):
        def index(record, field=yo._tgt_field):
            return record[field]
        index.__doc__ = "%s:%s --> %s:%s" % (yo.src_table_name, yo.src_field_name, yo.tgt_table_name, yo.tgt_field_name)
        yo.index = yo._tgt_table.create_index(index)
        source = dbf.List(yo._src_table, key=lambda rec, field=yo._src_field: rec[field])
        target = dbf.List(yo._tgt_table, key=lambda rec, field=yo._tgt_field: rec[field])
        if len(source) != len(yo._src_table):
            yo._tables[yo._src_table] = 'many'
        else:
            yo._tables[yo._src_table] = 'one'
        if len(target) != len(yo._tgt_table):
            yo._tables[yo._tgt_table] = 'many'
        else:
            yo._tables[yo._tgt_table] = 'one'
        return yo.index
7024
    def one_or_many(yo, table):
        yo.index    # make sure the index (and yo._tables) has been created
        try:
            if isinstance(table, basestring):
                table = (yo._src_table, yo._tgt_table)[yo._tgt_table_name == table]
            return yo._tables[table]
        except (IndexError, KeyError):
            # an unknown table shows up as a failed dict lookup
            raise NotFoundError("table %s not in relation" % table)
7033
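
# Usage sketch (hypothetical tables and field names; not executed on import):
# a Relation keyed on a shared customer id, reporting which side is 'one' and
# which is 'many'.
#
#   customers = Table('customers.dbf')
#   orders = Table('orders.dbf')
#   customers.open()
#   orders.open()
#   link = Relation((customers, 'cust_id'), (orders, 'cust_id'))
#   link.one_or_many(customers)     # 'one' if cust_id is unique in customers
#   link.one_or_many(orders)        # 'many' if several orders share a cust_id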

class BytesType(object):
    # base descriptor -- subclasses supply size and from_bytes()

    def __init__(self, offset):
        self.offset = offset

    def __get__(self, inst, cls=None):
        if inst is None:
            return self
        start = self.offset
        end = start + self.size
        byte_data = inst._data[start:end]
        return self.from_bytes(byte_data)

7056
7059 """
7060 add big_endian and neg_one to __init__
7061 """
7062
7063 - def __init__(self, offset, big_endian=False, neg_one_is_none=False, one_based=False):
7064 self.offset = offset
7065 self.big_endian = big_endian
7066 self.neg_one_is_none = neg_one_is_none
7067 self.one_based = one_based
7068
7070 if self.neg_one_is_none and byte_data == '\xff' * self.size:
7071 return None
7072 if self.big_endian:
7073 value = struct.unpack('>%s' % self.code, byte_data)[0]
7074 else:
7075 value = struct.unpack('<%s' % self.code, byte_data)[0]
7076 if self.one_based:
7077
7078 value -= 1
7079 return value
7080
7082 if value is None:
7083 if self.neg_one_is_none:
7084 return '\xff\xff'
7085 raise DbfError('unable to store None in %r' % self.__name__)
7086 limit = 2 ** (self.size * 8) - 1
7087 if self.one_based:
7088 limit -= 1
7089 if value > 2 ** limit:
7090 raise DataOverflowError("Maximum Integer size exceeded. Possible: %d. Attempted: %d" % (limit, value))
7091 if self.one_based:
7092 value += 1
7093 if self.big_endian:
7094 return struct.pack('>%s' % self.code, value)
7095 else:
7096 return struct.pack('<%s' % self.code, value)
7097
7098
class Int8(IntBytesType):
    """
    1-byte integer
    """

    size = 1
    code = 'B'


class Int16(IntBytesType):
    """
    2-byte integer
    """

    size = 2
    code = 'H'


class Int32(IntBytesType):
    """
    4-byte integer
    """

    size = 4
    code = 'L'
7125
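
# Encoding sketch (illustrative): a 2-byte, big-endian, one-based field stores
# the Python value 4 as 5 on disk, and an all-0xff value round-trips to None
# when neg_one_is_none is set.
#
#   rec_no = Int16(0, big_endian=True, neg_one_is_none=True, one_based=True)
#   rec_no.to_bytes(4)               # '\x00\x05'
#   rec_no.from_bytes('\x00\x05')    # 4
#   rec_no.from_bytes('\xff\xff')    # None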
class Bytes(BytesType):

    def __init__(self, offset, size=0, fill_to=0, strip_null=False):
        if not (size or fill_to):
            raise DbfError("either size or fill_to must be specified")
        self.offset = offset
        self.size = size
        self.fill_to = fill_to
        self.strip_null = strip_null

    def from_bytes(self, byte_data):
        if self.strip_null:
            return byte_data.rstrip('\x00')
        else:
            return byte_data
7141
7148
7151 """
7152 adds _data as a str to class
7153 binds variable name to BytesType descriptor
7154 """
7155
7158
7160 fields = []
7161 initialized = stringified = False
7162 for name, thing in cls.__dict__.items():
7163 if isinstance(thing, BytesType):
7164 thing.__name__ = name
7165 fields.append((name, thing))
7166 elif name in ('__init__', '__new__'):
7167 initialized = True
7168 elif name in ('__repr__', ):
7169 stringified = True
7170 fields.sort(key=lambda t: t[1].offset)
7171 for _, field in fields:
7172 offset = field.offset
7173 if not field.size:
7174 field.size = field.fill_to - offset
7175 total_field_size = field.offset + field.size
7176 if self.size and total_field_size > self.size:
7177 raise DbfError('Fields in %r are using %d bytes, but only %d allocated' % (cls, self.size))
7178 total_field_size = self.size or total_field_size
7179 cls._data = str('\x00' * total_field_size)
7180 cls.__len__ = lambda s: len(s._data)
7181 cls._size_ = total_field_size
7182 if not initialized:
7183 def init(self, data):
7184 if len(data) != self._size_:
7185 raise Exception('%d bytes required, received %d' % (self._size_, len(data)))
7186 self._data = data
7187 cls.__init__ = init
7188 if not stringified:
7189 def repr(self):
7190 clauses = []
7191 for name, _ in fields:
7192 value = getattr(self, name)
7193 if isinstance(value, str) and len(value) > 12:
7194 value = value[:9] + '...'
7195 clauses.append('%s=%r' % (name, value))
7196 return ('%s(%s)' % (cls.__name__, ', '.join(clauses)))
7197 cls.__repr__ = repr
7198 return cls
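
# Usage sketch (hypothetical layout; not executed on import): DataBlock turns a
# class of BytesType descriptors into a fixed-size binary parser, generating
# __init__ and __repr__ when the class does not supply them.
#
#   @DataBlock(8)
#   class Stamp(object):
#       flag = Int8(0)
#       count = Int16(1)
#       tag = Bytes(3, size=5, strip_null=True)
#
#   s = Stamp('\x01\x02\x00abc\x00\x00')
#   s.flag, s.count, s.tag           # 1, 2, 'abc'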
7199
7202 """
7203 keep the most recent n items in the dict
7204
7205 based on code from Raymond Hettinger: http://stackoverflow.com/a/8334739/208880
7206 """
7207
7208 - class Link(object):
7209 __slots__ = 'prev_link', 'next_link', 'key', 'value'
7210 - def __init__(self, prev=None, next=None, key=None, value=None):
7212
7215
7221
7222 - def __init__(self, maxsize, func=None):
7223 self.maxsize = maxsize
7224 self.mapping = {}
7225 self.tail = self.Link()
7226 self.head = self.Link(self.tail)
7227 self.head.prev_link = self.tail
7228 self.func = func
7229 if func is not None:
7230 self.__name__ = func.__name__
7231 self.__doc__ = func.__doc__
7232
7234 if self.func is None:
7235 [self.func] = func
7236 self.__name__ = func.__name__
7237 self.__doc__ = func.__doc__
7238 return self
7239 mapping, head, tail = self.mapping, self.head, self.tail
7240 link = mapping.get(func, head)
7241 if link is head:
7242 value = self.func(*func)
7243 if len(mapping) >= self.maxsize:
7244 old_prev, old_next, old_key, old_value = tail.next_link
7245 tail.next_link = old_next
7246 old_next.prev_link = tail
7247 del mapping[old_key]
7248 behind = head.prev_link
7249 link = self.Link(behind, head, func, value)
7250 mapping[func] = behind.next_link = head.prev_link = link
7251 else:
7252 link_prev, link_next, func, value = link
7253 link_prev.next_link = link_next
7254 link_next.prev_link = link_prev
7255 behind = head.prev_link
7256 behind.next_link = head.prev_link = link
7257 link.prev_link = behind
7258 link.next_link = head
7259 return value
7260
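
# Usage sketch (hypothetical function; not executed on import): cache the 200
# most recent results of an expensive lookup, the same way Idx caches nodes.
#
#   def fetch_sector(offset):
#       ...     # expensive disk read
#   fetch_sector = LruCache(maxsize=200, func=fetch_sector)
#   fetch_sector(512)                # computed and cached
#   fetch_sector(512)                # served from the cache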
7261
class Idx(object):
7263
7264
7265
7266 @DataBlock(512)
7276
    @DataBlock(512)
    class Node(object):
        attributes = Int16(0)
        num_keys = Int16(2)
        left_peer = Int32(4, neg_one_is_none=True)
        right_peer = Int32(8, neg_one_is_none=True)
        pool = Bytes(12, fill_to=512)

        def __init__(self, byte_data, node_key, record_key):
            if len(byte_data) != 512:
                raise DbfError("incomplete header: only received %d bytes" % len(byte_data))
            self._data = byte_data
            self._node_key = node_key
            self._record_key = record_key
7308
    def __init__(self, table, filename, size_limit=100):
7320 @DataBlock(header.key_length+4)
7321 class RecordKey(object):
7322 key = Bytes(0, header.key_length)
7323 rec_no = Int32(header.key_length, big_endian=True, one_based=True)
7324 self.NodeKey = NodeKey
7325 self.RecordKey = RecordKey
7326
7327 idx.seek(header.root_node)
7328 self.root_node = self.Node(idx.read(512), self.NodeKey, self.RecordKey)
7329
7330 self.read_node = LruCache(maxsize=size_limit, func=self.read_node)
7331
7332 self.current_node = None
7333 self.current_key = None
7334
    def __iter__(self):
        # walk the leaves left to right, yielding records in index order
        table = self.table()
        if table is None:
            raise DbfError('the database linked to %r has been closed' % self.filename)
        node = self.root_node
        if not node.num_keys:
            yield
            return
        while "looking for a leaf":
            # descend to the left-most leaf node
            if node.is_leaf():
                break
            node = self.read_node(node.keys()[0].rec_no)
        while "traversing nodes":
            for key in node.keys():
                yield table[key.rec_no]
            next_node = node.right_peer
            if next_node is None:
                return
            node = self.read_node(next_node)
    forward = __iter__
7357
7359 """
7360 reads the sector indicated, and returns a Node object
7361 """
7362 with open(self.filename, 'rb') as idx:
7363 idx.seek(offset)
7364 return self.Node(idx.read(512), self.NodeKey, self.RecordKey)
7365
    def backward(self):
        # walk the leaves right to left, yielding records in reverse index order
        table = self.table()
        if table is None:
            raise DbfError('the database linked to %r has been closed' % self.filename)
        node = self.root_node
        if not node.num_keys:
            yield
            return
        while "looking for last leaf":
            # descend to the right-most leaf node
            if node.is_leaf():
                break
            node = self.read_node(node.keys()[-1].rec_no)
        while "traversing nodes":
            for key in reversed(node.keys()):
                yield table[key.rec_no]
            prev_node = node.left_peer
            if prev_node is None:
                return
            node = self.read_node(prev_node)
7387
7388
7389
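
# Usage sketch (hypothetical file names; not executed on import): walk an
# existing .idx file in key order, or in reverse.
#
#   table = Table('customer.dbf')
#   table.open()
#   for rec in Idx(table, 'customer.idx'):
#       ...
#   for rec in Idx(table, 'customer.idx').backward():
#       ...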
7390
7391 table_types = {
7392 'db3' : Db3Table,
7393 'clp' : ClpTable,
7394 'fp' : FpTable,
7395 'vfp' : VfpTable,
7396 }
7397
7398 version_map = {
7399 '\x02' : 'FoxBASE',
7400 '\x03' : 'dBase III Plus',
7401 '\x04' : 'dBase IV',
7402 '\x05' : 'dBase V',
7403 '\x30' : 'Visual FoxPro',
7404 '\x31' : 'Visual FoxPro (auto increment field)',
7405 '\x32' : 'Visual FoxPro (VarChar, VarBinary, or BLOB enabled)',
7406 '\x43' : 'dBase IV SQL table files',
7407 '\x63' : 'dBase IV SQL system files',
7408 '\x83' : 'dBase III Plus w/memos',
7409 '\x8b' : 'dBase IV w/memos',
7410 '\x8e' : 'dBase IV w/SQL table',
7411 '\xf5' : 'FoxPro w/memos'}
7412
7413 code_pages = {
7414 '\x00' : ('ascii', "plain ol' ascii"),
7415 '\x01' : ('cp437', 'U.S. MS-DOS'),
7416 '\x02' : ('cp850', 'International MS-DOS'),
7417 '\x03' : ('cp1252', 'Windows ANSI'),
7418 '\x04' : ('mac_roman', 'Standard Macintosh'),
7419 '\x08' : ('cp865', 'Danish OEM'),
7420 '\x09' : ('cp437', 'Dutch OEM'),
7421 '\x0A' : ('cp850', 'Dutch OEM (secondary)'),
7422 '\x0B' : ('cp437', 'Finnish OEM'),
7423 '\x0D' : ('cp437', 'French OEM'),
7424 '\x0E' : ('cp850', 'French OEM (secondary)'),
7425 '\x0F' : ('cp437', 'German OEM'),
7426 '\x10' : ('cp850', 'German OEM (secondary)'),
7427 '\x11' : ('cp437', 'Italian OEM'),
7428 '\x12' : ('cp850', 'Italian OEM (secondary)'),
7429 '\x13' : ('cp932', 'Japanese Shift-JIS'),
7430 '\x14' : ('cp850', 'Spanish OEM (secondary)'),
7431 '\x15' : ('cp437', 'Swedish OEM'),
7432 '\x16' : ('cp850', 'Swedish OEM (secondary)'),
7433 '\x17' : ('cp865', 'Norwegian OEM'),
7434 '\x18' : ('cp437', 'Spanish OEM'),
7435 '\x19' : ('cp437', 'English OEM (Britain)'),
7436 '\x1A' : ('cp850', 'English OEM (Britain) (secondary)'),
7437 '\x1B' : ('cp437', 'English OEM (U.S.)'),
7438 '\x1C' : ('cp863', 'French OEM (Canada)'),
7439 '\x1D' : ('cp850', 'French OEM (secondary)'),
7440 '\x1F' : ('cp852', 'Czech OEM'),
7441 '\x22' : ('cp852', 'Hungarian OEM'),
7442 '\x23' : ('cp852', 'Polish OEM'),
    '\x24' : ('cp860', 'Portuguese OEM'),
    '\x25' : ('cp850', 'Portuguese OEM (secondary)'),
7445 '\x26' : ('cp866', 'Russian OEM'),
7446 '\x37' : ('cp850', 'English OEM (U.S.) (secondary)'),
7447 '\x40' : ('cp852', 'Romanian OEM'),
7448 '\x4D' : ('cp936', 'Chinese GBK (PRC)'),
7449 '\x4E' : ('cp949', 'Korean (ANSI/OEM)'),
7450 '\x4F' : ('cp950', 'Chinese Big 5 (Taiwan)'),
7451 '\x50' : ('cp874', 'Thai (ANSI/OEM)'),
7452 '\x57' : ('cp1252', 'ANSI'),
7453 '\x58' : ('cp1252', 'Western European ANSI'),
7454 '\x59' : ('cp1252', 'Spanish ANSI'),
7455 '\x64' : ('cp852', 'Eastern European MS-DOS'),
7456 '\x65' : ('cp866', 'Russian MS-DOS'),
7457 '\x66' : ('cp865', 'Nordic MS-DOS'),
7458 '\x67' : ('cp861', 'Icelandic MS-DOS'),
7459 '\x68' : (None, 'Kamenicky (Czech) MS-DOS'),
7460 '\x69' : (None, 'Mazovia (Polish) MS-DOS'),
7461 '\x6a' : ('cp737', 'Greek MS-DOS (437G)'),
7462 '\x6b' : ('cp857', 'Turkish MS-DOS'),
7463 '\x78' : ('cp950', 'Traditional Chinese (Hong Kong SAR, Taiwan) Windows'),
7464 '\x79' : ('cp949', 'Korean Windows'),
7465 '\x7a' : ('cp936', 'Chinese Simplified (PRC, Singapore) Windows'),
7466 '\x7b' : ('cp932', 'Japanese Windows'),
7467 '\x7c' : ('cp874', 'Thai Windows'),
7468 '\x7d' : ('cp1255', 'Hebrew Windows'),
7469 '\x7e' : ('cp1256', 'Arabic Windows'),
7470 '\xc8' : ('cp1250', 'Eastern European Windows'),
7471 '\xc9' : ('cp1251', 'Russian Windows'),
7472 '\xca' : ('cp1254', 'Turkish Windows'),
7473 '\xcb' : ('cp1253', 'Greek Windows'),
7474 '\x96' : ('mac_cyrillic', 'Russian Macintosh'),
7475 '\x97' : ('mac_latin2', 'Macintosh EE'),
7476 '\x98' : ('mac_greek', 'Greek Macintosh'),
7477 '\xf0' : ('utf8', '8-bit unicode'),
7478 }
7479
7480
7481 default_codepage = code_pages.get(default_codepage, code_pages.get('\x00'))[0]
7482
7483
7484
7485
def pql_select(records, chosen_fields, condition, field_names):
7487 if chosen_fields != '*':
7488 field_names = chosen_fields.replace(' ', '').split(',')
7489 result = condition(records)
7490 result.modified = 0, 'record' + ('', 's')[len(result)>1]
7491 result.field_names = field_names
7492 return result
7493
def pql_update(records, command, condition, field_names):
7495 possible = condition(records)
7496 modified = pql_cmd(command, field_names)(possible)
7497 possible.modified = modified, 'record' + ('', 's')[modified>1]
7498 return possible
7499
def pql_delete(records, dead_fields, condition, field_names):
7501 deleted = condition(records)
7502 deleted.modified = len(deleted), 'record' + ('', 's')[len(deleted)>1]
7503 deleted.field_names = field_names
7504 if dead_fields == '*':
7505 for record in deleted:
7506 record.delete_record()
7507 record.write_record()
7508 else:
7509 keep = [f for f in field_names if f not in dead_fields.replace(' ', '').split(',')]
7510 for record in deleted:
7511 record.reset_record(keep_fields=keep)
7512 record.write_record()
7513 return deleted
7514
def pql_recall(records, all_fields, condition, field_names):
7516 if all_fields != '*':
7517 raise DbfError('SQL RECALL: fields must be * (only able to recover at the record level)')
7518 revivified = List()
7519 for record in condition(records):
7520 if is_deleted(record):
7521 revivified.append(record)
7522 undelete(record)
    revivified.modified = len(revivified), 'record' + ('', 's')[len(revivified)>1]
7524 return revivified
7525
def pql_add(records, new_fields, condition, field_names):
7527 tables = set()
7528 possible = condition(records)
7529 for record in possible:
7530 tables.add(source_table(record))
7531 for table in tables:
7532 table.add_fields(new_fields)
7533 possible.modified = len(tables), 'table' + ('', 's')[len(tables)>1]
7534 possible.field_names = field_names
7535 return possible
7536
def pql_drop(records, dead_fields, condition, field_names):
7538 tables = set()
7539 possible = condition(records)
7540 for record in possible:
7541 tables.add(source_table(record))
7542 for table in tables:
7543 table.delete_fields(dead_fields)
7544 possible.modified = len(tables), 'table' + ('', 's')[len(tables)>1]
7545 possible.field_names = field_names
7546 return possible
7547
def pql_pack(records, command, condition, field_names):
7549 tables = set()
7550 possible = condition(records)
7551 for record in possible:
7552 tables.add(source_table(record))
7553 for table in tables:
7554 table.pack()
7555 possible.modified = len(tables), 'table' + ('', 's')[len(tables)>1]
7556 possible.field_names = field_names
7557 return possible
7558
def pql_resize(records, fieldname_newsize, condition, field_names):
7560 tables = set()
7561 possible = condition(records)
7562 for record in possible:
7563 tables.add(source_table(record))
7564 fieldname, newsize = fieldname_newsize.split()
7565 newsize = int(newsize)
7566 for table in tables:
7567 table.resize_field(fieldname, newsize)
7568 possible.modified = len(tables), 'table' + ('', 's')[len(tables)>1]
7569 possible.field_names = field_names
7570 return possible
7571
7573 """
7574 creates a function matching the pql criteria
7575 """
7576 function = """def func(records):
7577 '''%s
7578 '''
7579 _matched = dbf.List()
7580 for _rec in records:
7581 %s
7582
7583 if %s:
7584 _matched.append(_rec)
7585 return _matched"""
7586 fields = []
7587 for field in field_names(records):
7588 if field in criteria:
7589 fields.append(field)
7590 criteria = criteria.replace('recno()', 'recno(_rec)').replace('is_deleted()', 'is_deleted(_rec)')
7591 fields = '\n '.join(['%s = _rec.%s' % (field, field) for field in fields])
7592 g = dict()
7593 g['dbf'] = dbf
7594 g.update(pql_user_functions)
7595 function %= (criteria, fields, criteria)
7596 exec function in g
7597 return g['func']
7598
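
# For example (illustrative), pql_criteria(records, "age > 30") builds and
# exec's source roughly like this, with `age` bound because it appears in both
# the criteria and the table's field names:
#
#   def func(records):
#       '''age > 30
#       '''
#       _matched = dbf.List()
#       for _rec in records:
#           age = _rec.age
#
#           if age > 30:
#               _matched.append(_rec)
#       return _matched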
def pql_cmd(command, field_names):
7600 """
7601 creates a function matching to apply command to each record in records
7602 """
7603 function = """def func(records):
7604 '''%s
7605 '''
7606 _changed = 0
7607 for _rec in records:
7608 _tmp = dbf.create_template(_rec)
7609 %s
7610
7611 %s
7612
7613 %s
7614 if _tmp != _rec:
7615 dbf.gather(_rec, _tmp)
7616 _changed += 1
7617 return _changed"""
7618 fields = []
7619 for field in field_names:
7620 if field in command:
7621 fields.append(field)
7622 command = command.replace('recno()', 'recno(_rec)').replace('is_deleted()', 'is_deleted(_rec)')
7623 pre_fields = '\n '.join(['%s = _tmp.%s' % (field, field) for field in fields])
7624 post_fields = '\n '.join(['_tmp.%s = %s' % (field, field) for field in fields])
7625 g = pql_user_functions.copy()
7626 g['dbf'] = dbf
7627 g['recno'] = recno
7628 g['create_template'] = create_template
7629 g['gather'] = gather
7630 if ' with ' in command.lower():
7631 offset = command.lower().index(' with ')
7632 command = command[:offset] + ' = ' + command[offset + 6:]
7633 function %= (command, pre_fields, command, post_fields)
7634 exec function in g
7635 return g['func']
7636
def pql(records, command):
7638 """
    recognized pql commands are SELECT, UPDATE | REPLACE, DELETE, RECALL, ADD, DROP, PACK, and RESIZE
7640 """
7641 close_table = False
7642 if isinstance(records, basestring):
7643 records = Table(records)
7644 close_table = True
7645 try:
7646 if not records:
7647 return List()
7648 pql_command = command
7649 if ' where ' in command:
7650 command, condition = command.split(' where ', 1)
7651 condition = pql_criteria(records, condition)
7652 else:
7653 def condition(records):
7654 return records[:]
7655 name, command = command.split(' ', 1)
7656 command = command.strip()
7657 name = name.lower()
7658 fields = field_names(records)
7659 if pql_functions.get(name) is None:
7660 raise DbfError('unknown SQL command: %s' % name.upper())
7661 result = pql_functions[name](records, command, condition, fields)
7662 tables = set()
7663 for record in result:
7664 tables.add(source_table(record))
7665 finally:
7666 if close_table:
7667 records.close()
7668 return result
7669
7670 pql_functions = {
7671 'select' : pql_select,
7672 'update' : pql_update,
7673 'replace': pql_update,
7674 'insert' : None,
7675 'delete' : pql_delete,
7676 'recall' : pql_recall,
7677 'add' : pql_add,
7678 'drop' : pql_drop,
7679 'count' : None,
7680 'pack' : pql_pack,
7681 'resize' : pql_resize,
7682 }
7683
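
# Usage sketch (hypothetical table and fields; not executed on import):
#
#   employees = Table('employees.dbf')
#   employees.open()
#   older = pql(employees, "select * where age > 55")
#   pql(employees, "replace salary with salary * 1.1 where dept == 'sales'")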
7684
def _nop(value):
7686 """
7687 returns parameter unchanged
7688 """
7689 return value
7690
7692 """
7693 ensures each tuple is the same length, using filler[-missing] for the gaps
7694 """
7695 final = []
7696 for t in tuples:
7697 if len(t) < length:
7698 final.append( tuple([item for item in t] + filler[len(t)-length:]) )
7699 else:
7700 final.append(t)
7701 return tuple(final)
7702
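
# For example (illustrative, using the helper above): short tuples are padded
# with the tail of filler.
#
#   _normalize_tuples(tuples=[('a', 1), ('b',)], length=3, filler=[None, 0])
#   # --> (('a', 1, 0), ('b', None, 0))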
def _codepage_lookup(cp):
    if cp not in code_pages:
7705 for code_page in sorted(code_pages.keys()):
7706 sd, ld = code_pages[code_page]
7707 if cp == sd or cp == ld:
7708 if sd is None:
7709 raise DbfError("Unsupported codepage: %s" % ld)
7710 cp = code_page
7711 break
7712 else:
7713 raise DbfError("Unsupported codepage: %s" % cp)
7714 sd, ld = code_pages[cp]
7715 return cp, sd, ld
7716
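
# For example (illustrative): both the header byte and the codec name resolve
# to the same (byte, codec, description) entry.
#
#   _codepage_lookup('\x03')         # ('\x03', 'cp1252', 'Windows ANSI')
#   _codepage_lookup('cp1252')       # ('\x03', 'cp1252', 'Windows ANSI')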
7721 """
7722 under development
7723 """
7724
7725 version = 'dBase IV w/memos (non-functional)'
7726 _versionabbr = 'db4'
7727
    @MutableDefault
    def _field_types():
        return {
7731 'C' : {'Type':'Character', 'Retrieve':retrieve_character, 'Update':update_character, 'Blank':str, 'Init':add_vfp_character},
7732 'Y' : {'Type':'Currency', 'Retrieve':retrieve_currency, 'Update':update_currency, 'Blank':Decimal, 'Init':add_vfp_currency},
7733 'B' : {'Type':'Double', 'Retrieve':retrieve_double, 'Update':update_double, 'Blank':float, 'Init':add_vfp_double},
7734 'F' : {'Type':'Float', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':float, 'Init':add_vfp_numeric},
7735 'N' : {'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':int, 'Init':add_vfp_numeric},
7736 'I' : {'Type':'Integer', 'Retrieve':retrieve_integer, 'Update':update_integer, 'Blank':int, 'Init':add_vfp_integer},
7737 'L' : {'Type':'Logical', 'Retrieve':retrieve_logical, 'Update':update_logical, 'Blank':Logical, 'Init':add_logical},
7738 'D' : {'Type':'Date', 'Retrieve':retrieve_date, 'Update':update_date, 'Blank':Date, 'Init':add_date},
7739 'T' : {'Type':'DateTime', 'Retrieve':retrieve_vfp_datetime, 'Update':update_vfp_datetime, 'Blank':DateTime, 'Init':add_vfp_datetime},
7740 'M' : {'Type':'Memo', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':str, 'Init':add_memo},
7741 'G' : {'Type':'General', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':str, 'Init':add_memo},
7742 'P' : {'Type':'Picture', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':str, 'Init':add_memo},
7743 '0' : {'Type':'_NullFlags', 'Retrieve':unsupported_type, 'Update':unsupported_type, 'Blank':int, 'Init':None} }
7744
7745 _memoext = '.dbt'
7746 _memotypes = ('G', 'M', 'P')
7747 _memoClass = _VfpMemo
7748 _yesMemoMask = '\x8b'
7749 _noMemoMask = '\x04'
7750 _fixed_fields = ('B', 'D', 'G', 'I', 'L', 'M', 'P', 'T', 'Y')
7751 _variable_fields = ('C', 'F', 'N')
7752 _binary_fields = ('G', 'P')
7753 _character_fields = ('C', 'M')
7754 _decimal_fields = ('F', 'N')
7755 _numeric_fields = ('B', 'F', 'I', 'N', 'Y')
7756 _currency_fields = ('Y',)
7757 _supported_tables = ('\x04', '\x8b')
7758 _dbfTableHeader = ['\x00'] * 32
7759 _dbfTableHeader[0] = '\x8b'
7760 _dbfTableHeader[10] = '\x01'
7761 _dbfTableHeader[29] = '\x03'
7762 _dbfTableHeader = ''.join(_dbfTableHeader)
7763 _dbfTableHeaderExtra = ''
7764
7788
7797
7799 """
7800 marks record as deleted
7801 """
7802 template = isinstance(record, RecordTemplate)
7803 if not template and record._meta.status == CLOSED:
7804 raise DbfError("%s is closed; cannot delete record" % record._meta.filename)
7805 record_in_flux = not record._write_to_disk
7806 if not template and not record_in_flux:
7807 record._start_flux()
7808 try:
7809 record._data[0] = '*'
7810 if not template:
7811 record._dirty = True
7812 except:
7813 if not template and not record_in_flux:
7814 record._rollback_flux()
7815 raise
7816 if not template and not record_in_flux:
7817 record._commit_flux()
7818
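
# Usage sketch (hypothetical table; not executed on import): deletion is a
# two-step affair -- records are only flagged here, and physically removed
# later by the table's pack().
#
#   for rec in employees:
#       if rec.status == 'retired':
#           delete(rec)
#   is_deleted(employees[0])         # True if flagged
#   undelete(employees[0])           # clear the flag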
def export(table_or_records, filename=None, field_names=None, format='csv', header=True, dialect='dbf', encoding=None):
7820 """
7821 writes the records using CSV or tab-delimited format, using the filename
7822 given if specified, otherwise the table name
7823 if table_or_records is a collection of records (not an actual table) they
7824 should all be of the same format
7825 """
7826 table = source_table(table_or_records[0])
7827 if filename is None:
7828 filename = table.filename
7829 if field_names is None:
7830 field_names = table.field_names
7831 if isinstance(field_names, basestring):
7832 field_names = [f.strip() for f in field_names.split(',')]
7833 format = format.lower()
7834 if format not in ('csv', 'tab', 'fixed'):
7835 raise DbfError("export format: csv, tab, or fixed -- not %s" % format)
7836 if format == 'fixed':
7837 format = 'txt'
7838 if encoding is None:
7839 encoding = table.codepage.name
7840 encoder = codecs.getencoder(encoding)
7841 if isinstance(field_names[0], unicode):
        header_names = [encoder(f)[0] for f in field_names]
7843 else:
7844 header_names = field_names
7845 base, ext = os.path.splitext(filename)
7846 if ext.lower() in ('', '.dbf'):
7847 filename = base + "." + format
7848 try:
7849 if format == 'csv':
7850 fd = open(filename, 'wb')
7851 csvfile = csv.writer(fd, dialect=dialect)
7852 if header:
7853 csvfile.writerow(header_names)
7854 for record in table_or_records:
7855 fields = []
7856 for fieldname in field_names:
7857 data = record[fieldname]
7858 if isinstance(data, unicode):
7859 fields.append(encoder(data)[0])
7860 else:
7861 fields.append(data)
7862 csvfile.writerow(fields)
7863 elif format == 'tab':
7864 fd = open(filename, 'w')
7865 if header:
7866 fd.write('\t'.join(header_names) + '\n')
7867 for record in table_or_records:
7868 fields = []
7869 for fieldname in field_names:
7870 data = record[fieldname]
7871 if isinstance(data, unicode):
7872 fields.append(encoder(data)[0])
7873 else:
7874 fields.append(str(data))
7875 fd.write('\t'.join(fields) + '\n')
7876 else:
7877 fd = open(filename, 'w')
7878 header = open("%s_layout.txt" % os.path.splitext(filename)[0], 'w')
7879 header.write("%-15s Size\n" % "Field Name")
7880 header.write("%-15s ----\n" % ("-" * 15))
7881 sizes = []
7882 for field in field_names:
7883 size = table.field_info(field).length
7884 sizes.append(size)
7885 field = encoder(field)[0]
7886 header.write("%-15s %3d\n" % (field, size))
7887 header.write('\nTotal Records in file: %d\n' % len(table_or_records))
7888 header.close()
7889 for record in table_or_records:
7890 fields = []
7891 for i, fieldname in enumerate(field_names):
7892 data = record[fieldname]
7893 if isinstance(data, unicode):
7894 fields.append("%-*s" % (sizes[i], encoder(data)[0]))
7895 else:
7896 fields.append("%-*s" % (sizes[i], data))
7897 fd.write(''.join(fields) + '\n')
7898 finally:
7899 fd.close()
7900 fd = None
7901 return len(table_or_records)
7902
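
# Usage sketch (hypothetical file names; not executed on import):
#
#   employees = Table('employees.dbf')
#   employees.open()
#   export(employees, filename='employees.csv', format='csv')
#   export(employees, filename='employees.txt', format='fixed')   # also writes employees_layout.txt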
7916
7918 """
7919 marked for deletion?
7920 """
7921 return record._data[0] == '*'
7922
7924 """
7925 physical record number
7926 """
7927 return record._recnum
7928
def reset(record, keep_fields=None):
7930 """
7931 sets record's fields back to original, except for fields in keep_fields
7932 """
7933 template = record_in_flux = False
7934 if isinstance(record, RecordTemplate):
7935 template = True
7936 else:
7937 record_in_flux = not record._write_to_disk
7938 if record._meta.status == CLOSED:
7939 raise DbfError("%s is closed; cannot modify record" % record._meta.filename)
7940 if keep_fields is None:
7941 keep_fields = []
7942 keep = {}
7943 for field in keep_fields:
7944 keep[field] = record[field]
7945 record._data[:] = record._meta.blankrecord[:]
7946 for field in keep_fields:
7947 record[field] = keep[field]
7948 if not template:
7949 if record._write_to_disk:
7950 record._write()
7951 else:
7952 record._dirty = True
7953
7955 """
7956 table associated with table | record | index
7957 """
7958 table = thingie._meta.table()
7959 if table is None:
7960 raise DbfError("table is no longer available")
7961 return table
7962
7964 """
7965 marks record as active
7966 """
7967 template = isinstance(record, RecordTemplate)
7968 if not template and record._meta.status == CLOSED:
7969 raise DbfError("%s is closed; cannot undelete record" % record._meta.filename)
7970 record_in_flux = not record._write_to_disk
7971 if not template and not record_in_flux:
7972 record._start_flux()
7973 try:
7974 record._data[0] = ' '
7975 if not template:
7976 record._dirty = True
7977 except:
7978 if not template and not record_in_flux:
7979 record._rollback_flux()
7980 raise
7981 if not template and not record_in_flux:
7982 record._commit_flux()
def write(record, **kwargs):
7995
def Process(records, start=0, stop=None, filter=None):
7997 """commits each record to disk before returning the next one; undoes all changes to that record if exception raised
7998 if records is a table, it will be opened and closed if necessary
7999 filter function should return True to skip record, False to keep"""
8000 already_open = True
8001 if isinstance(records, Table):
8002 already_open = records.status != CLOSED
8003 if not already_open:
8004 records.open()
8005 try:
8006 if stop is None:
8007 stop = len(records)
8008 for record in records[start:stop]:
8009 if filter is not None and filter(record):
8010 continue
8011 try:
8012 record._start_flux()
8013 yield record
8014 except:
8015 record._rollback_flux()
8016 raise
8017 else:
8018 record._commit_flux()
8019 finally:
8020 if not already_open:
8021 records.close()
8022
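
# Usage sketch (hypothetical fields; not executed on import): each yielded
# record is committed as the loop advances, and rolled back if the loop body
# raises.
#
#   for rec in Process(employees, filter=lambda r: r.status == 'inactive'):
#       rec.salary = rec.salary * 1.05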
def Templates(records, start=0, stop=None, filter=None):
8024 """
8025 returns a template of each record instead of the record itself
8026 if records is a table, it will be opened and closed if necessary
8027 """
8028 already_open = True
8029 if isinstance(records, Table):
8030 already_open = records.status != CLOSED
8031 if not already_open:
8032 records.open()
8033 try:
8034 if stop is None:
8035 stop = len(records)
8036 for record in records[start:stop]:
8037 if filter is not None and filter(record):
8038 continue
8039 yield(create_template(record))
8040 finally:
8041 if not already_open:
8042 records.close()
8043
8045 """
8046 returns integers 0 - len(sequence)
8047 """
8048 for i in xrange(len(sequence)):
8049 yield i
8050
8061
8083
8085 """
8086 adds fields to an existing table
8087 """
8088 table = Table(table_name)
8089 table.open()
8090 try:
8091 table.add_fields(field_specs)
8092 finally:
8093 table.close()
8094
8096 """
8097 deletes fields from an existing table
8098 """
8099 table = Table(table_name)
8100 table.open()
8101 try:
8102 table.delete_fields(field_names)
8103 finally:
8104 table.close()
8105
8107 """
8108 prints the first record of a table
8109 """
8110 table = Table(table_name)
8111 table.open()
8112 try:
8113 print(str(table[0]))
8114 finally:
8115 table.close()
8116
def from_csv(csvfile, to_disk=False, filename=None, field_names=None, extra_fields=None,
             dbf_type='db3', memo_size=64, min_field_size=1,
             encoding=None, errors=None):
8120 """
8121 creates a Character table from a csv file
8122 to_disk will create a table with the same name
8123 filename will be used if provided
8124 field_names default to f0, f1, f2, etc, unless specified (list)
8125 extra_fields can be used to add additional fields -- should be normal field specifiers (list)
8126 """
8127 with codecs.open(csvfile, 'r', encoding='latin-1', errors=errors) as fd:
8128 reader = csv.reader(fd)
8129 if field_names:
8130 if isinstance(field_names, basestring):
8131 field_names = field_names.split()
8132 if ' ' not in field_names[0]:
8133 field_names = ['%s M' % fn for fn in field_names]
8134 else:
8135 field_names = ['f0 M']
8136 if filename:
8137 to_disk = True
8138 else:
8139 filename = os.path.splitext(csvfile)[0]
8140 if to_disk:
8141 csv_table = Table(filename, [field_names[0]], dbf_type=dbf_type, memo_size=memo_size, codepage=encoding)
8142 else:
8143 csv_table = Table(':memory:', [field_names[0]], dbf_type=dbf_type, memo_size=memo_size, codepage=encoding, on_disk=False)
8144 csv_table.open()
8145 fields_so_far = 1
8146 while reader:
8147 try:
8148 row = next(reader)
8149 except UnicodeEncodeError:
8150 row = ['']
8151 except StopIteration:
8152 break
8153 while fields_so_far < len(row):
8154 if fields_so_far == len(field_names):
8155 field_names.append('f%d M' % fields_so_far)
8156 csv_table.add_fields(field_names[fields_so_far])
8157 fields_so_far += 1
8158 csv_table.append(tuple(row))
8159 if extra_fields:
8160 csv_table.add_fields(extra_fields)
8161 csv_table.close()
8162 return csv_table
8163
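
# Usage sketch (hypothetical csv file; not executed on import):
#
#   temp = from_csv('contacts.csv', field_names='name email phone')
#   saved = from_csv('contacts.csv', filename='contacts.dbf',
#                    field_names=['name', 'email', 'phone'])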
8165 """
8166 returns the list of field names of a table
8167 """
8168 table = Table(table_name)
8169 return table.field_names
8170
def info(table_name):
8172 """
8173 prints table info
8174 """
8175 table = Table(table_name)
8176 print(str(table))
8177
8179 """
8180 renames a field in a table
8181 """
8182 table = Table(table_name)
8183 try:
8184 table.rename_field(oldfield, newfield)
8185 finally:
8186 table.close()
8187
8189 """
8190 returns the definition of a field (or all fields)
8191 """
8192 table = Table(table_name)
8193 return table.structure(field)
8194
8196 """
8197 just what it says ;)
8198 """
8199 for index, dummy in enumerate(records):
8200 chars = dummy._data
8201 print("%2d: " % (index,))
8202 for char in chars[1:]:
8203 print(" %2x " % (ord(char),))
8204 print()
8205
8206
8207
8208
def gather(record, data, drop=False):
8210 """
8211 saves data into a record's fields; writes to disk if not in flux
8212 keys with no matching field will raise a FieldMissingError
    exception unless drop == True;
8214 if an Exception occurs the record is restored before reraising
8215 """
8216 if isinstance(record, Record) and record._meta.status == CLOSED:
8217 raise DbfError("%s is closed; cannot modify record" % record._meta.filename)
8218 record_in_flux = not record._write_to_disk
8219 if not record_in_flux:
8220 record._start_flux()
8221 try:
8222 record_fields = field_names(record)
8223 for key in field_names(data):
8224 value = data[key]
8225 if not key in record_fields:
8226 if drop:
8227 continue
8228 raise FieldMissingError(key)
8229 record[key] = value
8230 except:
8231 if not record_in_flux:
8232 record._rollback_flux()
8233 raise
8234 if not record_in_flux:
8235 record._commit_flux()
8236
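
# Usage sketch (hypothetical fields; not executed on import): keys without a
# matching field raise FieldMissingError unless drop is True.
#
#   gather(rec, {'name': 'Ethan', 'paid': True})
#   gather(rec, {'name': 'Ethan', 'nickname': 'e'}, drop=True)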
def scan(table, direction='forward', filter=lambda rec: True):
8238 """
8239 moves record pointer forward 1; returns False if Eof/Bof reached
8240 table must be derived from _Navigation or have skip() method
8241 """
8242 if direction not in ('forward', 'reverse'):
8243 raise TypeError("direction should be 'forward' or 'reverse', not %r" % direction)
8244 if direction == 'forward':
8245 n = +1
8246 no_more_records = Eof
8247 else:
8248 n = -1
8249 no_more_records = Bof
8250 try:
8251 while True:
8252 table.skip(n)
8253 if filter(table.current_record):
8254 return True
8255 except no_more_records:
8256 return False
8257
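
# Usage sketch (not executed on import; assumes the table's navigation methods
# top() and current_record): walk forward over undeleted records only.
#
#   employees.top()
#   while scan(employees, filter=lambda rec: not is_deleted(rec)):
#       print(employees.current_record)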
8259 """
8260 returns as_type() of [fieldnames and] values.
8261 """
8262 if isinstance(as_type, types.FunctionType):
8263 return as_type(record)
8264 elif issubclass(as_type, _mappings):
8265 return as_type(zip(field_names(record), record))
8266 else:
8267 return as_type(record)
8268