1 """
2 =========
3 Copyright
4 =========
5 - Portions copyright: 2008-2012 Ad-Mail, Inc -- All rights reserved.
6 - Portions copyright: 2012-2013 Ethan Furman -- All rights reserved.
7 - Author: Ethan Furman
8 - Contact: ethan@stoneleaf.us
9
10 Redistribution and use in source and binary forms, with or without
11 modification, are permitted provided that the following conditions are met:
12 - Redistributions of source code must retain the above copyright
13 notice, this list of conditions and the following disclaimer.
14 - Redistributions in binary form must reproduce the above copyright
15 notice, this list of conditions and the following disclaimer in the
16 documentation and/or other materials provided with the distribution.
17 - Neither the name of Ad-Mail, Inc nor the
18 names of its contributors may be used to endorse or promote products
19 derived from this software without specific prior written permission.
20
21 THIS SOFTWARE IS PROVIDED ''AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,
22 INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
23 AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
24 ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
25 EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
26 PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
27 OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
28 WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
29 OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
30 ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 """
32
33 import codecs
34 import collections
35 import csv
36 import datetime
37 import os
38 import struct
39 import sys
40 import time
41 import weakref
42
43 from array import array
44 from bisect import bisect_left, bisect_right
45 from collections import defaultdict
46 from decimal import Decimal
47 from enum import Enum
48 from glob import glob
49 from math import floor
50 from os import SEEK_SET, SEEK_CUR, SEEK_END
51 import types
52
53 module = globals()
54
55 NoneType = type(None)
56
57
58
59 LOGICAL_BAD_IS_NONE = True
60
61
62 input_decoding = 'ascii'
63
64
65 default_codepage = 'ascii'
66
67
68 default_type = 'db3'
69
70 temp_dir = os.environ.get("DBF_TEMP") or os.environ.get("TMP") or os.environ.get("TEMP") or ""
71
72
73 _Template_Records = dict()
74
75
76 days_per_month = [31, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31, 31]
77 days_per_leap_month = [31, 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31, 31]
80 "adds 'export_to()' function"
81 @classmethod
83 namespace.update(cls.__members__)
84
87
89 "repr is in hex"
91 return '<%s.%s: %#02x>' % (
92 self.__class__.__name__,
93 self._name_,
94 self._value_,
95 )
96
98 "allows value aliases (not name aliases)"
99 - def __new__(cls, int_value, *value_aliases):
105
107 """
108 Automatically numbers enum members starting from __number__ (defaults to 0).
109
110 Includes support for a custom docstring per member.
111 """
112 __number__ = 0
113
121
123 """Can handle 0 or 1 argument; more requires a custom __init__.
124
125 0 = auto-number w/o docstring
126 1 = auto-number w/ docstring
127 2+ = needs custom __init__
128
129 """
130 if len(args) == 1 and isinstance(args[0], str):
131 self.__doc__ = args[0]
132 elif args:
133 raise TypeError('%s not dealt with -- need custom __init__' % (args,))
134
137 MONDAY = 1
138 TUESDAY = 2
139 WEDNESDAY = 3
140 THURSDAY = 4
141 FRIDAY = 5
142 SATURDAY = 6
143 SUNDAY = 7
144
146 """Return number of days needed to get from self to day."""
147 if self == day:
148 return 7
149 delta = day - self
150 if delta < 0:
151 delta += 7
152 return delta
153
155 """Return number of days needed to get from self to day."""
156 if self == day:
157 return -7
158 delta = day - self
159 if delta > 0:
160 delta -= 7
161 return delta
162
189 RelativeDay.export_to(module)
192 JANUARY = 1
193 FEBRUARY = 2
194 MARCH = 3
195 APRIL = 4
196 MAY = 5
197 JUNE = 6
198 JULY = 7
199 AUGUST = 8
200 SEPTEMBER = 9
201 OCTOBER = 10
202 NOVEMBER = 11
203 DECEMBER = 12
204
206 """Return number of months needed to get from self to month."""
207 if self == month:
208 return 12
209 delta = month - self
210 if delta < 0:
211 delta += 12
212 return delta
213
215 """Return number of months needed to get from self to month."""
216 if self == month:
217 return -12
218 delta = month - self
219 if delta > 0:
220 delta -= 12
221 return delta
222
259 RelativeMonth.export_to(module)
262 if year % 400 == 0:
263 return True
264 elif year % 100 == 0:
265 return False
266 elif year % 4 == 0:
267 return True
268 else:
269 return False
270
282 LatinByte.export_to(module)
316 FieldType.export_to(module)
319 @classmethod
321 alias = alias.lower()
322 if alias in ('system', ):
323 return cls.SYSTEM
324 elif alias in ('null', 'nullable'):
325 return cls.NULLABLE
326 elif alias in ('binary', 'nocptrans'):
327 return cls.BINARY
328 else:
329 raise ValueError('no FieldFlag %r' % alias)
330 @property
332 if self is NULLABLE:
333 return 'null'
334 else:
335 return self._name_.lower()
336 SYSTEM = 0x01
337 NULLABLE = 0x02
338 BINARY = 0x04
339 NOCPTRANS = 0x04
340
341 FieldFlag.export_to(module)
342
class Field(AutoEnum):
    """
    Symbolic indices into a field-definition record; each member is
    auto-numbered by AutoEnum and carries its description as docstring.
    """
    __order__ = 'TYPE START LENGTH END DECIMALS FLAGS CLASS EMPTY NUL'
    TYPE = "Char, Date, Logical, etc."
    START = "Field offset in record"
    LENGTH = "Length of field in record"
    END = "End of field in record (exclusive)"
    DECIMALS = "Number of decimal places if numeric"
    FLAGS = "System, Binary, Nullable"
    CLASS = "python class type"
    EMPTY = "python function for empty field"
    NUL = "python function for null field"
354 Field.export_to(module)
357 __order__ = 'IN_MEMORY ON_DISK'
358 IN_MEMORY = "dbf is kept in memory (disappears at program end)"
359 ON_DISK = "dbf is kept on disk"
360 DbfLocation.export_to(module)
367 DbfStatus.export_to(module)
370 """
371 doesn't create object until actually accessed
372 """
373
375 yo.fget = func
376 yo.__doc__ = doc or func.__doc__
377
380
382 if instance is None:
383 return yo
384 return yo.fget(instance)
385
388 """
389 Lives in the class, and on first access calls the supplied factory and
390 maps the result into the instance it was called on
391 """
392
394 self._name = func.__name__
395 self.func = func
396
399
400 - def __get__(self, instance, owner):
401 result = self.func()
402 if instance is not None:
403 setattr(instance, self._name, result)
404 return result
405
407 result = self.func()
408 return "MutableDefault(%r)" % (result, )
409
410
def none(*args, **kwargs):
    """
    Accept (and ignore) any arguments, always returning None.

    Exists because NoneType itself cannot be called with arguments.
    """
    return None
416
421 """
422 Fatal errors elicit this response.
423 """
427
430 """
431 Data too large for field
432 """
433
434 - def __init__(self, message, data=None):
437
440 """
441 bad data in table
442 """
443
444 - def __init__(self, message, data=None):
447
450 """
451 Field does not exist in table
452 """
453
455 KeyError.__init__(self, '%s: no such field in table' % fieldname)
456 DbfError.__init__(self, '%s: no such field in table' % fieldname)
457 self.data = fieldname
458
461 """
462 invalid field specification
463 """
464
468
471 """
472 Data for table not in unicode
473 """
474
477
478
479 -class NotFoundError(DbfError, ValueError, KeyError, IndexError):
480 """
481 record criteria not met
482 """
483
484 - def __init__(self, message=None, data=None):
490
493 """
494 Normal operations elicit this response
495 """
496
497
498 -class Eof(DbfWarning, StopIteration):
499 """
500 End of file reached
501 """
502
503 message = 'End of file reached'
504
508
509
510 -class Bof(DbfWarning, StopIteration):
511 """
512 Beginning of file reached
513 """
514
515 message = 'Beginning of file reached'
516
520
523 """
524 Returned by indexing functions to suppress a record from becoming part of the index
525 """
526
527 message = 'Not indexing record'
528
531
532
533
534
535
536 Unknown = Other = object()
587
588 NullType.null = object.__new__(NullType)
589 Null = NullType()
593 """
594 used in Vapor Records -- compares unequal with everything
595 """
596
599
602
603 Vapor = Vapor()
604
605
606 -class Char(str):
607 """
608 Strips trailing whitespace, and ignores trailing whitespace for comparisons
609 """
610
612 if not isinstance(text, (basestring, cls)):
613 raise ValueError("Unable to automatically coerce %r to Char" % text)
614 result = str.__new__(cls, text.rstrip())
615 result.field_size = len(text)
616 return result
617
618 __hash__ = str.__hash__
619
621 """
622 ignores trailing whitespace
623 """
624 if not isinstance(other, (self.__class__, basestring)):
625 return NotImplemented
626 return str(self) == other.rstrip()
627
629 """
630 ignores trailing whitespace
631 """
632 if not isinstance(other, (self.__class__, basestring)):
633 return NotImplemented
634 return str(self) >= other.rstrip()
635
637 """
638 ignores trailing whitespace
639 """
640 if not isinstance(other, (self.__class__, basestring)):
641 return NotImplemented
642 return str(self) > other.rstrip()
643
645 """
646 ignores trailing whitespace
647 """
648 if not isinstance(other, (self.__class__, basestring)):
649 return NotImplemented
650 return str(self) <= other.rstrip()
651
653 """
654 ignores trailing whitespace
655 """
656 if not isinstance(other, (self.__class__, basestring)):
657 return NotImplemented
658 return str(self) < other.rstrip()
659
661 """
662 ignores trailing whitespace
663 """
664 if not isinstance(other, (self.__class__, basestring)):
665 return NotImplemented
666 return str(self) != other.rstrip()
667
669 """
670 ignores trailing whitespace
671 """
672 return bool(str(self))
673
675 result = self.__class__(str(self) + other)
676 result.field_size = self.field_size
677 return result
678
# Py2-style alias tuples retained for the module's isinstance() checks:
# "baseinteger" is any int; "basestring" covers both str and Char.
baseinteger = int
basestring = str, Char
683 """
684 adds null capable datetime.date constructs
685 """
686
687 __slots__ = ['_date']
688
689 - def __new__(cls, year=None, month=0, day=0):
706
708 if self and isinstance(other, (datetime.timedelta)):
709 return Date(self._date + other)
710 else:
711 return NotImplemented
712
714 if isinstance(other, self.__class__):
715 return self._date == other._date
716 if isinstance(other, datetime.date):
717 return self._date == other
718 if isinstance(other, type(None)):
719 return self._date is None
720 return NotImplemented
721
726
728 if name == '_date':
729 raise AttributeError('_date missing!')
730 elif self:
731 return getattr(self._date, name)
732 else:
733 raise AttributeError('NullDate object has no attribute %s' % name)
734
736 if isinstance(other, (datetime.date)):
737 return self._date >= other
738 elif isinstance(other, (Date)):
739 if other:
740 return self._date >= other._date
741 return False
742 return NotImplemented
743
745 if isinstance(other, (datetime.date)):
746 return self._date > other
747 elif isinstance(other, (Date)):
748 if other:
749 return self._date > other._date
750 return True
751 return NotImplemented
752
754 return hash(self._date)
755
757 if self:
758 if isinstance(other, (datetime.date)):
759 return self._date <= other
760 elif isinstance(other, (Date)):
761 if other:
762 return self._date <= other._date
763 return False
764 else:
765 if isinstance(other, (datetime.date)):
766 return True
767 elif isinstance(other, (Date)):
768 if other:
769 return True
770 return True
771 return NotImplemented
772
774 if self:
775 if isinstance(other, (datetime.date)):
776 return self._date < other
777 elif isinstance(other, (Date)):
778 if other:
779 return self._date < other._date
780 return False
781 else:
782 if isinstance(other, (datetime.date)):
783 return True
784 elif isinstance(other, (Date)):
785 if other:
786 return True
787 return False
788 return NotImplemented
789
791 if self:
792 if isinstance(other, (datetime.date)):
793 return self._date != other
794 elif isinstance(other, (Date)):
795 if other:
796 return self._date != other._date
797 return True
798 else:
799 if isinstance(other, (datetime.date)):
800 return True
801 elif isinstance(other, (Date)):
802 if other:
803 return True
804 return False
805 return NotImplemented
806
808 return self._date is not None
809
810 __radd__ = __add__
811
813 if self and isinstance(other, (datetime.date)):
814 return other - self._date
815 elif self and isinstance(other, (Date)):
816 return other._date - self._date
817 elif self and isinstance(other, (datetime.timedelta)):
818 return Date(other - self._date)
819 else:
820 return NotImplemented
821
823 if self:
824 return "Date(%d, %d, %d)" % self.timetuple()[:3]
825 else:
826 return "Date()"
827
829 if self:
830 return str(self._date)
831 return ""
832
834 if self and isinstance(other, (datetime.date)):
835 return self._date - other
836 elif self and isinstance(other, (Date)):
837 return self._date - other._date
838 elif self and isinstance(other, (datetime.timedelta)):
839 return Date(self._date - other)
840 else:
841 return NotImplemented
842
844 if self:
845 return self._date
846 return None
847
848 @classmethod
853
854 @classmethod
857
858 @classmethod
860 if yyyymmdd in ('', ' ', 'no date'):
861 return cls()
862 return cls(datetime.date(int(yyyymmdd[:4]), int(yyyymmdd[4:6]), int(yyyymmdd[6:])))
863
864 - def replace(self, year=None, month=None, day=None, delta_year=0, delta_month=0, delta_day=0):
865 if not self:
866 return self.__class__._null_date
867 old_year, old_month, old_day = self.timetuple()[:3]
868 if isinstance(month, RelativeMonth):
869 this_month = IsoMonth(old_month)
870 delta_month += month.months_from(this_month)
871 month = None
872 if isinstance(day, RelativeDay):
873 this_day = IsoDay(self.isoweekday())
874 delta_day += day.days_from(this_day)
875 day = None
876 year = (year or old_year) + delta_year
877 month = (month or old_month) + delta_month
878 day = (day or old_day) + delta_day
879 days_in_month = (days_per_month, days_per_leap_month)[is_leapyear(year)]
880 while not(0 < month < 13) or not (0 < day <= days_in_month[month]):
881 while month < 1:
882 year -= 1
883 month = 12 + month
884 while month > 12:
885 year += 1
886 month = month - 12
887 days_in_month = (days_per_month, days_per_leap_month)[is_leapyear(year)]
888 while day < 1:
889 month -= 1
890 day = days_in_month[month] + day
891 if not 0 < month < 13:
892 break
893 while day > days_in_month[month]:
894 day = day - days_in_month[month]
895 month += 1
896 if not 0 < month < 13:
897 break
898 return Date(year, month, day)
899
901 if self:
902 return self._date.strftime(format)
903 return ''
904
905 @classmethod
906 - def strptime(cls, date_string, format=None):
907 if format is not None:
908 return cls(*(time.strptime(date_string, format)[0:3]))
909 return cls(*(time.strptime(date_string, "%Y-%m-%d")[0:3]))
910
911 @classmethod
914
916 if self:
917 return "%04d%02d%02d" % self.timetuple()[:3]
918 else:
919 return ' '
920
# Range sentinels mirror datetime.date; the null Date is created via
# object.__new__ so its _date slot can legitimately hold None.
Date.max = Date(datetime.date.max)
Date.min = Date(datetime.date.min)
Date._null_date = object.__new__(Date)
Date._null_date._date = None
NullDate = Date()
929 """
930 adds null capable datetime.datetime constructs
931 """
932
933 __slots__ = ['_datetime']
934
935 - def __new__(cls, year=None, month=0, day=0, hour=0, minute=0, second=0, microsecond=0):
936 """year may be a datetime.datetime"""
937 if year is None or year is Null:
938 return cls._null_datetime
939 ndt = object.__new__(cls)
940 if isinstance(year, basestring):
941 return DateTime.strptime(year)
942 elif isinstance(year, DateTime):
943 ndt._datetime = year._datetime
944 elif isinstance(year, datetime.datetime):
945 microsecond = year.microsecond // 1000 * 1000
946 hour, minute, second = year.hour, year.minute, year.second
947 year, month, day = year.year, year.month, year.day
948 ndt._datetime = datetime.datetime(year, month, day, hour, minute, second, microsecond)
949 elif year is not None:
950 microsecond = microsecond // 1000 * 1000
951 ndt._datetime = datetime.datetime(year, month, day, hour, minute, second, microsecond)
952 return ndt
953
955 if self and isinstance(other, (datetime.timedelta)):
956 return DateTime(self._datetime + other)
957 else:
958 return NotImplemented
959
961 if isinstance(other, self.__class__):
962 return self._datetime == other._datetime
963 if isinstance(other, datetime.date):
964 return self._datetime == other
965 if isinstance(other, type(None)):
966 return self._datetime is None
967 return NotImplemented
968
973
975 if name == '_datetime':
976 raise AttributeError('_datetime missing!')
977 elif self:
978 return getattr(self._datetime, name)
979 else:
980 raise AttributeError('NullDateTime object has no attribute %s' % name)
981
983 if self:
984 if isinstance(other, (datetime.datetime)):
985 return self._datetime >= other
986 elif isinstance(other, (DateTime)):
987 if other:
988 return self._datetime >= other._datetime
989 return False
990 else:
991 if isinstance(other, (datetime.datetime)):
992 return False
993 elif isinstance(other, (DateTime)):
994 if other:
995 return False
996 return True
997 return NotImplemented
998
1000 if self:
1001 if isinstance(other, (datetime.datetime)):
1002 return self._datetime > other
1003 elif isinstance(other, (DateTime)):
1004 if other:
1005 return self._datetime > other._datetime
1006 return True
1007 else:
1008 if isinstance(other, (datetime.datetime)):
1009 return False
1010 elif isinstance(other, (DateTime)):
1011 if other:
1012 return False
1013 return False
1014 return NotImplemented
1015
1018
1020 if self:
1021 if isinstance(other, (datetime.datetime)):
1022 return self._datetime <= other
1023 elif isinstance(other, (DateTime)):
1024 if other:
1025 return self._datetime <= other._datetime
1026 return False
1027 else:
1028 if isinstance(other, (datetime.datetime)):
1029 return True
1030 elif isinstance(other, (DateTime)):
1031 if other:
1032 return True
1033 return True
1034 return NotImplemented
1035
1037 if self:
1038 if isinstance(other, (datetime.datetime)):
1039 return self._datetime < other
1040 elif isinstance(other, (DateTime)):
1041 if other:
1042 return self._datetime < other._datetime
1043 return False
1044 else:
1045 if isinstance(other, (datetime.datetime)):
1046 return True
1047 elif isinstance(other, (DateTime)):
1048 if other:
1049 return True
1050 return False
1051 return NotImplemented
1052
1054 if self:
1055 if isinstance(other, (datetime.datetime)):
1056 return self._datetime != other
1057 elif isinstance(other, (DateTime)):
1058 if other:
1059 return self._datetime != other._datetime
1060 return True
1061 else:
1062 if isinstance(other, (datetime.datetime)):
1063 return True
1064 elif isinstance(other, (DateTime)):
1065 if other:
1066 return True
1067 return False
1068 return NotImplemented
1069
1072
1073 __radd__ = __add__
1074
1084
1086 if self:
1087 return "DateTime(%5d, %2d, %2d, %2d, %2d, %2d, %2d)" % (
1088 self._datetime.timetuple()[:6] + (self._datetime.microsecond, )
1089 )
1090 else:
1091 return "DateTime()"
1092
1094 if self:
1095 return str(self._datetime)
1096 return ""
1097
1107
1108 @classmethod
1113
1115 if self:
1116 return Date(self.year, self.month, self.day)
1117 return Date()
1118
1120 if self:
1121 return self._datetime
1122 return None
1123
1124 @classmethod
1130
1131 @classmethod
1134
1135 @classmethod
1138
1139 - def replace(self, year=None, month=None, day=None, hour=None, minute=None, second=None, microsecond=None,
1140 delta_year=0, delta_month=0, delta_day=0, delta_hour=0, delta_minute=0, delta_second=0):
1141 if not self:
1142 return self.__class__._null_datetime
1143 old_year, old_month, old_day, old_hour, old_minute, old_second, old_micro = self.timetuple()[:7]
1144 if isinstance(month, RelativeMonth):
1145 this_month = IsoMonth(old_month)
1146 delta_month += month.months_from(this_month)
1147 month = None
1148 if isinstance(day, RelativeDay):
1149 this_day = IsoDay(self.isoweekday())
1150 delta_day += day.days_from(this_day)
1151 day = None
1152 year = (year or old_year) + delta_year
1153 month = (month or old_month) + delta_month
1154 day = (day or old_day) + delta_day
1155 hour = (hour or old_hour) + delta_hour
1156 minute = (minute or old_minute) + delta_minute
1157 second = (second or old_second) + delta_second
1158 microsecond = microsecond or old_micro
1159 days_in_month = (days_per_month, days_per_leap_month)[is_leapyear(year)]
1160 while ( not (0 < month < 13)
1161 or not (0 < day <= days_in_month[month])
1162 or not (0 <= hour < 24)
1163 or not (0 <= minute < 60)
1164 or not (0 <= second < 60)
1165 ):
1166 while month < 1:
1167 year -= 1
1168 month = 12 + month
1169 while month > 12:
1170 year += 1
1171 month = month - 12
1172 days_in_month = (days_per_month, days_per_leap_month)[is_leapyear(year)]
1173 while day < 1:
1174 month -= 1
1175 day = days_in_month[month] + day
1176 if not 0 < month < 13:
1177 break
1178 while day > days_in_month[month]:
1179 day = day - days_in_month[month]
1180 month += 1
1181 if not 0 < month < 13:
1182 break
1183 while hour < 1:
1184 day -= 1
1185 hour = 24 + hour
1186 while hour > 23:
1187 day += 1
1188 hour = hour - 24
1189 while minute < 0:
1190 hour -= 1
1191 minute = 60 + minute
1192 while minute > 59:
1193 hour += 1
1194 minute = minute - 60
1195 while second < 0:
1196 minute -= 1
1197 second = 60 + second
1198 while second > 59:
1199 minute += 1
1200 second = second - 60
1201 return DateTime(year, month, day, hour, minute, second, microsecond)
1202
1207
1208 @classmethod
1209 - def strptime(cls, datetime_string, format=None):
1216
1218 if self:
1219 return Time(self.hour, self.minute, self.second, self.microsecond)
1220 return Time()
1221
1222 @classmethod
1225
1226 @classmethod
1229
# Range sentinels mirror datetime.datetime; the null DateTime is created
# via object.__new__ so its _datetime slot can legitimately hold None.
DateTime.max = DateTime(datetime.datetime.max)
DateTime.min = DateTime(datetime.datetime.min)
DateTime._null_datetime = object.__new__(DateTime)
DateTime._null_datetime._datetime = None
NullDateTime = DateTime()
1235
1236
1237 -class Time:
1238 """
1239 adds null capable datetime.time constructs
1240 """
1241
1242 __slots__ = ['_time']
1243
1244 - def __new__(cls, hour=None, minute=0, second=0, microsecond=0):
1245 """
1246 hour may be a datetime.time or a str(Time)
1247 """
1248 if hour is None or hour is Null:
1249 return cls._null_time
1250 nt = object.__new__(cls)
1251 if isinstance(hour, basestring):
1252 hour = Time.strptime(hour)
1253 if isinstance(hour, Time):
1254 nt._time = hour._time
1255 elif isinstance(hour, (datetime.time)):
1256 microsecond = hour.microsecond // 1000 * 1000
1257 hour, minute, second = hour.hour, hour.minute, hour.second
1258 nt._time = datetime.time(hour, minute, second, microsecond)
1259 elif hour is not None:
1260 microsecond = microsecond // 1000 * 1000
1261 nt._time = datetime.time(hour, minute, second, microsecond)
1262 return nt
1263
1264
1266 if self and isinstance(other, (datetime.timedelta)):
1267 t = self._time
1268 t = datetime.datetime(2012, 6, 27, t.hour, t.minute, t.second, t.microsecond)
1269 t += other
1270 return Time(t.hour, t.minute, t.second, t.microsecond)
1271 else:
1272 return NotImplemented
1273
1275 if isinstance(other, self.__class__):
1276 return self._time == other._time
1277 if isinstance(other, datetime.time):
1278 return self._time == other
1279 if isinstance(other, type(None)):
1280 return self._time is None
1281 return NotImplemented
1282
1287
1289 if name == '_time':
1290 raise AttributeError('_time missing!')
1291 elif self:
1292 return getattr(self._time, name)
1293 else:
1294 raise AttributeError('NullTime object has no attribute %s' % name)
1295
1297 if self:
1298 if isinstance(other, (datetime.time)):
1299 return self._time >= other
1300 elif isinstance(other, (Time)):
1301 if other:
1302 return self._time >= other._time
1303 return False
1304 else:
1305 if isinstance(other, (datetime.time)):
1306 return False
1307 elif isinstance(other, (Time)):
1308 if other:
1309 return False
1310 return True
1311 return NotImplemented
1312
1314 if self:
1315 if isinstance(other, (datetime.time)):
1316 return self._time > other
1317 elif isinstance(other, (DateTime)):
1318 if other:
1319 return self._time > other._time
1320 return True
1321 else:
1322 if isinstance(other, (datetime.time)):
1323 return False
1324 elif isinstance(other, (Time)):
1325 if other:
1326 return False
1327 return False
1328 return NotImplemented
1329
1332
1334 if self:
1335 if isinstance(other, (datetime.time)):
1336 return self._time <= other
1337 elif isinstance(other, (Time)):
1338 if other:
1339 return self._time <= other._time
1340 return False
1341 else:
1342 if isinstance(other, (datetime.time)):
1343 return True
1344 elif isinstance(other, (Time)):
1345 if other:
1346 return True
1347 return True
1348 return NotImplemented
1349
1351 if self:
1352 if isinstance(other, (datetime.time)):
1353 return self._time < other
1354 elif isinstance(other, (Time)):
1355 if other:
1356 return self._time < other._time
1357 return False
1358 else:
1359 if isinstance(other, (datetime.time)):
1360 return True
1361 elif isinstance(other, (Time)):
1362 if other:
1363 return True
1364 return False
1365 return NotImplemented
1366
1368 if self:
1369 if isinstance(other, (datetime.time)):
1370 return self._time != other
1371 elif isinstance(other, (Time)):
1372 if other:
1373 return self._time != other._time
1374 return True
1375 else:
1376 if isinstance(other, (datetime.time)):
1377 return True
1378 elif isinstance(other, (Time)):
1379 if other:
1380 return True
1381 return False
1382 return NotImplemented
1383
1385 return self._time is not None
1386
1387 __radd__ = __add__
1388
1390 if self and isinstance(other, (Time, datetime.time)):
1391 t = self._time
1392 t = datetime.datetime(2012, 6, 27, t.hour, t.minute, t.second, t.microsecond)
1393 other = datetime.datetime(2012, 6, 27, other.hour, other.minute, other.second, other.microsecond)
1394 other -= t
1395 return other
1396 else:
1397 return NotImplemented
1398
1400 if self:
1401 return "Time(%d, %d, %d, %d)" % (self.hour, self.minute, self.second, self.microsecond)
1402 else:
1403 return "Time()"
1404
1406 if self:
1407 return str(self._time)
1408 return ""
1409
1411 if self and isinstance(other, (Time, datetime.time)):
1412 t = self._time
1413 t = datetime.datetime(2012, 6, 27, t.hour, t.minute, t.second, t.microsecond)
1414 o = datetime.datetime(2012, 6, 27, other.hour, other.minute, other.second, other.microsecond)
1415 return t - o
1416 elif self and isinstance(other, (datetime.timedelta)):
1417 t = self._time
1418 t = datetime.datetime(2012, 6, 27, t.hour, t.minute, t.second, t.microsecond)
1419 t -= other
1420 return Time(t.hour, t.minute, t.second, t.microsecond)
1421 else:
1422 return NotImplemented
1423
1424 @classmethod
1426 "2.5 == 2 hours, 30 minutes, 0 seconds, 0 microseconds"
1427 if num < 0:
1428 raise ValueError("positive value required (got %r)" % num)
1429 if num == 0:
1430 return Time(0)
1431 hours = int(num)
1432 if hours:
1433 num = num % hours
1434 minutes = int(num * 60)
1435 if minutes:
1436 num = num * 60 % minutes
1437 else:
1438 num = num * 60
1439 seconds = int(num * 60)
1440 if seconds:
1441 num = num * 60 % seconds
1442 else:
1443 num = num * 60
1444 microseconds = int(num * 1000)
1445 return Time(hours, minutes, seconds, microseconds)
1446
1447 @staticmethod
1450
1451 - def replace(self, hour=None, minute=None, second=None, microsecond=None, delta_hour=0, delta_minute=0, delta_second=0):
1452 if not self:
1453 return self.__class__._null_time
1454 old_hour, old_minute, old_second, old_micro = self.hour, self.minute, self.second, self.microsecond
1455 hour = (hour or old_hour) + delta_hour
1456 minute = (minute or old_minute) + delta_minute
1457 second = (second or old_second) + delta_second
1458 microsecond = microsecond or old_micro
1459 while not (0 <= hour < 24) or not (0 <= minute < 60) or not (0 <= second < 60):
1460 while second < 0:
1461 minute -= 1
1462 second = 60 + second
1463 while second > 59:
1464 minute += 1
1465 second = second - 60
1466 while minute < 0:
1467 hour -= 1
1468 minute = 60 + minute
1469 while minute > 59:
1470 hour += 1
1471 minute = minute - 60
1472 while hour < 1:
1473 hour = 24 + hour
1474 while hour > 23:
1475 hour = hour - 24
1476 return Time(hour, minute, second, microsecond)
1477
1479 if self:
1480 return self._time.strftime(format)
1481 return ''
1482
1483 @classmethod
1484 - def strptime(cls, time_string, format=None):
1491
1493 if self:
1494 return self._time
1495 return None
1496
1498 "returns Time as a float"
1499 hour = self.hour
1500 minute = self.minute * (1.0 / 60)
1501 second = self.second * (1.0 / 3600)
1502 microsecond = self.microsecond * (1.0 / 3600000)
1503 return hour + minute + second + microsecond
1504
# Range sentinels mirror datetime.time; the null Time is created via
# object.__new__ so its _time slot can legitimately hold None.
Time.max = Time(datetime.time.max)
Time.min = Time(datetime.time.min)
Time._null_time = object.__new__(Time)
Time._null_time._time = None
NullTime = Time()
1513 "for matching various time ranges"
1514
1515 - def __init__(self, year=None, month=None, day=None, hour=None, minute=None, second=None, microsecond=None):
1516 params = vars()
1517 self._mask = {}
1518 for attr in ('year', 'month', 'day', 'hour', 'minute', 'second', 'microsecond'):
1519 value = params[attr]
1520 if value is not None:
1521 self._mask[attr] = value
1522
1524 if not self._mask:
1525 return True
1526 for attr, value in self._mask.items():
1527 other_value = getattr(other, attr, None)
1528 try:
1529 if other_value == value or other_value in value:
1530 continue
1531 except TypeError:
1532 pass
1533 return False
1534 return True
1535
1537 items = []
1538 for attr in ('year', 'month', 'day', 'hour', 'minute', 'second', 'microsecond'):
1539 if attr in self._mask:
1540 items.append('%s=%s' % (attr, self._mask[attr]))
1541 return "Period(%s)" % ', '.join(items)
1542
1545 """
1546 Logical field return type.
1547
1548 Accepts values of True, False, or None/Null.
1549 boolean value of Unknown is False (use Quantum if you want an exception instead.
1550 """
1551
1553 if value is None or value is Null or value is Other or value is Unknown:
1554 return cls.unknown
1555 elif isinstance(value, basestring):
1556 if value.lower() in ('t', 'true', 'y', 'yes', 'on'):
1557 return cls.true
1558 elif value.lower() in ('f', 'false', 'n', 'no', 'off'):
1559 return cls.false
1560 elif value.lower() in ('?', 'unknown', 'null', 'none', ' ', ''):
1561 return cls.unknown
1562 else:
1563 raise ValueError('unknown value for Logical: %s' % value)
1564 else:
1565 return (cls.false, cls.true)[bool(value)]
1566
1568 if isinstance(y, type(None)) or y is Unknown or x is Unknown:
1569 return Unknown
1570 try:
1571 i = int(y)
1572 except Exception:
1573 return NotImplemented
1574 return int(x) + i
1575
1576 __radd__ = __iadd__ = __add__
1577
1579 if isinstance(y, type(None)) or y is Unknown or x is Unknown:
1580 return Unknown
1581 try:
1582 i = int(y)
1583 except Exception:
1584 return NotImplemented
1585 return int(x) - i
1586
1587 __isub__ = __sub__
1588
1590 if isinstance(x, type(None)) or x is Unknown or y is Unknown:
1591 return Unknown
1592 try:
1593 i = int(x)
1594 except Exception:
1595 return NotImplemented
1596 return i - int(y)
1597
1599 if x == 0 or y == 0:
1600 return 0
1601 elif isinstance(y, type(None)) or y is Unknown or x is Unknown:
1602 return Unknown
1603 try:
1604 i = int(y)
1605 except Exception:
1606 return NotImplemented
1607 return int(x) * i
1608
1609 __rmul__ = __imul__ = __mul__
1610
1612 if isinstance(y, type(None)) or y == 0 or y is Unknown or x is Unknown:
1613 return Unknown
1614 try:
1615 i = int(y)
1616 except Exception:
1617 return NotImplemented
1618 return int(x).__div__(i)
1619
1620 __idiv__ = __div__
1621
1623 if isinstance(x, type(None)) or y == 0 or x is Unknown or y is Unknown:
1624 return Unknown
1625 try:
1626 i = int(x)
1627 except Exception:
1628 return NotImplemented
1629 return i.__div__(int(y))
1630
1632 if isinstance(y, type(None)) or y == 0 or y is Unknown or x is Unknown:
1633 return Unknown
1634 try:
1635 i = int(y)
1636 except Exception:
1637 return NotImplemented
1638 return int(x).__truediv__(i)
1639
1640 __itruediv__ = __truediv__
1641
1643 if isinstance(x, type(None)) or y == 0 or y is Unknown or x is Unknown:
1644 return Unknown
1645 try:
1646 i = int(x)
1647 except Exception:
1648 return NotImplemented
1649 return i.__truediv__(int(y))
1650
1652 if isinstance(y, type(None)) or y == 0 or y is Unknown or x is Unknown:
1653 return Unknown
1654 try:
1655 i = int(y)
1656 except Exception:
1657 return NotImplemented
1658 return int(x).__floordiv__(i)
1659
1660 __ifloordiv__ = __floordiv__
1661
1663 if isinstance(x, type(None)) or y == 0 or y is Unknown or x is Unknown:
1664 return Unknown
1665 try:
1666 i = int(x)
1667 except Exception:
1668 return NotImplemented
1669 return i.__floordiv__(int(y))
1670
1672 if isinstance(y, type(None)) or y == 0 or y is Unknown or x is Unknown:
1673 return (Unknown, Unknown)
1674 try:
1675 i = int(y)
1676 except Exception:
1677 return NotImplemented
1678 return divmod(int(x), i)
1679
1681 if isinstance(x, type(None)) or y == 0 or y is Unknown or x is Unknown:
1682 return (Unknown, Unknown)
1683 try:
1684 i = int(x)
1685 except Exception:
1686 return NotImplemented
1687 return divmod(i, int(y))
1688
1690 if isinstance(y, type(None)) or y == 0 or y is Unknown or x is Unknown:
1691 return Unknown
1692 try:
1693 i = int(y)
1694 except Exception:
1695 return NotImplemented
1696 return int(x) % i
1697
1698 __imod__ = __mod__
1699
1701 if isinstance(x, type(None)) or y == 0 or x is Unknown or y is Unknown:
1702 return Unknown
1703 try:
1704 i = int(x)
1705 except Exception:
1706 return NotImplemented
1707 return i % int(y)
1708
1710 if not isinstance(y, (x.__class__, bool, type(None), baseinteger)):
1711 return NotImplemented
1712 if isinstance(y, type(None)) or y is Unknown:
1713 return Unknown
1714 i = int(y)
1715 if i == 0:
1716 return 1
1717 if x is Unknown:
1718 return Unknown
1719 return int(x) ** i
1720
1721 __ipow__ = __pow__
1722
1724 if not isinstance(x, (y.__class__, bool, type(None), baseinteger)):
1725 return NotImplemented
1726 if y is Unknown:
1727 return Unknown
1728 i = int(y)
1729 if i == 0:
1730 return 1
1731 if x is Unknown or isinstance(x, type(None)):
1732 return Unknown
1733 return int(x) ** i
1734
1739
1740 __ilshift__ = __lshift__
1741
1746
1751
1752 __irshift__ = __rshift__
1753
1758
1764
1770
1775
1780
1782 if x.value is None:
1783 raise ValueError("unable to return complex() of %r" % x)
1784 return complex(x.value)
1785
1787 if x.value is None:
1788 raise ValueError("unable to return int() of %r" % x)
1789 return int(x.value)
1790
1792 if x.value is None:
1793 raise ValueError("unable to return float() of %r" % x)
1794 return float(x.value)
1795
1797 """
1798 AND (conjunction) x & y:
1799 True iff both x, y are True
1800 False iff at least one of x, y is False
1801 Unknown otherwise
1802 """
1803 if (isinstance(x, baseinteger) and not isinstance(x, bool)) or (isinstance(y, baseinteger) and not isinstance(y, bool)):
1804 if x == 0 or y == 0:
1805 return 0
1806 elif x is Unknown or y is Unknown:
1807 return Unknown
1808 return int(x) & int(y)
1809 elif x in (False, Falsth) or y in (False, Falsth):
1810 return Falsth
1811 elif x in (True, Truth) and y in (True, Truth):
1812 return Truth
1813 elif isinstance(x, type(None)) or isinstance(y, type(None)) or y is Unknown or x is Unknown:
1814 return Unknown
1815 return NotImplemented
1816
1817 __rand__ = __and__
1818
1820 "OR (disjunction): x | y => True iff at least one of x, y is True"
1821 if (isinstance(x, baseinteger) and not isinstance(x, bool)) or (isinstance(y, baseinteger) and not isinstance(y, bool)):
1822 if x is Unknown or y is Unknown:
1823 return Unknown
1824 return int(x) | int(y)
1825 elif x in (True, Truth) or y in (True, Truth):
1826 return Truth
1827 elif x in (False, Falsth) and y in (False, Falsth):
1828 return Falsth
1829 elif isinstance(x, type(None)) or isinstance(y, type(None)) or y is Unknown or x is Unknown:
1830 return Unknown
1831 return NotImplemented
1832
1833 __ror__ = __or__
1834
1836 "XOR (parity) x ^ y: True iff only one of x,y is True"
1837 if (isinstance(x, baseinteger) and not isinstance(x, bool)) or (isinstance(y, baseinteger) and not isinstance(y, bool)):
1838 if x is Unknown or y is Unknown:
1839 return Unknown
1840 return int(x) ^ int(y)
1841 elif x in (True, Truth, False, Falsth) and y in (True, Truth, False, Falsth):
1842 return {
1843 (True, True) : Falsth,
1844 (True, False) : Truth,
1845 (False, True) : Truth,
1846 (False, False): Falsth,
1847 }[(x, y)]
1848 elif isinstance(x, type(None)) or isinstance(y, type(None)) or y is Unknown or x is Unknown:
1849 return Unknown
1850 return NotImplemented
1851
1852 __rxor__ = __xor__
1853
1855 "boolean value of Unknown is assumed False"
1856 return x.value is True
1857
1859 if isinstance(y, x.__class__):
1860 return x.value == y.value
1861 elif isinstance(y, (bool, NoneType, baseinteger)):
1862 return x.value == y
1863 return NotImplemented
1864
1866 if isinstance(y, type(None)) or x is Unknown or y is Unknown:
1867 return x.value == None
1868 elif isinstance(y, x.__class__):
1869 return x.value >= y.value
1870 elif isinstance(y, (bool, baseinteger)):
1871 return x.value >= y
1872 return NotImplemented
1873
1875 if isinstance(y, type(None)) or x is Unknown or y is Unknown:
1876 return False
1877 elif isinstance(y, x.__class__):
1878 return x.value > y.value
1879 elif isinstance(y, (bool, baseinteger)):
1880 return x.value > y
1881 return NotImplemented
1882
1884 if isinstance(y, type(None)) or x is Unknown or y is Unknown:
1885 return x.value == None
1886 elif isinstance(y, x.__class__):
1887 return x.value <= y.value
1888 elif isinstance(y, (bool, baseinteger)):
1889 return x.value <= y
1890 return NotImplemented
1891
1893 if isinstance(y, type(None)) or x is Unknown or y is Unknown:
1894 return False
1895 elif isinstance(y, x.__class__):
1896 return x.value < y.value
1897 elif isinstance(y, (bool, baseinteger)):
1898 return x.value < y
1899 return NotImplemented
1900
1902 if isinstance(y, x.__class__):
1903 return x.value != y.value
1904 elif isinstance(y, (bool, type(None), baseinteger)):
1905 return x.value != y
1906 return NotImplemented
1907
1909 return hash(x.value)
1910
1912 if x.value is None:
1913 raise ValueError("unable to return int() of %r" % x)
1914 return int(x.value)
1915
1917 return "Logical(%r)" % x.string
1918
1921
# Hand-construct the three Logical singletons: object.__new__ bypasses
# Logical.__new__, which itself maps every incoming value onto one of
# these three instances.
Logical.true = object.__new__(Logical)
Logical.true.value = True
Logical.true.string = 'T'
Logical.false = object.__new__(Logical)
Logical.false.value = False
Logical.false.string = 'F'
Logical.unknown = object.__new__(Logical)
Logical.unknown.value = None
Logical.unknown.string = '?'
# Public module-level aliases; Logical.__new__ returns the singletons,
# so Truth/Falsth/Unknown are those same three objects.
Truth = Logical(True)
Falsth = Logical(False)
Unknown = Logical()
1937 """
1938 Logical field return type that implements boolean algebra
1939
1940 Accepts values of True/On, False/Off, or None/Null/Unknown/Other
1941 """
1942
1944 if value is None or value is Null or value is Other or value is Unknown:
1945 return cls.unknown
1946 elif isinstance(value, basestring):
1947 if value.lower() in ('t', 'true', 'y', 'yes', 'on'):
1948 return cls.true
1949 elif value.lower() in ('f', 'false', 'n', 'no', 'off'):
1950 return cls.false
1951 elif value.lower() in ('?', 'unknown', 'null', 'none', ' ', ''):
1952 return cls.unknown
1953 else:
1954 raise ValueError('unknown value for Quantum: %s' % value)
1955 else:
1956 return (cls.false, cls.true)[bool(value)]
1957
1959 "OR (disjunction): x | y => True iff at least one of x, y is True"
1960 if not isinstance(y, (x.__class__, bool, NullType, type(None))):
1961 return NotImplemented
1962 if x.value is True or y is not Other and y == True:
1963 return x.true
1964 elif x.value is False and y is not Other and y == False:
1965 return x.false
1966 return Other
1967
1969 "IMP (material implication) x >> y => False iff x == True and y == False"
1970 if not isinstance(y, (x.__class__, bool, NullType, type(None))):
1971 return NotImplemented
1972 if (x.value is False
1973 or (x.value is True and y is not Other and y == True)):
1974 return x.true
1975 elif x.value is True and y is not Other and y == False:
1976 return False
1977 return Other
1978
1980 "IMP (material implication) x >> y => False iff x = True and y = False"
1981 if not isinstance(x, (y.__class__, bool, NullType, type(None))):
1982 return NotImplemented
1983 if (x is not Other and x == False
1984 or (x is not Other and x == True and y.value is True)):
1985 return y.true
1986 elif x is not Other and x == True and y.value is False:
1987 return y.false
1988 return Other
1989
1991 "IMP (relevant implication) x >> y => True iff both x, y are True, False iff x == True and y == False, Other if x is False"
1992 if not isinstance(y, (x.__class__, bool, NullType, type(None))):
1993 return NotImplemented
1994 if x.value is True and y is not Other and y == True:
1995 return x.true
1996 if x.value is True and y is not Other and y == False:
1997 return x.false
1998 return Other
1999
2001 "IMP (relevant implication) x >> y => True iff both x, y are True, False iff x == True and y == False, Other if y is False"
2002 if not isinstance(x, (y.__class__, bool, NullType, type(None))):
2003 return NotImplemented
2004 if x is not Other and x == True and y.value is True:
2005 return y.true
2006 if x is not Other and x == True and y.value is False:
2007 return y.false
2008 return Other
2009
2011 "NAND (negative AND) x.D(y): False iff x and y are both True"
2012 if not isinstance(y, (x.__class__, bool, NullType, type(None))):
2013 return NotImplemented
2014 if x.value is False or y is not Other and y == False:
2015 return x.true
2016 elif x.value is True and y is not Other and y == True:
2017 return x.false
2018 return Other
2019
2021 "EQV (equivalence) x.E(y): True iff x and y are the same"
2022 if not isinstance(y, (x.__class__, bool, NullType, type(None))):
2023 return NotImplemented
2024 elif (
2025 (x.value is True and y is not Other and y == True)
2026 or
2027 (x.value is False and y is not Other and y == False)
2028 ):
2029 return x.true
2030 elif (
2031 (x.value is True and y is not Other and y == False)
2032 or
2033 (x.value is False and y is not Other and y == True)
2034 ):
2035 return x.false
2036 return Other
2037
2039 "XOR (parity) x ^ y: True iff only one of x,y is True"
2040 if not isinstance(y, (x.__class__, bool, NullType, type(None))):
2041 return NotImplemented
2042 if (
2043 (x.value is True and y is not Other and y == False)
2044 or
2045 (x.value is False and y is not Other and y == True)
2046 ):
2047 return x.true
2048 if (
2049 (x.value is False and y is not Other and y == False)
2050 or
2051 (x.value is True and y is not Other and y == True)
2052 ):
2053 return x.false
2054 return Other
2055
2057 "AND (conjunction) x & y: True iff both x, y are True"
2058 if not isinstance(y, (x.__class__, bool, NullType, type(None))):
2059 return NotImplemented
2060 if x.value is True and y is not Other and y == True:
2061 return x.true
2062 elif x.value is False or y is not Other and y == False:
2063 return x.false
2064 return Other
2065
2067 "NEG (negation) -x: True iff x = False"
2068 if x is x.true:
2069 return x.false
2070 elif x is x.false:
2071 return x.true
2072 return Other
2073
2074 @classmethod
2087
2089 if not isinstance(y, (x.__class__, bool, NullType, type(None))):
2090 return NotImplemented
2091 if (
2092 (x.value is True and y is not Other and y == True)
2093 or
2094 (x.value is False and y is not Other and y == False)
2095 ):
2096 return x.true
2097 elif (
2098 (x.value is True and y is not Other and y == False)
2099 or
2100 (x.value is False and y is not Other and y == True)
2101 ):
2102 return x.false
2103 return Other
2104
2106 return hash(x.value)
2107
2109 if not isinstance(y, (x.__class__, bool, NullType, type(None))):
2110 return NotImplemented
2111 if (
2112 (x.value is True and y is not Other and y == False)
2113 or
2114 (x.value is False and y is not Other and y == True)
2115 ):
2116 return x.true
2117 elif (
2118 (x.value is True and y is not Other and y == True)
2119 or
2120 (x.value is False and y is not Other and y == False)
2121 ):
2122 return x.false
2123 return Other
2124
2126 if x is Other:
2127 raise TypeError('True/False value of %r is unknown' % x)
2128 return x.value is True
2129
2131 return "Quantum(%r)" % x.string
2132
2135
    # Wire the single-letter connectives defined above into Python's
    # operator protocol (per the alias targets: A = disjunction/OR,
    # K = conjunction/AND, N = negation, J = exclusive disjunction/XOR --
    # presumably Polish-notation connective names).
    __add__ = A
    __and__ = K
    __mul__ = K
    __neg__ = N
    __or__ = A
    __radd__ = A
    __rand__ = K
    # implication (>>) is deliberately left unset here; it appears to be
    # installed by Quantum.set_implication() during module setup -- confirm.
    __rshift__ = None
    __rmul__ = K
    __ror__ = A
    __rrshift__ = None
    __rxor__ = J
    __xor__ = J
2149
# Hand-construct the three Quantum singletons: object.__new__ bypasses
# Quantum.__new__, which itself maps every incoming value onto one of
# these three instances.
Quantum.true = object.__new__(Quantum)
Quantum.true.value = True
Quantum.true.string = 'Y'
Quantum.false = object.__new__(Quantum)
Quantum.false.value = False
Quantum.false.string = 'N'
Quantum.unknown = object.__new__(Quantum)
Quantum.unknown.value = None
Quantum.unknown.string = '?'
# choose the default behavior for the >> (implication) operator
Quantum.set_implication('material')
# Public module-level aliases for the three quantum truth values.
On = Quantum(True)
Off = Quantum(False)
Other = Quantum()
2163
2164
2165
# Register the dbf-specific field types with xmlrpc's marshaller so they
# serialize like their plain-Python counterparts, then drop the name so
# Marshaller does not linger in this module's namespace.
from xmlrpc.client import Marshaller
Marshaller.dispatch[Char] = Marshaller.dump_unicode
Marshaller.dispatch[Logical] = Marshaller.dump_bool
Marshaller.dispatch[DateTime] = Marshaller.dump_datetime
del Marshaller
2175 """
2176 Navigation base class that provides VPFish movement methods
2177 """
2178
2179 _index = -1
2180
2182 """
2183 implemented by subclass; must return True if underlying structure meets need
2184 """
2185 raise NotImplementedError()
2186
2187 - def _get_index(self, direction, n=1, start=None):
2188 """
2189 returns index of next available record towards direction
2190 """
2191 if start is not None:
2192 index = start
2193 else:
2194 index = self._index
2195 if direction == 'reverse':
2196 move = -1 * n
2197 limit = 0
2198 index += move
2199 if index < limit:
2200 return -1
2201 else:
2202 return index
2203 elif direction == 'forward':
2204 move = +1 * n
2205 limit = len(self) - 1
2206 index += move
2207 if index > limit:
2208 return len(self)
2209 else:
2210 return index
2211 else:
2212 raise ValueError("direction should be 'forward' or 'reverse', not %r" % direction)
2213
2214 @property
2216 """
2217 returns True if no more usable records towards the beginning of the table
2218 """
2219 self._nav_check()
2220 index = self._get_index('reverse')
2221 return index == -1
2222
2224 """
2225 sets record index to bottom of table (end of table)
2226 """
2227 self._nav_check()
2228 self._index = len(self)
2229 return self._index
2230
2231 @property
2243
2244 @property
2246 """
2247 returns current index
2248 """
2249 self._nav_check()
2250 return self._index
2251
2252 @property
2254 """
2255 returns True if no more usable records towards the end of the table
2256 """
2257 self._nav_check()
2258 index = self._get_index('forward')
2259 return index == len(self)
2260
2261 @property
2272
2273 - def goto(self, where):
2274 """
2275 changes the record pointer to the first matching (deleted) record
2276 where should be either an integer, or 'top' or 'bottom'.
2277 top -> before first record
2278 bottom -> after last record
2279 """
2280 self._nav_check()
2281 max = len(self)
2282 if isinstance(where, baseinteger):
2283 if not -max <= where < max:
2284 raise IndexError("Record %d does not exist" % where)
2285 if where < 0:
2286 where += max
2287 self._index = where
2288 return self._index
2289 move = getattr(self, where, None)
2290 if move is None:
2291 raise DbfError("unable to go to %r" % where)
2292 return move()
2293
2294 @property
2305
2306 @property
2317
2318 @property
2329
2330 - def skip(self, n=1):
2331 """
2332 move index to the next nth available record
2333 """
2334 self._nav_check()
2335 if n < 0:
2336 n *= -1
2337 direction = 'reverse'
2338 else:
2339 direction = 'forward'
2340 self._index = index = self._get_index(direction, n)
2341 if index < 0:
2342 raise Bof()
2343 elif index >= len(self):
2344 raise Eof()
2345 else:
2346 return index
2347
2349 """
2350 sets record index to top of table (beginning of table)
2351 """
2352 self._nav_check()
2353 self._index = -1
2354 return self._index
2355
2358 """
2359 Provides routines to extract and save data within the fields of a
2360 dbf record.
2361 """
2362
2363 __slots__ = ('_recnum', '_meta', '_data', '_old_data', '_dirty',
2364 '_memos', '_write_to_disk', '__weakref__')
2365
2366 - def __new__(cls, recnum, layout, kamikaze=b'', _fromdisk=False):
2367 """
2368 record = ascii array of entire record;
2369 layout=record specification;
2370 memo = memo object for table
2371 """
2372 record = object.__new__(cls)
2373 record._dirty = False
2374 record._recnum = recnum
2375 record._meta = layout
2376 record._memos = {}
2377 record._write_to_disk = True
2378 record._old_data = None
2379 header = layout.header
2380 record._data = layout.blankrecord[:]
2381 if kamikaze and len(record._data) != len(kamikaze):
2382 raise BadDataError("record data is not the correct length (should be %r, not %r)" %
2383 (len(record._data), len(kamikaze)), data=kamikaze[:])
2384 if recnum == -1:
2385 return record
2386 elif type(kamikaze) == array:
2387 record._data = kamikaze[:]
2388 elif type(kamikaze) == bytes:
2389 if kamikaze:
2390 record._data = array('B', kamikaze)
2391 else:
2392 raise BadDataError("%r recieved for record data" % kamikaze)
2393 if record._data[0] == NULL:
2394 record._data[0] = SPACE
2395 if record._data[0] not in (SPACE, ASTERISK):
2396 raise DbfError("record data not correct -- first character should be a ' ' or a '*'.")
2397 if not _fromdisk and layout.location == ON_DISK:
2398 record._update_disk()
2399 return record
2400
2402 for field in self._meta.user_fields:
2403 if self[field] == value:
2404 return True
2405 return False
2406
2412
2414 if not isinstance(other, (Record, RecordTemplate, dict, tuple)):
2415 return NotImplemented
2416 if isinstance(other, (Record, RecordTemplate)):
2417 if field_names(self) != field_names(other):
2418 return False
2419 for field in self._meta.user_fields:
2420 s_value, o_value = self[field], other[field]
2421 if s_value is not o_value and s_value != o_value:
2422 return False
2423 elif isinstance(other, dict):
2424 if sorted(field_names(self)) != sorted(other.keys()):
2425 return False
2426 for field in self._meta.user_fields:
2427 s_value, o_value = self[field], other[field]
2428 if s_value is not o_value and s_value != o_value:
2429 return False
2430 else:
2431 if len(self) != len(other):
2432 return False
2433 for s_value, o_value in zip(self, other):
2434 if s_value is not o_value and s_value != o_value:
2435 return False
2436 return True
2437
2443
2446
2462
2464 if isinstance(item, baseinteger):
2465 fields = self._meta.user_fields
2466 field_count = len(fields)
2467 if not -field_count <= item < field_count:
2468 raise NotFoundError("Field offset %d is not in record" % item)
2469 field = fields[item]
2470 if field in self._memos:
2471 return self._memos[field]
2472 return self[field]
2473 elif isinstance(item, slice):
2474 sequence = []
2475 if isinstance(item.start, basestring) or isinstance(item.stop, basestring):
2476 field_names = dbf.field_names(self)
2477 start, stop, step = item.start, item.stop, item.step
2478 if start not in field_names or stop not in field_names:
2479 raise MissingFieldError("Either %r or %r (or both) are not valid field names" % (start, stop))
2480 if step is not None and not isinstance(step, baseinteger):
2481 raise DbfError("step value must be an int, not %r" % type(step))
2482 start = field_names.index(start)
2483 stop = field_names.index(stop) + 1
2484 item = slice(start, stop, step)
2485 for index in self._meta.fields[item]:
2486 sequence.append(self[index])
2487 return sequence
2488 elif isinstance(item, basestring):
2489 return self.__getattr__(item)
2490 else:
2491 raise TypeError("%r is not a field name" % item)
2492
2495
2497 if not isinstance(other, (Record, RecordTemplate, dict, tuple)):
2498 return NotImplemented
2499 return not self == other
2500
2525
2527 if self._meta.status != READ_WRITE:
2528 raise DbfError("%s not in read/write mode" % self._meta.filename)
2529 if self._write_to_disk:
2530 raise DbfError("unable to modify fields individually except in `with` or `Process()`")
2531 if isinstance(name, basestring):
2532 self.__setattr__(name, value)
2533 elif isinstance(name, baseinteger):
2534 self.__setattr__(self._meta.fields[name], value)
2535 elif isinstance(name, slice):
2536 sequence = []
2537 field_names = dbf.field_names(self)
2538 if isinstance(name.start, basestring) or isinstance(name.stop, basestring):
2539 start, stop, step = name.start, name.stop, name.step
2540 if start not in field_names or stop not in field_names:
2541 raise MissingFieldError("Either %r or %r (or both) are not valid field names" % (start, stop))
2542 if step is not None and not isinstance(step, baseinteger):
2543 raise DbfError("step value must be an int, not %r" % type(step))
2544 start = field_names.index(start)
2545 stop = field_names.index(stop) + 1
2546 name = slice(start, stop, step)
2547 for field in self._meta.fields[name]:
2548 sequence.append(field)
2549 if len(sequence) != len(value):
2550 raise DbfError("length of slices not equal")
2551 for field, val in zip(sequence, value):
2552 self[field] = val
2553 else:
2554 raise TypeError("%s is not a field name" % name)
2555
2557 result = []
2558 for seq, field in enumerate(field_names(self)):
2559 result.append("%3d - %-10s: %r" % (seq, field, self[field]))
2560 return '\n'.join(result)
2561
2563 return self._data.tobytes().decode('latin1')
2564
2582
2583 @classmethod
2585 """
2586 creates a blank record data chunk
2587 """
2588 record = object.__new__(cls)
2589 record._dirty = False
2590 record._recnum = -1
2591 record._meta = layout
2592 record._data = array('B', b' ' * layout.header.record_length)
2593 layout.memofields = []
2594 signature = [layout.table().codepage.name]
2595 for index, name in enumerate(layout.fields):
2596 if name == '_nullflags':
2597 record._data[layout['_nullflags'][START]:layout['_nullflags'][END]] = array('B', [0] * layout['_nullflags'][LENGTH])
2598 for index, name in enumerate(layout.fields):
2599 signature.append(name)
2600 if name != '_nullflags':
2601 type = FieldType(layout[name][TYPE])
2602 start = layout[name][START]
2603 size = layout[name][LENGTH]
2604 end = layout[name][END]
2605 blank = layout.fieldtypes[type]['Blank']
2606 record._data[start:end] = array('B', blank(size))
2607 if layout[name][TYPE] in layout.memo_types:
2608 layout.memofields.append(name)
2609 decimals = layout[name][DECIMALS]
2610 signature[-1] = '_'.join([str(x) for x in (signature[-1], type.symbol, size, decimals)])
2611 layout.blankrecord = record._data[:]
2612 data_types = []
2613 for fieldtype, defs in sorted(layout.fieldtypes.items()):
2614 if fieldtype != _NULLFLAG:
2615 data_types.append("%s_%s_%s" % (fieldtype.symbol, defs['Empty'], defs['Class']))
2616 layout.record_sig = ('___'.join(signature), '___'.join(data_types))
2617
2619 """
2620 rerun all indices with this record
2621 """
2622 if self._meta.status == CLOSED:
2623 raise DbfError("%s is closed; cannot alter indices" % self._meta.filename)
2624 elif not self._write_to_disk:
2625 raise DbfError("unable to reindex record until it is written to disk")
2626 for dbfindex in self._meta.table()._indexen:
2627 dbfindex(self)
2628
2630 """
2631 calls appropriate routine to convert value stored in field from array
2632 """
2633 fielddef = self._meta[name]
2634 flags = fielddef[FLAGS]
2635 nullable = flags & NULLABLE and '_nullflags' in self._meta
2636 binary = flags & BINARY
2637 if nullable:
2638 byte, bit = divmod(index, 8)
2639 null_def = self._meta['_nullflags']
2640 null_data = self._data[null_def[START]:null_def[END]]
2641 try:
2642 if null_data[byte] >> bit & 1:
2643 return Null
2644 except IndexError:
2645 print(null_data)
2646 print(index)
2647 print(byte, bit)
2648 print(len(self._data), self._data)
2649 print(null_def)
2650 print(null_data)
2651 raise
2652
2653 record_data = self._data[fielddef[START]:fielddef[END]]
2654 field_type = fielddef[TYPE]
2655 retrieve = self._meta.fieldtypes[field_type]['Retrieve']
2656 datum = retrieve(record_data, fielddef, self._meta.memo, self._meta.decoder)
2657 return datum
2658
2670
2683
2685 """
2686 calls appropriate routine to convert value to bytes, and save it in record
2687 """
2688 fielddef = self._meta[name]
2689 field_type = fielddef[TYPE]
2690 flags = fielddef[FLAGS]
2691 binary = flags & BINARY
2692 nullable = flags & NULLABLE and '_nullflags' in self._meta
2693 update = self._meta.fieldtypes[field_type]['Update']
2694 if nullable:
2695 byte, bit = divmod(index, 8)
2696 null_def = self._meta['_nullflags']
2697 null_data = self._data[null_def[START]:null_def[END]]
2698 if value is Null:
2699 null_data[byte] |= 1 << bit
2700 value = None
2701 else:
2702 null_data[byte] &= 0xff ^ 1 << bit
2703 self._data[null_def[START]:null_def[END]] = null_data
2704 if value is not Null:
2705 bytes = array('B', update(value, fielddef, self._meta.memo, self._meta.input_decoder, self._meta.encoder))
2706 size = fielddef[LENGTH]
2707 if len(bytes) > size:
2708 raise DataOverflowError("tried to store %d bytes in %d byte field" % (len(bytes), size))
2709 blank = array('B', b' ' * size)
2710 start = fielddef[START]
2711 end = start + size
2712 blank[:len(bytes)] = bytes[:]
2713 self._data[start:end] = blank[:]
2714 self._dirty = True
2715
2717 layout = self._meta
2718 if self._recnum < 0:
2719 raise DbfError("cannot update a packed record")
2720 if layout.location == ON_DISK:
2721 header = layout.header
2722 if location == '':
2723 location = self._recnum * header.record_length + header.start
2724 if data is None:
2725 data = self._data
2726 layout.dfd.seek(location)
2727 layout.dfd.write(data)
2728 self._dirty = False
2729 table = layout.table()
2730 if table is not None:
2731 for index in table._indexen:
2732 index(self)
2733
2739
2742 """
2743 Provides routines to mimic a dbf record.
2744 """
2745
2746 __slots__ = ('_meta', '_data', '_old_data', '_memos', '_write_to_disk', '__weakref__')
2747
2758
2760 """
2761 Calls appropriate routine to convert value stored in field from
2762 array
2763 """
2764 fielddef = self._meta[name]
2765 flags = fielddef[FLAGS]
2766 nullable = flags & NULLABLE and '_nullflags' in self._meta
2767 binary = flags & BINARY
2768 if nullable:
2769 byte, bit = divmod(index, 8)
2770 null_def = self._meta['_nullflags']
2771 null_data = self._data[null_def[START]:null_def[END]]
2772 if null_data[byte] >> bit & 1:
2773 return Null
2774 record_data = self._data[fielddef[START]:fielddef[END]]
2775 field_type = fielddef[TYPE]
2776 retrieve = self._meta.fieldtypes[field_type]['Retrieve']
2777 datum = retrieve(record_data, fielddef, self._meta.memo, self._meta.decoder)
2778 return datum
2779
2790
2792 """
2793 Allows record.field_name = ... and record[...] = ...; must use ._commit_flux() to commit changes
2794 """
2795 if not self._write_to_disk:
2796 raise DbfError("template already in a state of flux")
2797 self._old_data = self._data[:]
2798 self._write_to_disk = False
2799
2801 """
2802 calls appropriate routine to convert value to ascii bytes, and save it in record
2803 """
2804 fielddef = self._meta[name]
2805 field_type = fielddef[TYPE]
2806 flags = fielddef[FLAGS]
2807 binary = flags & BINARY
2808 nullable = flags & NULLABLE and '_nullflags' in self._meta
2809 update = self._meta.fieldtypes[field_type]['Update']
2810 if nullable:
2811 byte, bit = divmod(index, 8)
2812 null_def = self._meta['_nullflags']
2813 null_data = self._data[null_def[START]:null_def[END]]
2814
2815 if value is Null:
2816 null_data[byte] |= 1 << bit
2817 value = None
2818 else:
2819 null_data[byte] &= 0xff ^ 1 << bit
2820
2821 self._data[null_def[START]:null_def[END]] = null_data
2822 if value is not Null:
2823 bytes = array('B', update(value, fielddef, self._meta.memo, self._meta.input_decoder, self._meta.encoder))
2824 size = fielddef[LENGTH]
2825 if len(bytes) > size:
2826 raise DataOverflowError("tried to store %d bytes in %d byte field" % (len(bytes), size))
2827 blank = array('B', b' ' * size)
2828 start = fielddef[START]
2829 end = start + size
2830 blank[:len(bytes)] = bytes[:]
2831 self._data[start:end] = blank[:]
2832
2833 - def __new__(cls, layout, original_record=None, defaults=None):
2864
2867
2869 if not isinstance(other, (Record, RecordTemplate, dict, tuple)):
2870 return NotImplemented
2871 if isinstance(other, (Record, RecordTemplate)):
2872 if field_names(self) != field_names(other):
2873 return False
2874 for field in self._meta.user_fields:
2875 s_value, o_value = self[field], other[field]
2876 if s_value is not o_value and s_value != o_value:
2877 return False
2878 elif isinstance(other, dict):
2879 if sorted(field_names(self)) != sorted(other.keys()):
2880 return False
2881 for field in self._meta.user_fields:
2882 s_value, o_value = self[field], other[field]
2883 if s_value is not o_value and s_value != o_value:
2884 return False
2885 else:
2886 if len(self) != len(other):
2887 return False
2888 for s_value, o_value in zip(self, other):
2889 if s_value is not o_value and s_value != o_value:
2890 return False
2891 return True
2892
2895
2911
2913 fields = self._meta.user_fields
2914 if isinstance(item, baseinteger):
2915 field_count = len(fields)
2916 if not -field_count <= item < field_count:
2917 raise NotFoundError("Field offset %d is not in record" % item)
2918 field = fields[item]
2919 if field in self._memos:
2920 return self._memos[field]
2921 return self[field]
2922 elif isinstance(item, slice):
2923 sequence = []
2924 if isinstance(item.start, basestring) or isinstance(item.stop, basestring):
2925 start, stop, step = item.start, item.stop, item.step
2926 if start not in fields or stop not in fields:
2927 raise MissingFieldError("Either %r or %r (or both) are not valid field names" % (start, stop))
2928 if step is not None and not isinstance(step, baseinteger):
2929 raise DbfError("step value must be an int, not %r" % type(step))
2930 start = fields.index(start)
2931 stop = fields.index(stop) + 1
2932 item = slice(start, stop, step)
2933 for index in self._meta.fields[item]:
2934 sequence.append(self[index])
2935 return sequence
2936 elif isinstance(item, basestring):
2937 return self.__getattr__(item)
2938 else:
2939 raise TypeError("%r is not a field name" % item)
2940
2943
2945 if not isinstance(other, (Record, RecordTemplate, dict, tuple)):
2946 return NotImplemented
2947 return not self == other
2948
2968
2970 if isinstance(name, basestring):
2971 self.__setattr__(name, value)
2972 elif isinstance(name, baseinteger):
2973 self.__setattr__(self._meta.fields[name], value)
2974 elif isinstance(name, slice):
2975 sequence = []
2976 field_names = dbf.field_names(self)
2977 if isinstance(name.start, basestring) or isinstance(name.stop, basestring):
2978 start, stop, step = name.start, name.stop, name.step
2979 if start not in field_names or stop not in field_names:
2980 raise MissingFieldError("Either %r or %r (or both) are not valid field names" % (start, stop))
2981 if step is not None and not isinstance(step, baseinteger):
2982 raise DbfError("step value must be an int, not %r" % type(step))
2983 start = field_names.index(start)
2984 stop = field_names.index(stop) + 1
2985 name = slice(start, stop, step)
2986 for field in self._meta.fields[name]:
2987 sequence.append(field)
2988 if len(sequence) != len(value):
2989 raise DbfError("length of slices not equal")
2990 for field, val in zip(sequence, value):
2991 self[field] = val
2992 else:
2993 raise TypeError("%s is not a field name" % name)
2994
2995
2997 return self._data.tobytes()
2998
3000 result = []
3001 for seq, field in enumerate(field_names(self)):
3002 result.append("%3d - %-10s: %r" % (seq, field, self[field]))
3003 return '\n'.join(result)
3004
3007 """
3008 Provides routines to mimic a dbf record, but all values are non-existent.
3009 """
3010
3011 __slots__ = ('_recno', '_sequence')
3012
def __new__(cls, position, sequence):
    """
    Create a placeholder (vapor) record anchored at one end of a table.

    position: must be 'bof' or 'eof' -- which end of the record sequence
        this vapor record represents
    sequence: the table / record sequence the vapor record belongs to
        (stored for later reporting, e.g. in __repr__)

    Raises ValueError for any other position value.
    """
    if position not in ('bof', 'eof'):
        raise ValueError("position should be 'bof' or 'eof', not %r" % position)
    vapor = object.__new__(cls)
    # record number is -1 before the first record, None past the last
    vapor._recno = (-1, None)[position == 'eof']
    vapor._sequence = sequence
    return vapor
3025
3028
3033
3034
3036 if name[0:2] == '__' and name[-2:] == '__':
3037 raise AttributeError('Method %s is not implemented.' % name)
3038 else:
3039 return Vapor
3040
3042 if isinstance(item, baseinteger):
3043 return Vapor
3044 elif isinstance(item, slice):
3045 raise TypeError('slice notation not allowed on Vapor records')
3046 elif isinstance(item, basestring):
3047 return self.__getattr__(item)
3048 else:
3049 raise TypeError("%r is not a field name" % item)
3050
3052 raise TypeError("Vapor records have no length")
3053
3058
3060 """
3061 Vapor records are always False
3062 """
3063 return False
3064
3070
3072 if isinstance(name, (basestring, baseinteger)):
3073 raise TypeError("cannot change Vapor record")
3074 elif isinstance(name, slice):
3075 raise TypeError("slice notation not allowed on Vapor records")
3076 else:
3077 raise TypeError("%s is not a field name" % name)
3078
3080 return "RecordVaporWare(position=%r, sequence=%r)" % (('bof', 'eof')[recno(self) is None], self._sequence)
3081
3083 return 'VaporRecord(%r)' % recno(self)
3084
3085 @property
3091
3094 """
3095 Provides access to memo fields as dictionaries
3096 Must override _init, _get_memo, and _put_memo to
3097 store memo contents to disk
3098 """
3099
3101 """
3102 Initialize disk file usage
3103 """
3104
3106 """
3107 Retrieve memo contents from disk
3108 """
3109
3111 """
3112 Store memo contents to disk
3113 """
3114
3116 """
3117 Resets memo structure back to zero memos
3118 """
3119 self.memory.clear()
3120 self.nextmemo = 1
3121
3123 self.meta = meta
3124 self.memory = {}
3125 self.nextmemo = 1
3126 self._init()
3127 self.meta.newmemofile = False
3128
3130 """
3131 Gets the memo in block
3132 """
3133 if self.meta.ignorememos or not block:
3134 return ''
3135 if self.meta.location == ON_DISK:
3136 return self._get_memo(block)
3137 else:
3138 return self.memory[block]
3139
3141 """
3142 Stores data in memo file, returns block number
3143 """
3144 if self.meta.ignorememos or data == '':
3145 return 0
3146 if self.meta.location == IN_MEMORY:
3147 thismemo = self.nextmemo
3148 self.nextmemo += 1
3149 self.memory[thismemo] = data
3150 else:
3151 thismemo = self._put_memo(data)
3152 return thismemo
3153
3156 """
3157 dBase III specific
3158 """
3159
3161 self.meta.memo_size= 512
3162 self.record_header_length = 2
3163 if self.meta.location == ON_DISK and not self.meta.ignorememos:
3164 if self.meta.newmemofile:
3165 self.meta.mfd = open(self.meta.memoname, 'w+b')
3166 self.meta.mfd.write(pack_long_int(1) + b'\x00' * 508)
3167 else:
3168 try:
3169 self.meta.mfd = open(self.meta.memoname, 'r+b')
3170 self.meta.mfd.seek(0)
3171 next = self.meta.mfd.read(4)
3172 self.nextmemo = unpack_long_int(next)
3173 except Exception:
3174 exc = sys.exc_info()[1]
3175 raise DbfError("memo file appears to be corrupt: %r" % exc.args) from None
3176
3178 block = int(block)
3179 self.meta.mfd.seek(block * self.meta.memo_size)
3180 eom = -1
3181 data = b''
3182 while eom == -1:
3183 newdata = self.meta.mfd.read(self.meta.memo_size)
3184 if not newdata:
3185 return data
3186 data += newdata
3187 eom = data.find(b'\x1a\x1a')
3188 return data[:eom]
3189
3191 data = data
3192 length = len(data) + self.record_header_length
3193 blocks = length // self.meta.memo_size
3194 if length % self.meta.memo_size:
3195 blocks += 1
3196 thismemo = self.nextmemo
3197 self.nextmemo = thismemo + blocks
3198 self.meta.mfd.seek(0)
3199 self.meta.mfd.write(pack_long_int(self.nextmemo))
3200 self.meta.mfd.seek(thismemo * self.meta.memo_size)
3201 self.meta.mfd.write(data)
3202 self.meta.mfd.write(b'\x1a\x1a')
3203 double_check = self._get_memo(thismemo)
3204 if len(double_check) != len(data):
3205 uhoh = open('dbf_memo_dump.err', 'wb')
3206 uhoh.write('thismemo: %d' % thismemo)
3207 uhoh.write('nextmemo: %d' % self.nextmemo)
3208 uhoh.write('saved: %d bytes' % len(data))
3209 uhoh.write(data)
3210 uhoh.write('retrieved: %d bytes' % len(double_check))
3211 uhoh.write(double_check)
3212 uhoh.close()
3213 raise DbfError("unknown error: memo not saved")
3214 return thismemo
3215
3223
3225 """
3226 Visual Foxpro 6 specific
3227 """
3228
3230 if self.meta.location == ON_DISK and not self.meta.ignorememos:
3231 self.record_header_length = 8
3232 if self.meta.newmemofile:
3233 if self.meta.memo_size == 0:
3234 self.meta.memo_size = 1
3235 elif 1 < self.meta.memo_size < 33:
3236 self.meta.memo_size *= 512
3237 self.meta.mfd = open(self.meta.memoname, 'w+b')
3238 nextmemo = 512 // self.meta.memo_size
3239 if nextmemo * self.meta.memo_size < 512:
3240 nextmemo += 1
3241 self.nextmemo = nextmemo
3242 self.meta.mfd.write(pack_long_int(nextmemo, bigendian=True) + b'\x00\x00' + \
3243 pack_short_int(self.meta.memo_size, bigendian=True) + b'\x00' * 504)
3244 else:
3245 try:
3246 self.meta.mfd = open(self.meta.memoname, 'r+b')
3247 self.meta.mfd.seek(0)
3248 header = self.meta.mfd.read(512)
3249 self.nextmemo = unpack_long_int(header[:4], bigendian=True)
3250 self.meta.memo_size = unpack_short_int(header[6:8], bigendian=True)
3251 except Exception:
3252 exc = sys.exc_info()[1]
3253 raise DbfError("memo file appears to be corrupt: %r" % exc.args) from None
3254
3256 self.meta.mfd.seek(block * self.meta.memo_size)
3257 header = self.meta.mfd.read(8)
3258 length = unpack_long_int(header[4:], bigendian=True)
3259 return self.meta.mfd.read(length)
3260
3262 data = data
3263 self.meta.mfd.seek(0)
3264 thismemo = unpack_long_int(self.meta.mfd.read(4), bigendian=True)
3265 self.meta.mfd.seek(0)
3266 length = len(data) + self.record_header_length
3267 blocks = length // self.meta.memo_size
3268 if length % self.meta.memo_size:
3269 blocks += 1
3270 self.meta.mfd.write(pack_long_int(thismemo + blocks, bigendian=True))
3271 self.meta.mfd.seek(thismemo * self.meta.memo_size)
3272 self.meta.mfd.write(b'\x00\x00\x00\x01' + pack_long_int(len(data), bigendian=True) + data)
3273 return thismemo
3274
3276 if self.meta.location == ON_DISK and not self.meta.ignorememos:
3277 mfd = self.meta.mfd
3278 mfd.seek(0)
3279 mfd.truncate(0)
3280 nextmemo = 512 // self.meta.memo_size
3281 if nextmemo * self.meta.memo_size < 512:
3282 nextmemo += 1
3283 self.nextmemo = nextmemo
3284 mfd.write(pack_long_int(nextmemo, bigendian=True) + b'\x00\x00' + \
3285 pack_short_int(self.meta.memo_size, bigendian=True) + b'\x00' * 504)
3286 mfd.flush()
3287
3288
3289 -class DbfCsv(csv.Dialect):
3300 csv.register_dialect('dbf', DbfCsv)
3304 """
3305 used because you cannot weakref None
3306 """
3307
3310
3311 _DeadObject = _DeadObject()
3312
3313
3314
3315
3316 VFPTIME = 1721425
3319 """
3320 Returns a two-bye integer from the value, or raises DbfError
3321 """
3322
3323 if value > 65535:
3324 raise DataOverflowError("Maximum Integer size exceeded. Possible: 65535. Attempted: %d" % value)
3325 if bigendian:
3326 return struct.pack('>H', value)
3327 else:
3328 return struct.pack('<H', value)
3329
3331 """
3332 Returns a four-bye integer from the value, or raises DbfError
3333 """
3334
3335 if value > 4294967295:
3336 raise DataOverflowError("Maximum Integer size exceeded. Possible: 4294967295. Attempted: %d" % value)
3337 if bigendian:
3338 return struct.pack('>L', value)
3339 else:
3340 return struct.pack('<L', value)
3341
3343 """
3344 Returns an 11 byte, upper-cased, null padded string suitable for field names;
3345 raises DbfError if the string is bigger than 10 bytes
3346 """
3347 if len(string) > 10:
3348 raise DbfError("Maximum string size is ten characters -- %s has %d characters" % (string, len(string)))
3349 return struct.pack('11s', string.upper())
3350
3352 """
3353 Returns the value in the two-byte integer passed in
3354 """
3355 if bigendian:
3356 return struct.unpack('>H', bytes)[0]
3357 else:
3358 return struct.unpack('<H', bytes)[0]
3359
3361 """
3362 Returns the value in the four-byte integer passed in
3363 """
3364 if bigendian:
3365 return int(struct.unpack('>L', bytes)[0])
3366 else:
3367 return int(struct.unpack('<L', bytes)[0])
3368
3370 """
3371 Returns a normal, lower-cased string from a null-padded byte string
3372 """
3373 field = struct.unpack('%ds' % len(chars), chars)[0]
3374 name = []
3375 for ch in field:
3376 if ch == NULL:
3377 break
3378 name.append(ch)
3379 return bytes(name).lower()
3380
3382 """
3383 return scientific notation with not more than decimals-1 decimal places
3384 """
3385 value = str(value)
3386 sign = ''
3387 if value[0] in ('+-'):
3388 sign = value[0]
3389 if sign == '+':
3390 sign = ''
3391 value = value[1:]
3392 if 'e' in value:
3393 e = value.find('e')
3394 if e - 1 <= decimals:
3395 return sign + value
3396 integer, mantissa, power = value[0], value[1:e], value[e+1:]
3397 mantissa = mantissa[:decimals]
3398 value = sign + integer + mantissa + 'e' + power
3399 return value
3400 integer, mantissa = value[0], value[1:]
3401 if integer == '0':
3402 for e, integer in enumerate(mantissa):
3403 if integer not in ('.0'):
3404 break
3405 mantissa = '.' + mantissa[e+1:]
3406 mantissa = mantissa[:decimals]
3407 value = sign + integer + mantissa + 'e-%03d' % e
3408 return value
3409 e = mantissa.find('.')
3410 mantissa = '.' + mantissa.replace('.','')
3411 mantissa = mantissa[:decimals]
3412 value = sign + integer + mantissa + 'e+%03d' % e
3413 return value
3414
3416 """
3417 called if a data type is not supported for that style of table
3418 """
3419 return something
3420
3422 """
3423 Returns the string in bytes as fielddef[CLASS] or fielddef[EMPTY]
3424 """
3425 data = bytes.tobytes()
3426 if fielddef[FLAGS] & BINARY:
3427 return data
3428 data = fielddef[CLASS](decoder(data)[0])
3429 if not data.strip():
3430 cls = fielddef[EMPTY]
3431 if cls is NoneType:
3432 return None
3433 return cls(data)
3434 return data
3435
3437 """
3438 returns the string as bytes (not unicode) as fielddef[CLASS] or fielddef[EMPTY]
3439 """
3440 length = fielddef[LENGTH]
3441 if string == None:
3442 return length * b' '
3443 if fielddef[FLAGS] & BINARY:
3444 if not isinstance(string, bytes):
3445 raise ValueError('binary field: %r not in bytes format' % string)
3446 return string
3447 else:
3448 if not isinstance(string, basestring):
3449 raise ValueError("unable to coerce %r(%r) to string" % (type(string), string))
3450 string = encoder(string.strip())[0]
3451 return string
3452
3454 """
3455 Returns the currency value in bytes
3456 """
3457 value = struct.unpack('<q', bytes)[0]
3458 return fielddef[CLASS](("%de-4" % value).strip())
3459
3461 """
3462 Returns the value to be stored in the record's disk data
3463 """
3464 if value == None:
3465 value = 0
3466 currency = int(value * 10000)
3467 if not -9223372036854775808 < currency < 9223372036854775808:
3468 raise DataOverflowError("value %s is out of bounds" % value)
3469 return struct.pack('<q', currency)
3470
3472 """
3473 Returns the ascii coded date as fielddef[CLASS] or fielddef[EMPTY]
3474 """
3475 text = bytes.tobytes()
3476 if text == b' ':
3477 cls = fielddef[EMPTY]
3478 if cls is NoneType:
3479 return None
3480 return cls()
3481 year = int(text[0:4])
3482 month = int(text[4:6])
3483 day = int(text[6:8])
3484 return fielddef[CLASS](year, month, day)
3485
3487 """
3488 Returns the Date or datetime.date object ascii-encoded (yyyymmdd)
3489 """
3490 if moment == None:
3491 return b' '
3492 return ("%04d%02d%02d" % moment.timetuple()[:3]).encode('ascii')
3493
3495 """
3496 Returns the double in bytes as fielddef[CLASS] ('default' == float)
3497 """
3498 typ = fielddef[CLASS]
3499 if typ == 'default':
3500 typ = float
3501 return typ(struct.unpack('<d', bytes)[0])
3502
3504 """
3505 returns the value to be stored in the record's disk data
3506 """
3507 if value == None:
3508 value = 0
3509 return struct.pack('<d', float(value))
3510
3512 """
3513 Returns the binary number stored in bytes in little-endian
3514 format as fielddef[CLASS]
3515 """
3516 typ = fielddef[CLASS]
3517 if typ == 'default':
3518 typ = int
3519 return typ(struct.unpack('<i', bytes)[0])
3520
3522 """
3523 Returns value in little-endian binary format
3524 """
3525 if value == None:
3526 value = 0
3527 try:
3528 value = int(value)
3529 except Exception:
3530 raise DbfError("incompatible type: %s(%s)" % (type(value), value)) from None
3531 if not -2147483648 < value < 2147483647:
3532 raise DataOverflowError("Integer size exceeded. Possible: -2,147,483,648..+2,147,483,647. Attempted: %d" % value)
3533 return struct.pack('<i', int(value))
3534
3536 """
3537 Returns True if bytes is 't', 'T', 'y', or 'Y'
3538 None if '?'
3539 False otherwise
3540 """
3541 cls = fielddef[CLASS]
3542 empty = fielddef[EMPTY]
3543 bytes = bytes.tobytes()
3544 if bytes in b'tTyY':
3545 return cls(True)
3546 elif bytes in b'fFnN':
3547 return cls(False)
3548 elif bytes in b'? ':
3549 if empty is NoneType:
3550 return None
3551 return empty()
3552 elif LOGICAL_BAD_IS_NONE:
3553 return None
3554 else:
3555 raise BadDataError('Logical field contained %r' % bytes)
3556 return typ(bytes)
3557
3559 """
3560 Returns 'T' if logical is True, 'F' if False, '?' otherwise
3561 """
3562 if data is Unknown or data is None or data is Null or data is Other:
3563 return b'?'
3564 if data == True:
3565 return b'T'
3566 if data == False:
3567 return b'F'
3568 raise ValueError("unable to automatically coerce %r to Logical" % data)
3569
3571 """
3572 Returns the block of data from a memo file
3573 """
3574 stringval = bytes.tobytes().strip()
3575 if not stringval or memo is None:
3576 cls = fielddef[EMPTY]
3577 if cls is NoneType:
3578 return None
3579 return cls()
3580 block = int(stringval)
3581 data = memo.get_memo(block)
3582 if fielddef[FLAGS] & BINARY:
3583 return data
3584 return fielddef[CLASS](decoder(data)[0])
3585
def update_memo(string, fielddef, memo, decoder, encoder):
    """
    Writes string as a memo, returns the block number it was saved into,
    rendered as an ascii, right-justified field of fielddef[LENGTH] bytes.

    string: bytes for binary memo fields, text for character memo fields
        (None is treated as empty)
    memo: the table's memo object; None means memos are being ignored

    Raises DbfError if memos are being ignored, ValueError if the data
    type does not match the field's binary/text flavor.
    """
    if memo is None:
        raise DbfError('Memos are being ignored, unable to update')
    if fielddef[FLAGS] & BINARY:
        if string is None:
            string = b''
        if not isinstance(string, bytes):
            raise ValueError('binary field: %r not in bytes format' % string)
    else:
        if string is None:
            string = ''
        if not isinstance(string, basestring):
            raise ValueError("unable to coerce %r(%r) to string" % (type(string), string))
        # encoder returns (encoded_bytes, length_consumed); only the bytes are needed
        string = encoder(string)[0]
    block = memo.put_memo(string)
    if block == 0:
        # nothing was stored -- render the field as all blanks.
        # NOTE: must be a str here (it is interpolated into a str format
        # below); the former b'' rendered the literal "b''" into the field.
        block = ''
    return ("%*s" % (fielddef[LENGTH], block)).encode('ascii')
3607
3609 """
3610 Returns the number stored in bytes as integer if field spec for
3611 decimals is 0, float otherwise
3612 """
3613 string = bytes.tobytes().replace(b'\x00', b'').strip()
3614 cls = fielddef[CLASS]
3615 if not string or string[0:1] == b'*':
3616 cls = fielddef[EMPTY]
3617 if cls is NoneType:
3618 return None
3619 return cls()
3620 if cls == 'default':
3621 if fielddef[DECIMALS] == 0:
3622 return int(string)
3623 else:
3624 return float(string)
3625 else:
3626 return cls(string)
3627
3629 """
3630 returns value as ascii representation, rounding decimal
3631 portion as necessary
3632 """
3633 if value == None:
3634 return fielddef[LENGTH] * b' '
3635 try:
3636 value = float(value)
3637 except Exception:
3638 raise DbfError("incompatible type: %s(%s)" % (type(value), value)) from None
3639 decimalsize = fielddef[DECIMALS]
3640 totalsize = fielddef[LENGTH]
3641 if decimalsize:
3642 decimalsize += 1
3643 maxintegersize = totalsize - decimalsize
3644 integersize = len("%.0f" % floor(value))
3645 if integersize > maxintegersize:
3646 if integersize != 1:
3647 raise DataOverflowError('Integer portion too big')
3648 string = scinot(value, decimalsize)
3649 if len(string) > totalsize:
3650 raise DataOverflowError('Value representation too long for field')
3651 return ("%*.*f" % (fielddef[LENGTH], fielddef[DECIMALS], value)).encode('ascii')
3652
3654 """
3655 returns the date/time stored in bytes; dates <= 01/01/1981 00:00:00
3656 may not be accurate; BC dates are nulled.
3657 """
3658
3659
3660 if bytes == array('B', [0] * 8):
3661 cls = fielddef[EMPTY]
3662 if cls is NoneType:
3663 return None
3664 return cls()
3665 cls = fielddef[CLASS]
3666 time = unpack_long_int(bytes[4:])
3667 microseconds = (time % 1000) * 1000
3668 time = time // 1000
3669 hours = time // 3600
3670 mins = time % 3600 // 60
3671 secs = time % 3600 % 60
3672 time = datetime.time(hours, mins, secs, microseconds)
3673 possible = unpack_long_int(bytes[:4])
3674 possible -= VFPTIME
3675 possible = max(0, possible)
3676 date = datetime.date.fromordinal(possible)
3677 return cls(date.year, date.month, date.day, time.hour, time.minute, time.second, time.microsecond)
3678
3680 """
3681 Sets the date/time stored in moment
3682 moment must have fields:
3683 year, month, day, hour, minute, second, microsecond
3684 """
3685 data = [0] * 8
3686 if moment:
3687 hour = moment.hour
3688 minute = moment.minute
3689 second = moment.second
3690 millisecond = moment.microsecond // 1000
3691 time = ((hour * 3600) + (minute * 60) + second) * 1000 + millisecond
3692 data[4:] = update_integer(time)
3693 data[:4] = update_integer(moment.toordinal() + VFPTIME)
3694 return bytes(data)
3695
3697 """
3698 Returns the block of data from a memo file
3699 """
3700 if memo is None:
3701 block = 0
3702 else:
3703 block = struct.unpack('<i', bytes)[0]
3704 if not block:
3705 cls = fielddef[EMPTY]
3706 if cls is NoneType:
3707 return None
3708 return cls()
3709 data = memo.get_memo(block)
3710 if fielddef[FLAGS] & BINARY:
3711 return data
3712 return fielddef[CLASS](decoder(data)[0])
3713
3715 """
3716 Writes string as a memo, returns the block number it was saved into
3717 """
3718 if memo is None:
3719 raise DbfError('Memos are being ignored, unable to update')
3720 if string == None:
3721 return struct.pack('<i', 0)
3722 if fielddef[FLAGS] & BINARY:
3723
3724
3725 if not isinstance(string, bytes):
3726 raise ValueError('binary field: %r not in bytes format' % string)
3727 string = bytes(string)
3728 else:
3729
3730
3731 if not isinstance(string, basestring):
3732 raise ValueError("unable to coerce %r(%r) to string" % (type(string), string))
3733 string = encoder(string)[0]
3734 block = memo.put_memo(string)
3735 return struct.pack('<i', block)
3736
3738 if format[0][0] != '(' or format[0][-1] != ')' or any([f not in flags for f in format[1:]]):
3739 raise FieldSpecError("Format for Character field creation is 'C(n)%s', not 'C%s'" % field_spec_error_text(format, flags))
3740 length = int(format[0][1:-1])
3741 if not 0 < length < 256:
3742 raise FieldSpecError("Character fields must be between 1 and 255, not %d" % length)
3743 decimals = 0
3744 flag = 0
3745 for f in format[1:]:
3746 flag |= FieldFlag.lookup(f)
3747 return length, decimals, flag
3748
3758
3768
3778
3789
3791 if len(format) > 1 or format[0][0] != '(' or format[0][-1] != ')' or any(f not in flags for f in format[1:]):
3792 raise FieldSpecError("Format for Numeric field creation is 'N(s,d)%s', not 'N%s'" % field_spec_error_text(format, flags))
3793 length, decimals = format[0][1:-1].split(',')
3794 length = int(length)
3795 decimals = int(decimals)
3796 flag = 0
3797 for f in format[1:]:
3798 flag |= FieldFlag.lookup(f)
3799 if not 0 < length < 20:
3800 raise FieldSpecError("Numeric fields must be between 1 and 19 digits, not %d" % length)
3801 if decimals and not 0 < decimals <= length - 2:
3802 raise FieldSpecError("Decimals must be between 0 and Length-2 (Length: %d, Decimals: %d)" % (length, decimals))
3803 return length, decimals, flag
3804
3806 if format[0][0] != '(' or format[0][-1] != ')' or any([f not in flags for f in format[1:]]):
3807 raise FieldSpecError("Format for Character field creation is 'C(n)%s', not 'C%s'" % field_spec_error_text(format, flags))
3808 length = int(format[0][1:-1])
3809 if not 0 < length < 65519:
3810 raise FieldSpecError("Character fields must be between 1 and 65,519")
3811 decimals = 0
3812 flag = 0
3813 for f in format[1:]:
3814 flag |= FieldFlag.lookup(f)
3815 return length, decimals, flag
3816
3818 if format[0][0] != '(' or format[0][-1] != ')' or any([f not in flags for f in format[1:]]):
3819 raise FieldSpecError("Format for Character field creation is 'C(n)%s', not 'C%s'" % field_spec_error_text(format, flags))
3820 length = int(format[0][1:-1])
3821 if not 0 < length < 255:
3822 raise FieldSpecError("Character fields must be between 1 and 255")
3823 decimals = 0
3824 flag = 0
3825 for f in format[1:]:
3826 flag |= FieldFlag.lookup(f)
3827 return length, decimals, flag
3828
3838
3848
3858
3868
3880
3892
3894 if format[0][0] != '(' or format[0][-1] != ')' or any(f not in flags for f in format[1:]):
3895 raise FieldSpecError("Format for Numeric field creation is 'N(s,d)%s', not 'N%s'" % field_spec_error_text(format, flags))
3896 length, decimals = format[0][1:-1].split(',')
3897 length = int(length)
3898 decimals = int(decimals)
3899 flag = 0
3900 for f in format[1:]:
3901 flag |= FieldFlag.lookup(f)
3902 if not 0 < length < 21:
3903 raise FieldSpecError("Numeric fields must be between 1 and 20 digits, not %d" % length)
3904 if decimals and not 0 < decimals <= length - 2:
3905 raise FieldSpecError("Decimals must be between 0 and Length-2 (Length: %d, Decimals: %d)" % (length, decimals))
3906 return length, decimals, flag
3907
def field_spec_error_text(format, flags):
    """
    generic routine for error text for the add...() functions

    Returns a (flag_text, format_text) pair suitable for interpolation
    into the FieldSpecError messages; either element is '' when the
    corresponding input is empty.
    """
    flag_text = ' [ ' + ' | '.join(flags) + ' ]' if flags else ''
    format_text = ' ' + ' '.join(format) if format else ''
    return flag_text, format_text
3919
3921 """
3922 extends all iters to longest one, using last value from each as necessary
3923 """
3924 iters = [iter(x) for x in iters]
3925 last = [None] * len(iters)
3926 while "any iters have items left":
3927 alive = len(iters)
3928 for i, iterator in enumerate(iters):
3929 try:
3930 value = next(iterator)
3931 last[i] = value
3932 except StopIteration:
3933 alive -= 1
3934 if alive:
3935 yield tuple(last)
3936 alive = len(iters)
3937 continue
3938 break
3939
3940
3941
3942
3943 -class Tables(object):
3944 """
3945 context manager for multiple tables and/or indices
3946 """
3948 if len(tables) == 1 and not isinstance(tables[0], (Table, basestring)):
3949 tables = tables[0]
3950 yo._tables = []
3951 yo._entered = []
3952 for table in tables:
3953 if isinstance(table, basestring):
3954 table = Table(table)
3955 yo._tables.append(table)
3957 for table in yo._tables:
3958 table.__enter__()
3959 yo._entered.append(table)
3960 return tuple(yo._tables)
3962 while yo._entered:
3963 table = yo._entered.pop()
3964 try:
3965 table.__exit__()
3966 except Exception:
3967 pass
3968
3970 """
3971 Represents the index where the match criteria is if True,
3972 or would be if False
3973
3974 Used by Index.index_search
3975 """
3976
3978 "value is the number, found is True/False"
3979 result = int.__new__(cls, value)
3980 result.found = found
3981 return result
3982
3985
3988 """
3989 tuple with named attributes for representing a field's dbf type,
3990 length, decimal portion, and python class
3991 """
3992
3993 __slots__= ()
3994
3996 if len(args) != 4:
3997 raise TypeError("%s should be called with Type, Length, Decimal size, and Class" % cls.__name__)
3998 return tuple.__new__(cls, args)
3999
4000 @property
4003
4004 @property
4007
4008 @property
4011
4012 @property
4015
4016
4017 -class CodePage(tuple):
4018 """
4019 tuple with named attributes for representing a tables codepage
4020 """
4021
4022 __slots__= ()
4023
def __new__(cls, name):
    "call with name of codepage (e.g. 'cp1252')"
    # _codepage_lookup yields (code, canonical_name, description);
    # the tuple is stored reordered as (name, description, code)
    lookup = _codepage_lookup(name)
    return tuple.__new__(cls, (lookup[1], lookup[2], lookup[0]))
4028
def __repr__(self):
    """Debug representation showing name, description, and code number."""
    name, description, code = self[0], self[1], self[2]
    return "CodePage(%r, %r, %r)" % (name, description, code)
4031
def __str__(self):
    """Human-readable form: 'name (description)'."""
    name, description = self[0], self[1]
    return "%s (%s)" % (name, description)
4034
4035 @property
4038
4039 @property
4042
4043 @property
4046
4047
4048 -class Iter(_Navigation):
4049 """
4050 Provides iterable behavior for a table
4051 """
4052
def __init__(self, table, include_vapor=False):
    """
    Iterate over table's records.

    include_vapor: when True, a Vapor record is returned as the last
        record of the iteration
    """
    # NOTE(review): does not call super().__init__(); presumably
    # _Navigation supplies _index elsewhere -- confirm before changing
    self._exhausted = False
    self._include_vapor = include_vapor
    self._record = None
    self._table = table
4062
4065
4067 while not self._exhausted:
4068 if self._index == len(self._table):
4069 break
4070 if self._index >= (len(self._table) - 1):
4071 self._index = max(self._index, len(self._table))
4072 if self._include_vapor:
4073 return RecordVaporWare('eof', self._table)
4074 break
4075 self._index += 1
4076 record = self._table[self._index]
4077 return record
4078 self._exhausted = True
4079 raise StopIteration
4080
4081
4082 -class Table(_Navigation):
4083 """
4084 Base class for dbf style tables
4085 """
4086
4087 _version = 'basic memory table'
4088 _versionabbr = 'dbf'
4089 _max_fields = 255
4090 _max_records = 4294967296
4091
4092 @MutableDefault
4094 return {
4095 CHAR: {
4096 'Type':'Character', 'Init':add_character, 'Blank':lambda x: b' ' * x, 'Retrieve':retrieve_character, 'Update':update_character,
4097 'Class':str, 'Empty':str, 'flags':tuple(),
4098 },
4099 DATE: {
4100 'Type':'Date', 'Init':add_date, 'Blank':lambda x: b' ', 'Retrieve':retrieve_date, 'Update':update_date,
4101 'Class':datetime.date, 'Empty':none, 'flags':tuple(),
4102 },
4103 NUMERIC: {
4104 'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: b' ' * x, 'Init':add_numeric,
4105 'Class':'default', 'Empty':none, 'flags':tuple(),
4106 },
4107 LOGICAL: {
4108 'Type':'Logical', 'Init':add_logical, 'Blank':lambda x: b'?', 'Retrieve':retrieve_logical, 'Update':update_logical,
4109 'Class':bool, 'Empty':none, 'flags':tuple(),
4110 },
4111 MEMO: {
4112 'Type':'Memo', 'Init':add_memo, 'Blank':lambda x: b' ', 'Retrieve':retrieve_memo, 'Update':update_memo,
4113 'Class':str, 'Empty':str, 'flags':tuple(),
4114 },
4115 FLOAT: {
4116 'Type':'Numeric', 'Init':add_numeric, 'Blank':lambda x: b' ' * x, 'Retrieve':retrieve_numeric, 'Update':update_numeric,
4117 'Class':'default', 'Empty':none, 'flags':tuple(),
4118 },
4119 }
4120 @MutableDefault
4123 _memoext = ''
4124 _memoClass = _DbfMemo
4125 _yesMemoMask = 0
4126 _noMemoMask = 0
4127 _binary_types = tuple()
4128 _character_types = (CHAR, DATE, FLOAT, LOGICAL, MEMO, NUMERIC)
4129 _currency_types = tuple()
4130 _date_types = (DATE, )
4131 _datetime_types = tuple()
4132 _decimal_types = (NUMERIC, FLOAT)
4133 _fixed_types = (MEMO, DATE, LOGICAL)
4134 _logical_types = (LOGICAL, )
4135 _memo_types = (MEMO, )
4136 _numeric_types = (NUMERIC, FLOAT)
4137 _variable_types = (CHAR, NUMERIC, FLOAT)
4138 _dbfTableHeader = array('B', [0] * 32)
4139 _dbfTableHeader[0] = 0
4140 _dbfTableHeader[8:10] = array('B', pack_short_int(33))
4141 _dbfTableHeader[10] = 1
4142 _dbfTableHeader[29] = 0
4143 _dbfTableHeader = _dbfTableHeader.tobytes()
4144 _dbfTableHeaderExtra = b''
4145 _supported_tables = ()
4146 _pack_count = 0
4147 backup = None
4148
4150 """
4151 implements the weakref structure for seperate indexes
4152 """
4153
4155 self._indexen = set()
4156
4158 self._indexen = set([s for s in self._indexen if s() is not None])
4159 return (s() for s in self._indexen if s() is not None)
4160
4162 self._indexen = set([s for s in self._indexen if s() is not None])
4163 return len(self._indexen)
4164
def add(self, new_index):
    """Register new_index via weakref, then prune refs whose targets died."""
    self._indexen.add(weakref.ref(new_index))
    self._indexen = {ref for ref in self._indexen if ref() is not None}
4168
4188
4190 """
4191 represents the data block that defines a tables type and layout
4192 """
4193
4195 if len(data) != 32:
4196 raise BadDataError('table header should be 32 bytes, but is %d bytes' % len(data))
4197 self.packDate = pack_date
4198 self.unpackDate = unpack_date
4199 self._data = array('B', data + b'\x0d')
4200
4202 """
4203 get/set code page of table
4204 """
4205 if cp is None:
4206 return self._data[29]
4207 else:
4208 cp, sd, ld = _codepage_lookup(cp)
4209 self._data[29] = cp
4210 return cp
4211
4212 @property
4214 """
4215 main data structure
4216 """
4217 date = self.packDate(Date.today())
4218 self._data[1:4] = array('B', date)
4219 return self._data.tobytes()
4220
4221 @data.setter
4223 if len(bytes) < 32:
4224 raise BadDataError("length for data of %d is less than 32" % len(bytes))
4225 self._data[:] = array('B', bytes)
4226
4227 @property
4229 "extra dbf info (located after headers, before data records)"
4230 fieldblock = self._data[32:]
4231 for i in range(len(fieldblock) // 32 + 1):
4232 cr = i * 32
4233 if fieldblock[cr] == CR:
4234 break
4235 else:
4236 raise BadDataError("corrupt field structure")
4237 cr += 33
4238 return self._data[cr:].tobytes()
4239
4240 @extra.setter
4242 fieldblock = self._data[32:]
4243 for i in range(len(fieldblock) // 32 + 1):
4244 cr = i * 32
4245 if fieldblock[cr] == CR:
4246 break
4247 else:
4248 raise BadDataError("corrupt field structure")
4249 cr += 33
4250 self._data[cr:] = array('B', data)
4251 self._data[8:10] = array('B', pack_short_int(len(self._data)))
4252
4253 @property
4255 "number of fields (read-only)"
4256 fieldblock = self._data[32:]
4257 for i in range(len(fieldblock) // 32 + 1):
4258 cr = i * 32
4259 if fieldblock[cr] == CR:
4260 break
4261 else:
4262 raise BadDataError("corrupt field structure")
4263 return len(fieldblock[:cr]) // 32
4264
4265 @property
4267 """
4268 field block structure
4269 """
4270 fieldblock = self._data[32:]
4271 for i in range(len(fieldblock) // 32 + 1):
4272 cr = i * 32
4273 if fieldblock[cr] == CR:
4274 break
4275 else:
4276 raise BadDataError("corrupt field structure")
4277 return fieldblock[:cr].tobytes()
4278
4279 @fields.setter
4281 fieldblock = self._data[32:]
4282 for i in range(len(fieldblock) // 32 + 1):
4283 cr = i * 32
4284 if fieldblock[cr] == CR:
4285 break
4286 else:
4287 raise BadDataError("corrupt field structure")
4288 cr += 32
4289 fieldlen = len(block)
4290 if fieldlen % 32 != 0:
4291 raise BadDataError("fields structure corrupt: %d is not a multiple of 32" % fieldlen)
4292 self._data[32:cr] = array('B', block)
4293 self._data[8:10] = array('B', pack_short_int(len(self._data)))
4294 fieldlen = fieldlen // 32
4295 recordlen = 1
4296 for i in range(fieldlen):
4297 recordlen += block[i*32+16]
4298 self._data[10:12] = array('B', pack_short_int(recordlen))
4299
4300 @property
4302 """
4303 number of records (maximum 16,777,215)
4304 """
4305 return unpack_long_int(self._data[4:8].tobytes())
4306
4307 @record_count.setter
4310
4311 @property
4313 """
4314 length of a record (read_only) (max of 65,535)
4315 """
4316 return unpack_short_int(self._data[10:12].tobytes())
4317
4318 @record_length.setter
4324
4325 @property
4327 """
4328 starting position of first record in file (must be within first 64K)
4329 """
4330 return unpack_short_int(self._data[8:10].tobytes())
4331
4332 @start.setter
4335
4336 @property
4338 """
4339 date of last table modification (read-only)
4340 """
4341 return self.unpackDate(self._data[1:4].tobytes())
4342
4343 @property
4345 """
4346 dbf version
4347 """
4348 return self._data[0]
4349
4350 @version.setter
4353
4355 """
4356 implements the weakref table for records
4357 """
4358
4360 self._meta = meta
4361 self._max_count = count
4362 self._weakref_list = {}
4363 self._accesses = 0
4364 self._dead_check = 1024
4365
4367
4368 if index < 0:
4369 if self._max_count + index < 0:
4370 raise IndexError('index %d smaller than available records' % index)
4371 index = self._max_count + index
4372 if index >= self._max_count:
4373 raise IndexError('index %d greater than available records' % index)
4374 maybe = self._weakref_list.get(index)
4375 if maybe:
4376 maybe = maybe()
4377 self._accesses += 1
4378 if self._accesses >= self._dead_check:
4379 dead = []
4380 for key, value in self._weakref_list.items():
4381 if value() is None:
4382 dead.append(key)
4383 for key in dead:
4384 del self._weakref_list[key]
4385 if not maybe:
4386 meta = self._meta
4387 if meta.status == CLOSED:
4388 raise DbfError("%s is closed; record %d is unavailable" % (meta.filename, index))
4389 header = meta.header
4390 if index < 0:
4391 index += header.record_count
4392 size = header.record_length
4393 location = index * size + header.start
4394 meta.dfd.seek(location)
4395 if meta.dfd.tell() != location:
4396 raise ValueError("unable to seek to offset %d in file" % location)
4397 bytes = meta.dfd.read(size)
4398 if not bytes:
4399 raise ValueError("unable to read record data from %s at location %d" % (meta.filename, location))
4400 maybe = Record(recnum=index, layout=meta, kamikaze=bytes, _fromdisk=True)
4401 self._weakref_list[index] = weakref.ref(maybe)
4402 return maybe
4403
4405 self._weakref_list[self._max_count] = weakref.ref(record)
4406 self._max_count += 1
4407
4409 for key in list(self._weakref_list.keys()):
4410 del self._weakref_list[key]
4411 self._max_count = 0
4412
4414 for maybe in self._weakref_list.values():
4415 maybe = maybe()
4416 if maybe and not maybe._write_to_disk:
4417 raise DbfError("some records have not been written to disk")
4418
4420 if not self._max_count:
4421 raise IndexError('no records exist')
4422 record = self[self._max_count-1]
4423 self._max_count -= 1
4424 return record
4425
4427 """
4428 constructs fieldblock for disk table
4429 """
4430 fieldblock = array('B', b'')
4431 memo = False
4432 nulls = False
4433 meta = self._meta
4434 header = meta.header
4435 header.version = header.version & self._noMemoMask
4436 meta.fields = [f for f in meta.fields if f != '_nullflags']
4437 for field in meta.fields:
4438 layout = meta[field]
4439 if meta.fields.count(field) > 1:
4440 raise BadDataError("corrupted field structure (noticed in _build_header_fields)")
4441 fielddef = array('B', [0] * 32)
4442 fielddef[:11] = array('B', pack_str(meta.encoder(field)[0]))
4443 fielddef[11] = layout[TYPE]
4444 fielddef[12:16] = array('B', pack_long_int(layout[START]))
4445 fielddef[16] = layout[LENGTH]
4446 fielddef[17] = layout[DECIMALS]
4447 fielddef[18] = layout[FLAGS]
4448 fieldblock.extend(fielddef)
4449 if layout[TYPE] in meta.memo_types:
4450 memo = True
4451 if layout[FLAGS] & NULLABLE:
4452 nulls = True
4453 if memo:
4454 header.version = header.version | self._yesMemoMask
4455 if meta.memo is None:
4456 meta.memo = self._memoClass(meta)
4457 else:
4458 if os.path.exists(meta.memoname):
4459 if meta.mfd is not None:
4460 meta.mfd.close()
4461
4462 os.remove(meta.memoname)
4463 meta.memo = None
4464 if nulls:
4465 start = layout[START] + layout[LENGTH]
4466 length, one_more = divmod(len(meta.fields), 8)
4467 if one_more:
4468 length += 1
4469 fielddef = array('B', [0] * 32)
4470 fielddef[:11] = array('B', pack_str(b'_nullflags'))
4471 fielddef[11] = 0x30
4472 fielddef[12:16] = array('B', pack_long_int(start))
4473 fielddef[16] = length
4474 fielddef[17] = 0
4475 fielddef[18] = BINARY | SYSTEM
4476 fieldblock.extend(fielddef)
4477 meta.fields.append('_nullflags')
4478 nullflags = (
4479 _NULLFLAG,
4480 start,
4481 length,
4482 start + length,
4483 0,
4484 BINARY | SYSTEM,
4485 none,
4486 none,
4487 )
4488 meta['_nullflags'] = nullflags
4489 header.fields = fieldblock.tobytes()
4490 meta.user_fields = [f for f in meta.fields if not meta[f][FLAGS] & SYSTEM]
4491 meta.user_field_count = len(meta.user_fields)
4492 Record._create_blank_data(meta)
4493
4495 """
4496 checks memo file for problems
4497 """
4498 raise NotImplementedError("_check_memo_integrity must be implemented by subclass")
4499
4501 """
4502 builds the FieldList of names, types, and descriptions from the disk file
4503 """
4504 raise NotImplementedError("_initialize_fields must be implemented by subclass")
4505
4507 """
4508 Returns field information Name Type(Length[, Decimals])
4509 """
4510 name = self._meta.fields[i]
4511 fielddef = self._meta[name]
4512 type = FieldType(fielddef[TYPE])
4513 length = fielddef[LENGTH]
4514 decimals = fielddef[DECIMALS]
4515 set_flags = fielddef[FLAGS]
4516 flags = []
4517 if type in (GENERAL, PICTURE):
4518 printable_flags = NULLABLE, SYSTEM
4519 else:
4520 printable_flags = BINARY, NULLABLE, SYSTEM
4521 for flg in printable_flags:
4522 if flg & set_flags == flg:
4523 flags.append(FieldFlag(flg))
4524 set_flags &= 255 ^ flg
4525 if flags:
4526 flags = ' ' + ' '.join(f.text for f in flags)
4527 else:
4528 flags = ''
4529 if type in self._fixed_types:
4530 description = "%s %s%s" % (name, type.symbol, flags)
4531 elif type in self._numeric_types:
4532 description = "%s %s(%d,%d)%s" % (name, type.symbol, length, decimals, flags)
4533 else:
4534 description = "%s %s(%d)%s" % (name, type.symbol, length, flags)
4535 return description
4536
4538 """
4539 standardizes field specs
4540 """
4541 if specs is None:
4542 specs = self.field_names
4543 elif isinstance(specs, basestring):
4544 specs = specs.strip(sep).split(sep)
4545 else:
4546 specs = list(specs)
4547 specs = [s.strip() for s in specs]
4548 return specs
4549
4556
4557 @staticmethod
4559 """
4560 Returns a group of three bytes, in integer form, of the date
4561 """
4562
4563 return bytes([date.year - 1900, date.month, date.day])
4564
4565 @staticmethod
4567 """
4568 Returns a Date() of the packed three-byte date passed in
4569 """
4570 year, month, day = struct.unpack('<BBB', bytestr)
4571 year += 1900
4572 return Date(year, month, day)
4573
4596
4598 """
4599 data can be a record, template, dict, or tuple
4600 """
4601 if not isinstance(data, (Record, RecordTemplate, dict, tuple)):
4602 raise TypeError("x should be a record, template, dict, or tuple, not %r" % type(data))
4603 for record in Iter(self):
4604 if data == record:
4605 return True
4606 return False
4607
4612
4616
4618 if name in (
4619 'binary_types',
4620 'character_types',
4621 'currency_types',
4622 'date_types',
4623 'datetime_types',
4624 'decimal_types',
4625 'fixed_types',
4626 'logical_types',
4627 'memo_types',
4628 'numeric_types',
4629 'variable_types',
4630 ):
4631 return getattr(self, '_'+name)
4632 if name in ('_table', ):
4633 if self._meta.location == ON_DISK:
4634 self._table = self._Table(len(self), self._meta)
4635 else:
4636 self._table = []
4637 return object.__getattribute__(self, name)
4638
4652
4653 - def __init__(self, filename, field_specs=None, memo_size=128, ignore_memos=False,
4654 codepage=None, default_data_types=None, field_data_types=None,
4655 dbf_type=None, on_disk=True,
4656 ):
4657 """
4658 open/create dbf file
4659 filename should include path if needed
4660 field_specs can be either a ;-delimited string or a list of strings
4661 memo_size is always 512 for db3 memos
4662 ignore_memos is useful if the memo file is missing or corrupt
4663 read_only will load records into memory, then close the disk file
4664 keep_memos will also load any memo fields into memory
4665 meta_only will ignore all records, keeping only basic table information
4666 codepage will override whatever is set in the table itself
4667 """
4668
4669 if not on_disk:
4670 if field_specs is None:
4671 raise DbfError("field list must be specified for memory tables")
4672 self._indexen = self._Indexen()
4673 self._meta = meta = self._MetaData()
4674 meta.max_fields = self._max_fields
4675 meta.max_records = self._max_records
4676 meta.table = weakref.ref(self)
4677 meta.filename = filename
4678 meta.fields = []
4679 meta.user_fields = []
4680 meta.user_field_count = 0
4681 meta.fieldtypes = fieldtypes = self._field_types
4682 meta.fixed_types = self._fixed_types
4683 meta.variable_types = self._variable_types
4684 meta.character_types = self._character_types
4685 meta.currency_types = self._currency_types
4686 meta.decimal_types = self._decimal_types
4687 meta.numeric_types = self._numeric_types
4688 meta.memo_types = self._memo_types
4689 meta.ignorememos = meta.original_ignorememos = ignore_memos
4690 meta.memo_size = memo_size
4691 meta.input_decoder = codecs.getdecoder(input_decoding)
4692 meta.output_encoder = codecs.getencoder(input_decoding)
4693 meta.header = header = self._TableHeader(self._dbfTableHeader, self._pack_date, self._unpack_date)
4694 header.extra = self._dbfTableHeaderExtra
4695 if default_data_types is None:
4696 default_data_types = dict()
4697 elif default_data_types == 'enhanced':
4698 default_data_types = {
4699 'C' : dbf.Char,
4700 'L' : dbf.Logical,
4701 'D' : dbf.Date,
4702 'T' : dbf.DateTime,
4703 }
4704 self._meta._default_data_types = default_data_types
4705 if field_data_types is None:
4706 field_data_types = dict()
4707 self._meta._field_data_types = field_data_types
4708 for field, types in default_data_types.items():
4709 field = FieldType(field)
4710 if not isinstance(types, tuple):
4711 types = (types, )
4712 for result_name, result_type in ezip(('Class', 'Empty', 'Null'), types):
4713 fieldtypes[field][result_name] = result_type
4714 if not on_disk:
4715 self._table = []
4716 meta.location = IN_MEMORY
4717 meta.memoname = filename
4718 meta.header.data
4719 else:
4720 base, ext = os.path.splitext(filename)
4721 if ext.lower() != '.dbf':
4722 meta.filename = filename + '.dbf'
4723 searchname = filename + '.[Db][Bb][Ff]'
4724 else:
4725 meta.filename = filename
4726 searchname = filename
4727 matches = glob(searchname)
4728 if len(matches) == 1:
4729 meta.filename = matches[0]
4730 elif matches:
4731 raise DbfError("please specify exactly which of %r you want" % (matches, ))
4732 case = [('l','u')[c.isupper()] for c in meta.filename[-4:]]
4733 if case == ['l','l','l','l']:
4734 meta.memoname = base + self._memoext.lower()
4735 elif case == ['l','u','u','u']:
4736 meta.memoname = base + self._memoext.upper()
4737 else:
4738 meta.memoname = base + ''.join([c.lower() if case[i] == 'l' else c.upper() for i, c in enumerate(self._memoext)])
4739 meta.location = ON_DISK
4740 if codepage is not None:
4741 header.codepage(codepage)
4742 cp, sd, ld = _codepage_lookup(codepage)
4743 self._meta.decoder = codecs.getdecoder(sd)
4744 self._meta.encoder = codecs.getencoder(sd)
4745 if field_specs:
4746 if meta.location == ON_DISK:
4747 meta.dfd = open(meta.filename, 'w+b')
4748 meta.newmemofile = True
4749 if codepage is None:
4750 header.codepage(default_codepage)
4751 cp, sd, ld = _codepage_lookup(header.codepage())
4752 meta.decoder = codecs.getdecoder(sd)
4753 meta.encoder = codecs.getencoder(sd)
4754 meta.status = READ_WRITE
4755 self.add_fields(field_specs)
4756 else:
4757 try:
4758 dfd = meta.dfd = open(meta.filename, 'r+b')
4759 except IOError:
4760 e= sys.exc_info()[1]
4761 raise DbfError(str(e)) from None
4762 dfd.seek(0)
4763 meta.header = header = self._TableHeader(dfd.read(32), self._pack_date, self._unpack_date)
4764 if not header.version in self._supported_tables:
4765 dfd.close()
4766 dfd = None
4767 raise DbfError(
4768 "%s does not support %s [%x]" %
4769 (self._version,
4770 version_map.get(header.version, 'Unknown: %s' % header.version),
4771 header.version))
4772 if codepage is None:
4773 cp, sd, ld = _codepage_lookup(header.codepage())
4774 self._meta.decoder = codecs.getdecoder(sd)
4775 self._meta.encoder = codecs.getencoder(sd)
4776 fieldblock = dfd.read(header.start - 32)
4777 for i in range(len(fieldblock) // 32 + 1):
4778 fieldend = i * 32
4779 if fieldblock[fieldend] == CR:
4780 break
4781 else:
4782 raise BadDataError("corrupt field structure in header")
4783 if len(fieldblock[:fieldend]) % 32 != 0:
4784 raise BadDataError("corrupt field structure in header")
4785 old_length = header.data[10:12]
4786 header.fields = fieldblock[:fieldend]
4787 header.data = header.data[:10] + old_length + header.data[12:]
4788 header.extra = fieldblock[fieldend + 1:]
4789 self._initialize_fields()
4790 self._check_memo_integrity()
4791 dfd.seek(0)
4792
4793 for field in meta.fields:
4794 field_type = meta[field][TYPE]
4795 default_field_type = (
4796 fieldtypes[field_type]['Class'],
4797 fieldtypes[field_type]['Empty'],
4798 )
4799 specific_field_type = field_data_types.get(field)
4800 if specific_field_type is not None and not isinstance(specific_field_type, tuple):
4801 specific_field_type = (specific_field_type, )
4802 classes = []
4803 for result_name, result_type in ezip(
4804 ('class', 'empty'),
4805 specific_field_type or default_field_type,
4806 ):
4807 classes.append(result_type)
4808 meta[field] = meta[field][:-2] + tuple(classes)
4809 meta.status = READ_ONLY
4810 self.close()
4811
4813 """
4814 iterates over the table's records
4815 """
4816 return Iter(self)
4817
4819 """
4820 returns number of records in table
4821 """
4822 return self._meta.header.record_count
4823
4824 - def __new__(cls, filename, field_specs=None, memo_size=128, ignore_memos=False,
4825 codepage=None, default_data_types=None, field_data_types=None,
4826 dbf_type=None, on_disk=True,
4827 ):
4828 if dbf_type is None and isinstance(filename, Table):
4829 return filename
4830 if field_specs and dbf_type is None:
4831 dbf_type = default_type
4832 if dbf_type is not None:
4833 dbf_type = dbf_type.lower()
4834 table = table_types.get(dbf_type)
4835 if table is None:
4836 raise DbfError("Unknown table type: %s" % dbf_type)
4837 return object.__new__(table)
4838 else:
4839 base, ext = os.path.splitext(filename)
4840 if ext.lower() != '.dbf':
4841 filename = filename + '.dbf'
4842 possibles = guess_table_type(filename)
4843 if len(possibles) == 1:
4844 return object.__new__(possibles[0][2])
4845 else:
4846 for type, desc, cls in possibles:
4847 if type == default_type:
4848 return object.__new__(cls)
4849 else:
4850 types = ', '.join(["%s" % item[1] for item in possibles])
4851 abbrs = '[' + ' | '.join(["%s" % item[0] for item in possibles]) + ']'
4852 raise DbfError("Table could be any of %s. Please specify %s when opening" % (types, abbrs))
4853
4855 """
4856 True if table has any records
4857 """
4858 return self._meta.header.record_count != 0
4859
4862
4885
4886 @property
4887 - def codepage(self):
4888 """
4889 code page used for text translation
4890 """
4891 return CodePage(code_pages[self._meta.header.codepage()][0])
4892
4893 @codepage.setter
4894 - def codepage(self, codepage):
4895 if not isinstance(codepage, CodePage):
4896 raise TypeError("codepage should be a CodePage, not a %r" % type(codepage))
4897 meta = self._meta
4898 if meta.status != READ_WRITE:
4899 raise DbfError('%s not in read/write mode, unable to change codepage' % meta.filename)
4900 meta.header.codepage(codepage.code)
4901 meta.decoder = codecs.getdecoder(codepage.name)
4902 meta.encoder = codecs.getencoder(codepage.name)
4903 self._update_disk(headeronly=True)
4904
4905 @property
4911
4912 @property
4914 """
4915 a list of the user fields in the table
4916 """
4917 return self._meta.user_fields[:]
4918
4919 @property
4921 """
4922 table's file name, including path (if specified on open)
4923 """
4924 return self._meta.filename
4925
4926 @property
4928 """
4929 date of last update
4930 """
4931 return self._meta.header.update
4932
4933 @property
4935 """
4936 table's memo name (if path included in filename on open)
4937 """
4938 return self._meta.memoname
4939
4940 @property
4942 """
4943 number of bytes in a record (including deleted flag and null field size
4944 """
4945 return self._meta.header.record_length
4946
4947 @property
4953
4954 @property
4956 """
4957 CLOSED, READ_ONLY, or READ_WRITE
4958 """
4959 return self._meta.status
4960
4961 @property
4963 """
4964 returns the dbf type of the table
4965 """
4966 return self._version
4967
4969 """
4970 adds field(s) to the table layout; format is Name Type(Length,Decimals)[; Name Type(Length,Decimals)[...]]
4971 backup table is created with _backup appended to name
4972 then zaps table, recreates current structure, and copies records back from the backup
4973 """
4974 meta = self._meta
4975 if meta.status != READ_WRITE:
4976 raise DbfError('%s not in read/write mode, unable to add fields (%s)' % (meta.filename, meta.status))
4977 header = meta.header
4978 fields = self.structure() + self._list_fields(field_specs, sep=';')
4979 if (len(fields) + ('_nullflags' in meta)) > meta.max_fields:
4980 raise DbfError(
4981 "Adding %d more field%s would exceed the limit of %d"
4982 % (len(fields), ('','s')[len(fields)==1], meta.max_fields)
4983 )
4984 old_table = None
4985 if self:
4986 old_table = self.create_backup()
4987 self.zap()
4988 if meta.mfd is not None and not meta.ignorememos:
4989 meta.mfd.close()
4990 meta.mfd = None
4991 meta.memo = None
4992 if not meta.ignorememos:
4993 meta.newmemofile = True
4994 offset = 1
4995 for name in meta.fields:
4996 del meta[name]
4997 meta.fields[:] = []
4998
4999 meta.blankrecord = None
5000 for field in fields:
5001 field = field.lower()
5002 pieces = field.split()
5003 name = pieces.pop(0)
5004 if '(' in pieces[0]:
5005 loc = pieces[0].index('(')
5006 pieces.insert(0, pieces[0][:loc])
5007 pieces[1] = pieces[1][loc:]
5008 format = FieldType(pieces.pop(0))
5009 if pieces and '(' in pieces[0]:
5010 for i, p in enumerate(pieces):
5011 if ')' in p:
5012 pieces[0:i+1] = [''.join(pieces[0:i+1])]
5013 break
5014 if name[0] == '_' or name[0].isdigit() or not name.replace('_', '').isalnum():
5015 raise FieldSpecError("%s invalid: field names must start with a letter, and can only contain letters, digits, and _" % name)
5016
5017 if name in meta.fields:
5018 raise DbfError("Field '%s' already exists" % name)
5019 field_type = format
5020 if len(name) > 10:
5021 raise FieldSpecError("Maximum field name length is 10. '%s' is %d characters long." % (name, len(name)))
5022 if not field_type in meta.fieldtypes.keys():
5023 raise FieldSpecError("Unknown field type: %s" % field_type)
5024 init = self._meta.fieldtypes[field_type]['Init']
5025 flags = self._meta.fieldtypes[field_type]['flags']
5026 try:
5027 length, decimals, flags = init(pieces, flags)
5028 except FieldSpecError:
5029 exc = sys.exc_info()[1]
5030 raise FieldSpecError(exc.message + ' (%s:%s)' % (meta.filename, name)) from None
5031 start = offset
5032 end = offset + length
5033 offset = end
5034 meta.fields.append(name)
5035 cls = meta.fieldtypes[field_type]['Class']
5036 empty = meta.fieldtypes[field_type]['Empty']
5037 meta[name] = (
5038 field_type,
5039 start,
5040 length,
5041 end,
5042 decimals,
5043 flags,
5044 cls,
5045 empty,
5046 )
5047 self._build_header_fields()
5048 self._update_disk()
5049 if old_table is not None:
5050 old_table.open()
5051 for record in old_table:
5052 self.append(scatter(record))
5053 old_table.close()
5054
5096
5097 - def append(self, data=b'', drop=False, multiple=1):
5098 """
5099 adds <multiple> blank records, and fills fields with dict/tuple values if present
5100 """
5101 meta = self._meta
5102 if meta.status != READ_WRITE:
5103 raise DbfError('%s not in read/write mode, unable to append records' % meta.filename)
5104 if not self.field_count:
5105 raise DbfError("No fields defined, cannot append")
5106 empty_table = len(self) == 0
5107 dictdata = False
5108 tupledata = False
5109 header = meta.header
5110 kamikaze = b''
5111 if header.record_count == meta.max_records:
5112 raise DbfError("table %r is full; unable to add any more records" % self)
5113 if isinstance(data, (Record, RecordTemplate)):
5114 if data._meta.record_sig[0] == self._meta.record_sig[0]:
5115 kamikaze = data._data
5116 else:
5117 if isinstance(data, dict):
5118 dictdata = data
5119 data = b''
5120 elif isinstance(data, tuple):
5121 if len(data) > self.field_count:
5122 raise DbfError("incoming data has too many values")
5123 tupledata = data
5124 data = b''
5125 elif data:
5126 raise TypeError("data to append must be a tuple, dict, record, or template; not a %r" % type(data))
5127 newrecord = Record(recnum=header.record_count, layout=meta, kamikaze=kamikaze)
5128 if kamikaze and meta.memofields:
5129 newrecord._start_flux()
5130 for field in meta.memofields:
5131 newrecord[field] = data[field]
5132 newrecord._commit_flux()
5133
5134 self._table.append(newrecord)
5135 header.record_count += 1
5136 if not kamikaze:
5137 try:
5138 if dictdata:
5139 gather(newrecord, dictdata, drop=drop)
5140 elif tupledata:
5141 newrecord._start_flux()
5142 for index, item in enumerate(tupledata):
5143 newrecord[index] = item
5144 newrecord._commit_flux()
5145 elif data:
5146 newrecord._start_flux()
5147 data_fields = field_names(data)
5148 my_fields = self.field_names
5149 for field in data_fields:
5150 if field not in my_fields:
5151 if not drop:
5152 raise DbfError("field %r not in table %r" % (field, self))
5153 else:
5154 newrecord[field] = data[field]
5155 newrecord._commit_flux()
5156 except Exception:
5157 self._table.pop()
5158 header.record_count = header.record_count - 1
5159 self._update_disk()
5160 raise
5161 multiple -= 1
5162 if multiple:
5163 data = newrecord._data
5164 single = header.record_count
5165 total = single + multiple
5166 while single < total:
5167 multi_record = Record(single, meta, kamikaze=data)
5168 multi_record._start_flux()
5169 self._table.append(multi_record)
5170 for field in meta.memofields:
5171 multi_record[field] = newrecord[field]
5172 single += 1
5173 multi_record._commit_flux()
5174 header.record_count = total
5175 newrecord = multi_record
5176 self._update_disk(headeronly=True)
5177
5192
5194 """
5195 creates a backup table
5196 """
5197 meta = self._meta
5198 already_open = meta.status != CLOSED
5199 if not already_open:
5200 self.open()
5201 if on_disk is None:
5202 on_disk = meta.location
5203 if not on_disk and new_name is None:
5204 new_name = self.filename + '_backup'
5205 if new_name is None:
5206 upper = self.filename.isupper()
5207 directory, filename = os.path.split(self.filename)
5208 name, ext = os.path.splitext(filename)
5209 extra = ('_backup', '_BACKUP')[upper]
5210 new_name = os.path.join(temp_dir or directory, name + extra + ext)
5211 bkup = Table(new_name, self.structure(), codepage=self.codepage.name, dbf_type=self._versionabbr, on_disk=on_disk)
5212
5213 bkup._meta.encoder = self._meta.encoder
5214 bkup._meta.decoder = self._meta.decoder
5215 bkup.open()
5216 for record in self:
5217 bkup.append(record)
5218 bkup.close()
5219 self.backup = new_name
5220 if not already_open:
5221 self.close()
5222 return bkup
5223
5225 """
5226 creates an in-memory index using the function key
5227 """
5228 meta = self._meta
5229 if meta.status == CLOSED:
5230 raise DbfError('%s is closed' % meta.filename)
5231 return Index(self, key)
5232
5234 """
5235 returns a record template that can be used like a record
5236 """
5237 return RecordTemplate(self._meta, original_record=record, defaults=defaults)
5238
5240 """
5241 removes field(s) from the table
5242 creates backup files with _backup appended to the file name,
5243 then modifies current structure
5244 """
5245 meta = self._meta
5246 if meta.status != READ_WRITE:
5247 raise DbfError('%s not in read/write mode, unable to delete fields' % meta.filename)
5248 doomed = self._list_fields(doomed)
5249 header = meta.header
5250 for victim in doomed:
5251 if victim not in meta.user_fields:
5252 raise DbfError("field %s not in table -- delete aborted" % victim)
5253 old_table = None
5254 if self:
5255 old_table = self.create_backup()
5256 self.zap()
5257 if meta.mfd is not None and not meta.ignorememos:
5258 meta.mfd.close()
5259 meta.mfd = None
5260 meta.memo = None
5261 if not meta.ignorememos:
5262 meta.newmemofile = True
5263 if '_nullflags' in meta.fields:
5264 doomed.append('_nullflags')
5265 for victim in doomed:
5266 layout = meta[victim]
5267 meta.fields.pop(meta.fields.index(victim))
5268 start = layout[START]
5269 end = layout[END]
5270 for field in meta.fields:
5271 if meta[field][START] == end:
5272 specs = list(meta[field])
5273 end = specs[END]
5274 specs[START] = start
5275 specs[END] = start + specs[LENGTH]
5276 start = specs[END]
5277 meta[field] = tuple(specs)
5278 self._build_header_fields()
5279 self._update_disk()
5280 for name in list(meta):
5281 if name not in meta.fields:
5282 del meta[name]
5283 if old_table is not None:
5284 old_table.open()
5285 for record in old_table:
5286 self.append(scatter(record), drop=True)
5287 old_table.close()
5288
5322
5331
5332 - def index(self, record, start=None, stop=None):
5333 """
5334 returns the index of record between start and stop
5335 start and stop default to the first and last record
5336 """
5337 if not isinstance(record, (Record, RecordTemplate, dict, tuple)):
5338 raise TypeError("x should be a record, template, dict, or tuple, not %r" % type(record))
5339 meta = self._meta
5340 if meta.status == CLOSED:
5341 raise DbfError('%s is closed' % meta.filename)
5342 if start is None:
5343 start = 0
5344 if stop is None:
5345 stop = len(self)
5346 for i in range(start, stop):
5347 if record == (self[i]):
5348 return i
5349 else:
5350 raise NotFoundError("dbf.Table.index(x): x not in table", data=record)
5351
5352 - def new(self, filename, field_specs=None, memo_size=None, ignore_memos=None, codepage=None, default_data_types=None, field_data_types=None, on_disk=True):
5353 """
5354 returns a new table of the same type
5355 """
5356 if field_specs is None:
5357 field_specs = self.structure()
5358 if on_disk:
5359 path, name = os.path.split(filename)
5360 if path == "":
5361 filename = os.path.join(os.path.split(self.filename)[0], filename)
5362 elif name == "":
5363 filename = os.path.join(path, os.path.split(self.filename)[1])
5364 if memo_size is None:
5365 memo_size = self._meta.memo_size
5366 if ignore_memos is None:
5367 ignore_memos = self._meta.ignorememos
5368 if codepage is None:
5369 codepage = self._meta.header.codepage()
5370 if default_data_types is None:
5371 default_data_types = self._meta._default_data_types
5372 if field_data_types is None:
5373 field_data_types = self._meta._field_data_types
5374 return Table(filename, field_specs, memo_size, ignore_memos, codepage, default_data_types, field_data_types, dbf_type=self._versionabbr, on_disk=on_disk)
5375
5377 """
5378 returns True if field allows Nulls
5379 """
5380 if field not in self.field_names:
5381 raise MissingField(field)
5382 return bool(self._meta[field][FLAGS] & NULLABLE)
5383
5422
5452
5453 - def query(self, criteria):
5454 """
5455 criteria is a string that will be converted into a function that returns
5456 a List of all matching records
5457 """
5458 meta = self._meta
5459 if meta.status == CLOSED:
5460 raise DbfError('%s is closed' % meta.filename)
5461 return pql(self, criteria)
5462
5464 """
5465 reprocess all indices for this table
5466 """
5467 meta = self._meta
5468 if meta.status == CLOSED:
5469 raise DbfError('%s is closed' % meta.filename)
5470 for dbfindex in self._indexen:
5471 dbfindex._reindex()
5472
5474 """
5475 renames an existing field
5476 """
5477 meta = self._meta
5478 if meta.status != READ_WRITE:
5479 raise DbfError('%s not in read/write mode, unable to change field names' % meta.filename)
5480 if self:
5481 self.create_backup()
5482 if not oldname in self._meta.user_fields:
5483 raise FieldMissingError("field --%s-- does not exist -- cannot rename it." % oldname)
5484 if newname[0] == '_' or newname[0].isdigit() or not newname.replace('_', '').isalnum():
5485 raise FieldSpecError("field names cannot start with _ or digits, and can only contain the _, letters, and digits")
5486 newname = newname.lower()
5487 if newname in self._meta.fields:
5488 raise DbfError("field --%s-- already exists" % newname)
5489 if len(newname) > 10:
5490 raise FieldSpecError("maximum field name length is 10. '%s' is %d characters long." % (newname, len(newname)))
5491 self._meta[newname] = self._meta[oldname]
5492 self._meta.fields[self._meta.fields.index(oldname)] = newname
5493 self._build_header_fields()
5494 self._update_disk(headeronly=True)
5495
5497 """
5498 resizes field (C only at this time)
5499 creates backup file, then modifies current structure
5500 """
5501 meta = self._meta
5502 if meta.status != READ_WRITE:
5503 raise DbfError('%s not in read/write mode, unable to change field size' % meta.filename)
5504 if not 0 < new_size < 256:
5505 raise DbfError("new_size must be between 1 and 255 (use delete_fields to remove a field)")
5506 chosen = self._list_fields(chosen)
5507 for candidate in chosen:
5508 if candidate not in self._meta.user_fields:
5509 raise DbfError("field %s not in table -- resize aborted" % candidate)
5510 elif self.field_info(candidate).field_type != FieldType.CHAR:
5511 raise DbfError("field %s is not Character -- resize aborted" % candidate)
5512 if self:
5513 old_table = self.create_backup()
5514 self.zap()
5515 if meta.mfd is not None and not meta.ignorememos:
5516 meta.mfd.close()
5517 meta.mfd = None
5518 meta.memo = None
5519 if not meta.ignorememos:
5520 meta.newmemofile = True
5521 struct = self.structure()
5522 meta.user_fields[:] = []
5523 new_struct = []
5524 for field_spec in struct:
5525 name, spec = field_spec.split(' ', 1)
5526 if name in chosen:
5527 spec = "C(%d)" % new_size
5528 new_struct.append(' '.join([name, spec]))
5529 self.add_fields(';'.join(new_struct))
5530 if old_table is not None:
5531 old_table.open()
5532 for record in old_table:
5533 self.append(scatter(record), drop=True)
5534 old_table.close()
5535
5537 """
5538 return field specification list suitable for creating same table layout
5539 fields should be a list of fields or None for all fields in table
5540 """
5541 field_specs = []
5542 fields = self._list_fields(fields)
5543 try:
5544 for name in fields:
5545 field_specs.append(self._field_layout(self.field_names.index(name)))
5546 except ValueError:
5547 raise DbfError("field %s does not exist" % name) from None
5548 return field_specs
5549
5566
5569 """
5570 Provides an interface for working with dBase III tables.
5571 """
5572
    # human-readable table type, and the abbreviation used to select it
    _version = 'dBase III Plus'
    _versionabbr = 'db3'
5575
5576 @MutableDefault
5578 return {
5579 CHAR: {
5580 'Type':'Character', 'Retrieve':retrieve_character, 'Update':update_character, 'Blank':lambda x: b' ' * x, 'Init':add_character,
5581 'Class':str, 'Empty':str, 'flags':tuple(),
5582 },
5583 DATE: {
5584 'Type':'Date', 'Retrieve':retrieve_date, 'Update':update_date, 'Blank':lambda x: b' ', 'Init':add_date,
5585 'Class':datetime.date, 'Empty':none, 'flags':tuple(),
5586 },
5587 NUMERIC: {
5588 'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: b' ' * x, 'Init':add_numeric,
5589 'Class':'default', 'Empty':none, 'flags':tuple(),
5590 },
5591 LOGICAL: {
5592 'Type':'Logical', 'Retrieve':retrieve_logical, 'Update':update_logical, 'Blank':lambda x: b'?', 'Init':add_logical,
5593 'Class':bool, 'Empty':none, 'flags':tuple(),
5594 },
5595 MEMO: {
5596 'Type':'Memo', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: b' ', 'Init':add_memo,
5597 'Class':str, 'Empty':str, 'flags':tuple(),
5598 },
5599 FLOAT: {
5600 'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: b' ' * x, 'Init':add_numeric,
5601 'Class':'default', 'Empty':none, 'flags':tuple(),
5602 } }
5603
    # file characteristics for dBase III tables
    _memoext = '.dbt'               # extension of the companion memo file
    _memoClass = _Db3Memo           # class used to manage that memo file
    _yesMemoMask = 0x80             # OR'd into version byte when memo fields exist
    _noMemoMask = 0x7f              # AND'd into version byte to clear the memo flag
    # which field TYPE codes fall into which broad categories
    _binary_types = ()
    _character_types = (CHAR, MEMO)
    _currency_types = tuple()
    _date_types = (DATE, )
    _datetime_types = tuple()
    _decimal_types = (NUMERIC, FLOAT)
    _fixed_types = (DATE, LOGICAL, MEMO)        # width fixed by the format
    _logical_types = (LOGICAL, )
    _memo_types = (MEMO, )
    _numeric_types = (NUMERIC, FLOAT)
    _variable_types = (CHAR, NUMERIC, FLOAT)    # width chosen by the user
    # template 32-byte header for a brand-new, empty table
    _dbfTableHeader = array('B', [0] * 32)
    _dbfTableHeader[0] = 3          # version byte: 0x03 = dBase III, no memo
    _dbfTableHeader[8:10] = array('B', pack_short_int(33))  # start of records: 32-byte header + CR terminator
    _dbfTableHeader[10] = 1         # record length: just the deletion flag, no fields yet
    _dbfTableHeader[29] = 3         # NOTE(review): presumably the default codepage byte -- confirm
    _dbfTableHeader = _dbfTableHeader.tobytes()
    _dbfTableHeaderExtra = b''
    _supported_tables = (0x03, 0x83)    # version bytes: plain, and with memo file
5627
    def _check_memo_integrity(self):
        """
        dBase III and Clipper
        """
        # Verify that the header's memo flag, the field definitions, and the
        # on-disk memo file all agree before attempting to open the memo file.
        if not self._meta.ignorememos:
            memo_fields = False
            for field in self._meta.fields:
                if self._meta[field][TYPE] in self._memo_types:
                    memo_fields = True
                    break
            if memo_fields and self._meta.header.version != 0x83:
                # 0x83 is the version byte with the memo bit set (_yesMemoMask)
                self._meta.dfd.close()
                self._meta.dfd = None
                raise BadDataError("Table structure corrupt: memo fields exist, header declares no memos")
            elif memo_fields and not os.path.exists(self._meta.memoname):
                self._meta.dfd.close()
                self._meta.dfd = None
                raise BadDataError("Table structure corrupt: memo fields exist without memo file")
            if memo_fields:
                try:
                    self._meta.memo = self._memoClass(self._meta)
                except Exception:
                    exc = sys.exc_info()[1]
                    # close the data file before re-raising so the table is not
                    # left half-open
                    self._meta.dfd.close()
                    self._meta.dfd = None
                    raise BadDataError("Table structure corrupt: unable to use memo file (%s)" % exc.args[-1]) from None
5654
    def _initialize_fields(self):
        """
        builds the FieldList of names, types, and descriptions
        """
        # remember per-field type/class/empty overrides so user customizations
        # survive a re-read of the header
        old_fields = defaultdict(dict)
        meta = self._meta
        for name in meta.fields:
            old_fields[name]['type'] = meta[name][TYPE]
            old_fields[name]['empty'] = meta[name][EMPTY]
            old_fields[name]['class'] = meta[name][CLASS]
        meta.fields[:] = []
        offset = 1  # first byte of each record is the deleted flag
        fieldsdef = meta.header.fields
        if len(fieldsdef) % 32 != 0:
            # each field definition is exactly 32 bytes
            raise BadDataError("field definition block corrupt: %d bytes in size" % len(fieldsdef))
        if len(fieldsdef) // 32 != meta.header.field_count:
            raise BadDataError("Header shows %d fields, but field definition block has %d fields" % (meta.header.field_count, len(fieldsdef) // 32))
        total_length = meta.header.record_length
        for i in range(meta.header.field_count):
            fieldblock = fieldsdef[i*32:(i+1)*32]
            name = self._meta.decoder(unpack_str(fieldblock[:11]))[0]
            type = fieldblock[11]
            if not type in meta.fieldtypes:
                raise BadDataError("Unknown field type: %s" % type)
            start = offset
            length = fieldblock[16]
            offset += length
            end = start + length
            decimals = fieldblock[17]
            flags = fieldblock[18]
            if name in meta.fields:
                raise BadDataError('Duplicate field name found: %s' % name)
            meta.fields.append(name)
            if name in old_fields and old_fields[name]['type'] == type:
                # same field type as before: keep the user's class/empty choices
                cls = old_fields[name]['class']
                empty = old_fields[name]['empty']
            else:
                cls = meta.fieldtypes[type]['Class']
                empty = meta.fieldtypes[type]['Empty']
            meta[name] = (
                    type,
                    start,
                    length,
                    end,
                    decimals,
                    flags,
                    cls,
                    empty,
                    )
        if offset != total_length:
            raise BadDataError("Header shows record length of %d, but calculated record length is %d" % (total_length, offset))
        meta.user_fields = [f for f in meta.fields if not meta[f][FLAGS] & SYSTEM]
        meta.user_field_count = len(meta.user_fields)
        Record._create_blank_data(meta)
5709
5712 """
5713 Provides an interface for working with Clipper tables.
5714 """
5715
5716 _version = 'Clipper 5'
5717 _versionabbr = 'clp'
5718
5719 @MutableDefault
5721 return {
5722 CHAR: {
5723 'Type':'Character', 'Retrieve':retrieve_character, 'Update':update_character, 'Blank':lambda x: b' ' * x, 'Init':add_clp_character,
5724 'Class':str, 'Empty':str, 'flags':tuple(),
5725 },
5726 DATE: {
5727 'Type':'Date', 'Retrieve':retrieve_date, 'Update':update_date, 'Blank':lambda x: b' ', 'Init':add_date,
5728 'Class':datetime.date, 'Empty':none, 'flags':tuple(),
5729 },
5730 NUMERIC: {
5731 'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: b' ' * x, 'Init':add_numeric,
5732 'Class':'default', 'Empty':none, 'flags':tuple(),
5733 },
5734 LOGICAL: {
5735 'Type':'Logical', 'Retrieve':retrieve_logical, 'Update':update_logical, 'Blank':lambda x: b'?', 'Init':add_logical,
5736 'Class':bool, 'Empty':none, 'flags':tuple(),
5737 },
5738 MEMO: {
5739 'Type':'Memo', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: b' ', 'Init':add_memo,
5740 'Class':str, 'Empty':str, 'flags':tuple(),
5741 },
5742 FLOAT: {
5743 'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: b' ' * x, 'Init':add_numeric,
5744 'Class':'default', 'Empty':none, 'flags':tuple(),
5745 } }
5746
5747 _memoext = '.dbt'
5748 _memoClass = _Db3Memo
5749 _yesMemoMask = 0x80
5750 _noMemoMask = 0x7f
5751 _binary_types = ()
5752 _character_types = (CHAR, MEMO)
5753 _currency_types = tuple()
5754 _date_types = (DATE, )
5755 _datetime_types = tuple()
5756 _decimal_types = (NUMERIC, FLOAT)
5757 _fixed_types = (DATE, LOGICAL, MEMO)
5758 _logical_types = (LOGICAL, )
5759 _memo_types = (MEMO, )
5760 _numeric_types = (NUMERIC, FLOAT)
5761 _variable_types = (CHAR, NUMERIC, FLOAT)
5762 _dbfTableHeader = array('B', [0] * 32)
5763 _dbfTableHeader[0] = 3
5764 _dbfTableHeader[8:10] = array('B', pack_short_int(33))
5765 _dbfTableHeader[10] = 1
5766 _dbfTableHeader[29] = 3
5767 _dbfTableHeader = _dbfTableHeader.tobytes()
5768 _dbfTableHeaderExtra = b''
5769 _supported_tables = (0x03, 0x83)
5770
5772 """
5773 represents the data block that defines a tables type and layout
5774 """
5775
    @property
    def fields(self):
        "field block structure"
        # field definitions start after the 32-byte fixed header and are
        # terminated by a CR byte sitting on a 32-byte boundary
        fieldblock = self._data[32:]
        for i in range(len(fieldblock)//32+1):
            cr = i * 32
            if fieldblock[cr] == CR:
                break
        else:
            raise BadDataError("corrupt field structure")
        # NOTE(review): if no CR is present the final iteration indexes one
        # past the data and raises IndexError rather than BadDataError --
        # confirm whether headers always carry the terminator
        return fieldblock[:cr].tobytes()
5787
    @fields.setter
    def fields(self, block):
        # Replace the field-definition block, then recompute the header length
        # and the record length from the new definitions.
        fieldblock = self._data[32:]
        for i in range(len(fieldblock)//32+1):
            cr = i * 32
            if fieldblock[cr] == CR:
                break
        else:
            raise BadDataError("corrupt field structure")
        cr += 32    # re-base from the field area to the whole header
        fieldlen = len(block)
        if fieldlen % 32 != 0:
            raise BadDataError("fields structure corrupt: %d is not a multiple of 32" % fieldlen)
        self._data[32:cr] = array('B', block)
        # bytes 8-9 hold the total header size
        self._data[8:10] = array('B', pack_short_int(len(self._data)))
        fieldlen = fieldlen // 32
        recordlen = 1   # first byte of every record is the deleted flag
        for i in range(fieldlen):
            recordlen += block[i*32+16]
            if block[i*32+11] == CHAR:
                # Clipper big character fields keep the high byte of the
                # length in the decimals slot (offset 17)
                recordlen += block[i*32+17] * 256
        self._data[10:12] = array('B', pack_short_int(recordlen))
5810
5811
    def _build_header_fields(self):
        """
        constructs fieldblock for disk table
        """
        fieldblock = array('B', b'')
        memo = False
        nulls = False
        meta = self._meta
        header = meta.header
        # start from a no-memo version byte; set the memo bit back below if
        # any memo fields are found
        header.version = header.version & self._noMemoMask
        meta.fields = [f for f in meta.fields if f != '_nullflags']
        total_length = 1    # deleted flag byte
        for field in meta.fields:
            layout = meta[field]
            if meta.fields.count(field) > 1:
                raise BadDataError("corrupted field structure (noticed in _build_header_fields)")
            fielddef = array('B', [0] * 32)
            fielddef[:11] = array('B', pack_str(meta.encoder(field)[0]))
            fielddef[11] = layout[TYPE]
            fielddef[12:16] = array('B', pack_long_int(layout[START]))
            total_length += layout[LENGTH]
            if layout[TYPE] == CHAR:
                # big character fields: low byte of length in [16], high byte
                # in the decimals slot [17]
                fielddef[16] = layout[LENGTH] % 256
                fielddef[17] = layout[LENGTH] // 256
            else:
                fielddef[16] = layout[LENGTH]
                fielddef[17] = layout[DECIMALS]
            fielddef[18] = layout[FLAGS]
            fieldblock.extend(fielddef)
            if layout[TYPE] in meta.memo_types:
                memo = True
            if layout[FLAGS] & NULLABLE:
                nulls = True
        if memo:
            header.version = header.version | self._yesMemoMask
            if meta.memo is None:
                meta.memo = self._memoClass(meta)
        else:
            # no memo fields remain: remove any stale memo file
            if os.path.exists(meta.memoname):
                if meta.mfd is not None:
                    meta.mfd.close()

                os.remove(meta.memoname)
            meta.memo = None
        if nulls:
            # NOTE(review): `layout` is the last field processed by the loop,
            # so _nullflags is appended after the final field; this would
            # raise NameError for a table with no fields -- confirm callers
            # guarantee at least one field
            start = layout[START] + layout[LENGTH]
            length, one_more = divmod(len(meta.fields), 8)
            if one_more:
                length += 1
            fielddef = array('B', [0] * 32)
            fielddef[:11] = array('B', pack_str(b'_nullflags'))
            fielddef[11] = FieldType._NULLFLAG
            fielddef[12:16] = array('B', pack_long_int(start))
            fielddef[16] = length
            fielddef[17] = 0
            fielddef[18] = BINARY | SYSTEM
            fieldblock.extend(fielddef)
            meta.fields.append('_nullflags')
            nullflags = (
                    _NULLFLAG,          # type
                    start,              # start
                    length,             # length
                    start + length,     # end
                    0,                  # decimals
                    BINARY | SYSTEM,    # flags
                    none,               # class
                    none,               # empty
                    )
            meta['_nullflags'] = nullflags
        header.fields = fieldblock.tobytes()
        header.record_length = total_length
        meta.user_fields = [f for f in meta.fields if not meta[f][FLAGS] & SYSTEM]
        meta.user_field_count = len(meta.user_fields)
        Record._create_blank_data(meta)
5886
5888 """
5889 builds the FieldList of names, types, and descriptions
5890 """
5891 meta = self._meta
5892 old_fields = defaultdict(dict)
5893 for name in meta.fields:
5894 old_fields[name]['type'] = meta[name][TYPE]
5895 old_fields[name]['empty'] = meta[name][EMPTY]
5896 old_fields[name]['class'] = meta[name][CLASS]
5897 meta.fields[:] = []
5898 offset = 1
5899 fieldsdef = meta.header.fields
5900 if len(fieldsdef) % 32 != 0:
5901 raise BadDataError("field definition block corrupt: %d bytes in size" % len(fieldsdef))
5902 if len(fieldsdef) // 32 != meta.header.field_count:
5903 raise BadDataError("Header shows %d fields, but field definition block has %d fields"
5904 (meta.header.field_count, len(fieldsdef) // 32))
5905 total_length = meta.header.record_length
5906 for i in range(meta.header.field_count):
5907 fieldblock = fieldsdef[i*32:(i+1)*32]
5908 name = self._meta.decoder(unpack_str(fieldblock[:11]))[0]
5909 type = fieldblock[11]
5910 if not type in meta.fieldtypes:
5911 raise BadDataError("Unknown field type: %s" % type)
5912 start = offset
5913 length = fieldblock[16]
5914 decimals = fieldblock[17]
5915 if type == CHAR:
5916 length += decimals * 256
5917 offset += length
5918 end = start + length
5919 flags = fieldblock[18]
5920 if name in meta.fields:
5921 raise BadDataError('Duplicate field name found: %s' % name)
5922 meta.fields.append(name)
5923 if name in old_fields and old_fields[name]['type'] == type:
5924 cls = old_fields[name]['class']
5925 empty = old_fields[name]['empty']
5926 else:
5927 cls = meta.fieldtypes[type]['Class']
5928 empty = meta.fieldtypes[type]['Empty']
5929 meta[name] = (
5930 type,
5931 start,
5932 length,
5933 end,
5934 decimals,
5935 flags,
5936 cls,
5937 empty,
5938 )
5939 if offset != total_length:
5940 raise BadDataError("Header shows record length of %d, but calculated record length is %d"
5941 (total_length, offset))
5942 meta.user_fields = [f for f in meta.fields if not meta[f][FLAGS] & SYSTEM]
5943 meta.user_field_count = len(meta.user_fields)
5944 Record._create_blank_data(meta)
5945
5948 """
5949 Provides an interface for working with FoxPro 2 tables
5950 """
5951
5952 _version = 'Foxpro'
5953 _versionabbr = 'fp'
5954
5955 @MutableDefault
5957 return {
5958 CHAR: {
5959 'Type':'Character', 'Retrieve':retrieve_character, 'Update':update_character, 'Blank':lambda x: b' ' * x, 'Init':add_vfp_character,
5960 'Class':str, 'Empty':str, 'flags':('binary', 'nocptrans', 'null', ),
5961 },
5962 FLOAT: {
5963 'Type':'Float', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: b' ' * x, 'Init':add_vfp_numeric,
5964 'Class':'default', 'Empty':none, 'flags':('null', ),
5965 },
5966 NUMERIC: {
5967 'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: b' ' * x, 'Init':add_vfp_numeric,
5968 'Class':'default', 'Empty':none, 'flags':('null', ),
5969 },
5970 LOGICAL: {
5971 'Type':'Logical', 'Retrieve':retrieve_logical, 'Update':update_logical, 'Blank':lambda x: b'?', 'Init':add_logical,
5972 'Class':bool, 'Empty':none, 'flags':('null', ),
5973 },
5974 DATE: {
5975 'Type':'Date', 'Retrieve':retrieve_date, 'Update':update_date, 'Blank':lambda x: b' ', 'Init':add_date,
5976 'Class':datetime.date, 'Empty':none, 'flags':('null', ),
5977 },
5978 MEMO: {
5979 'Type':'Memo', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: b' ', 'Init':add_memo,
5980 'Class':str, 'Empty':str, 'flags':('binary', 'nocptrans', 'null', ),
5981 },
5982 GENERAL: {
5983 'Type':'General', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: b' ', 'Init':add_binary_memo,
5984 'Class':bytes, 'Empty':bytes, 'flags':('null', ),
5985 },
5986 PICTURE: {
5987 'Type':'Picture', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: b' ', 'Init':add_binary_memo,
5988 'Class':bytes, 'Empty':bytes, 'flags':('null', ),
5989 },
5990 _NULLFLAG: {
5991 'Type':'_NullFlags', 'Retrieve':unsupported_type, 'Update':unsupported_type, 'Blank':lambda x: b'\x00' * x, 'Init':None,
5992 'Class':none, 'Empty':none, 'flags':('binary', 'system', ),
5993 } }
5994
5995 _memoext = '.fpt'
5996 _memoClass = _VfpMemo
5997 _yesMemoMask = 0xf5
5998 _noMemoMask = 0x03
5999 _binary_types = (GENERAL, MEMO, PICTURE)
6000
6001 _character_types = (CHAR, DATE, FLOAT, LOGICAL, MEMO, NUMERIC)
6002 _currency_types = tuple()
6003 _date_types = (DATE, )
6004 _datetime_types = tuple()
6005
6006 _fixed_types = (DATE, GENERAL, LOGICAL, MEMO, PICTURE)
6007 _logical_types = (LOGICAL, )
6008 _memo_types = (GENERAL, MEMO, PICTURE)
6009 _numeric_types = (FLOAT, NUMERIC)
6010 _text_types = (CHAR, MEMO)
6011 _variable_types = (CHAR, FLOAT, NUMERIC)
6012 _supported_tables = (0x03, 0xf5)
6013 _dbfTableHeader = array('B', [0] * 32)
6014 _dbfTableHeader[0] = 0x30
6015 _dbfTableHeader[8:10] = array('B', pack_short_int(33 + 263))
6016 _dbfTableHeader[10] = 1
6017 _dbfTableHeader[29] = 3
6018 _dbfTableHeader = _dbfTableHeader.tobytes()
6019 _dbfTableHeaderExtra = b'\x00' * 263
6020
6044
6046 """
6047 builds the FieldList of names, types, and descriptions
6048 """
6049 meta = self._meta
6050 old_fields = defaultdict(dict)
6051 for name in meta.fields:
6052 old_fields[name]['type'] = meta[name][TYPE]
6053 old_fields[name]['class'] = meta[name][CLASS]
6054 old_fields[name]['empty'] = meta[name][EMPTY]
6055 meta.fields[:] = []
6056 offset = 1
6057 fieldsdef = meta.header.fields
6058 if len(fieldsdef) % 32 != 0:
6059 raise BadDataError("field definition block corrupt: %d bytes in size" % len(fieldsdef))
6060 if len(fieldsdef) // 32 != meta.header.field_count:
6061 raise BadDataError("Header shows %d fields, but field definition block has %d fields"
6062 (meta.header.field_count, len(fieldsdef) // 32))
6063 total_length = meta.header.record_length
6064 for i in range(meta.header.field_count):
6065 fieldblock = fieldsdef[i*32:(i+1)*32]
6066 name = self._meta.decoder(unpack_str(fieldblock[:11]))[0]
6067 type = fieldblock[11]
6068 if not type in meta.fieldtypes:
6069 raise BadDataError("Unknown field type: %s" % type)
6070 start = offset
6071 length = fieldblock[16]
6072 offset += length
6073 end = start + length
6074 decimals = fieldblock[17]
6075 flags = fieldblock[18]
6076 if name in meta.fields:
6077 raise BadDataError('Duplicate field name found: %s' % name)
6078 meta.fields.append(name)
6079 if name in old_fields and old_fields[name]['type'] == type:
6080 cls = old_fields[name]['class']
6081 empty = old_fields[name]['empty']
6082 else:
6083 cls = meta.fieldtypes[type]['Class']
6084 empty = meta.fieldtypes[type]['Empty']
6085 meta[name] = (
6086 type,
6087 start,
6088 length,
6089 end,
6090 decimals,
6091 flags,
6092 cls,
6093 empty,
6094 )
6095 if offset != total_length:
6096 raise BadDataError("Header shows record length of %d, but calculated record length is %d" % (total_length, offset))
6097 meta.user_fields = [f for f in meta.fields if not meta[f][FLAGS] & SYSTEM]
6098 meta.user_field_count = len(meta.user_fields)
6099 Record._create_blank_data(meta)
6100
    @staticmethod
    def pack_date(date):
        """
        Returns a group of three bytes, in integer form, of the date
        """
        # NOTE(review): the year is stored as an offset from 2000, so dates
        # before 2000 would yield a negative byte value -- confirm intended
        # input range
        return bytes([date.year - 2000, date.month, date.day])
6108
    @staticmethod
    def unpack_date(bytestr):
        """
        Returns a Date() of the packed three-byte date passed in
        """
        year, month, day = struct.unpack('<BBB', bytestr)
        year += 2000    # stored as an offset from 2000 (see pack counterpart)
        return Date(year, month, day)
6117
6119 """
6120 Provides an interface for working with Visual FoxPro 6 tables
6121 """
6122
6123 _version = 'Visual Foxpro'
6124 _versionabbr = 'vfp'
6125
6126 @MutableDefault
6128 return {
6129 CHAR: {
6130 'Type':'Character', 'Retrieve':retrieve_character, 'Update':update_character, 'Blank':lambda x: b' ' * x, 'Init':add_vfp_character,
6131 'Class':str, 'Empty':str, 'flags':('binary', 'nocptrans', 'null', ),
6132 },
6133 CURRENCY: {
6134 'Type':'Currency', 'Retrieve':retrieve_currency, 'Update':update_currency, 'Blank':lambda x: b'\x00' * 8, 'Init':add_vfp_currency,
6135 'Class':Decimal, 'Empty':none, 'flags':('null', ),
6136 },
6137 DOUBLE: {
6138 'Type':'Double', 'Retrieve':retrieve_double, 'Update':update_double, 'Blank':lambda x: b'\x00' * 8, 'Init':add_vfp_double,
6139 'Class':float, 'Empty':none, 'flags':('null', ),
6140 },
6141 FLOAT: {
6142 'Type':'Float', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: b' ' * x, 'Init':add_vfp_numeric,
6143 'Class':'default', 'Empty':none, 'flags':('null', ),
6144 },
6145 NUMERIC: {
6146 'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: b' ' * x, 'Init':add_vfp_numeric,
6147 'Class':'default', 'Empty':none, 'flags':('null', ),
6148 },
6149 INTEGER: {
6150 'Type':'Integer', 'Retrieve':retrieve_integer, 'Update':update_integer, 'Blank':lambda x: b'\x00' * 4, 'Init':add_vfp_integer,
6151 'Class':int, 'Empty':none, 'flags':('null', ),
6152 },
6153 LOGICAL: {
6154 'Type':'Logical', 'Retrieve':retrieve_logical, 'Update':update_logical, 'Blank':lambda x: b'?', 'Init':add_logical,
6155 'Class':bool, 'Empty':none, 'flags':('null', ),
6156 },
6157 DATE: {
6158 'Type':'Date', 'Retrieve':retrieve_date, 'Update':update_date, 'Blank':lambda x: b' ', 'Init':add_date,
6159 'Class':datetime.date, 'Empty':none, 'flags':('null', ),
6160 },
6161 DATETIME: {
6162 'Type':'DateTime', 'Retrieve':retrieve_vfp_datetime, 'Update':update_vfp_datetime, 'Blank':lambda x: b'\x00' * 8, 'Init':add_vfp_datetime,
6163 'Class':datetime.datetime, 'Empty':none, 'flags':('null', ),
6164 },
6165 MEMO: {
6166 'Type':'Memo', 'Retrieve':retrieve_vfp_memo, 'Update':update_vfp_memo, 'Blank':lambda x: b'\x00\x00\x00\x00', 'Init':add_vfp_memo,
6167 'Class':str, 'Empty':str, 'flags':('binary', 'nocptrans', 'null', ),
6168 },
6169 GENERAL: {
6170 'Type':'General', 'Retrieve':retrieve_vfp_memo, 'Update':update_vfp_memo, 'Blank':lambda x: b'\x00\x00\x00\x00', 'Init':add_vfp_binary_memo,
6171 'Class':bytes, 'Empty':bytes, 'flags':('null', ),
6172 },
6173 PICTURE: {
6174 'Type':'Picture', 'Retrieve':retrieve_vfp_memo, 'Update':update_vfp_memo, 'Blank':lambda x: b'\x00\x00\x00\x00', 'Init':add_vfp_binary_memo,
6175 'Class':bytes, 'Empty':bytes, 'flags':('null', ),
6176 },
6177 _NULLFLAG: {
6178 'Type':'_NullFlags', 'Retrieve':unsupported_type, 'Update':unsupported_type, 'Blank':lambda x: b'\x00' * x, 'Init':int,
6179 'Class':none, 'Empty':none, 'flags':('binary', 'system',),
6180 } }
6181
6182 _memoext = '.fpt'
6183 _memoClass = _VfpMemo
6184 _yesMemoMask = 0x30
6185 _noMemoMask = 0x30
6186
6187 _binary_types = (DOUBLE, GENERAL, INTEGER, MEMO, PICTURE, DATETIME, CURRENCY)
6188
6189 _character_types = (CHAR, DATE, FLOAT, LOGICAL, MEMO, NUMERIC)
6190 _currency_types = (CURRENCY, )
6191 _date_types = (DATE, DATETIME)
6192 _datetime_types = (DATETIME, )
6193
6194 _fixed_types = (DOUBLE, DATE, GENERAL, INTEGER, LOGICAL, MEMO, PICTURE, DATETIME, CURRENCY)
6195 _logical_types = (LOGICAL, )
6196 _memo_types = (GENERAL, MEMO, PICTURE)
6197
6198 _numeric_types = (DOUBLE, FLOAT, INTEGER, NUMERIC, CURRENCY)
6199 _variable_types = (CHAR, FLOAT, NUMERIC)
6200 _supported_tables = (0x30, 0x31)
6201 _dbfTableHeader = array('B', [0] * 32)
6202 _dbfTableHeader[0] = 0x30
6203 _dbfTableHeader[8:10] = array('B', pack_short_int(33 + 263))
6204 _dbfTableHeader[10] = 1
6205 _dbfTableHeader[29] = 3
6206 _dbfTableHeader = _dbfTableHeader.tobytes()
6207 _dbfTableHeaderExtra = b'\x00' * 263
6208
    def _initialize_fields(self):
        """
        builds the FieldList of names, types, and descriptions
        """
        meta = self._meta
        # remember per-field type/class/empty overrides so user customizations
        # survive a re-read of the header
        old_fields = defaultdict(dict)
        for name in meta.fields:
            old_fields[name]['type'] = meta[name][TYPE]
            old_fields[name]['class'] = meta[name][CLASS]
            old_fields[name]['empty'] = meta[name][EMPTY]
        meta.fields[:] = []
        offset = 1  # first byte of each record is the deleted flag
        fieldsdef = meta.header.fields
        meta.nullflags = None
        total_length = meta.header.record_length
        for i in range(meta.header.field_count):
            fieldblock = fieldsdef[i*32:(i+1)*32]
            name = self._meta.decoder(unpack_str(fieldblock[:11]))[0]
            type = fieldblock[11]
            if not type in meta.fieldtypes:
                raise BadDataError("Unknown field type: %s" % type)
            # VFP stores each field's start offset explicitly in bytes 12-15
            start = unpack_long_int(fieldblock[12:16])
            length = fieldblock[16]
            offset += length
            end = start + length
            decimals = fieldblock[17]
            flags = fieldblock[18]
            if name in meta.fields:
                raise BadDataError('Duplicate field name found: %s' % name)
            meta.fields.append(name)
            if name in old_fields and old_fields[name]['type'] == type:
                # same field type as before: keep the user's class/empty choices
                cls = old_fields[name]['class']
                empty = old_fields[name]['empty']
            else:
                cls = meta.fieldtypes[type]['Class']
                empty = meta.fieldtypes[type]['Empty']
            meta[name] = (
                    type,
                    start,
                    length,
                    end,
                    decimals,
                    flags,
                    cls,
                    empty,
                    )
        if offset != total_length:
            raise BadDataError("Header shows record length of %d, but calculated record length is %d" % (total_length, offset))
        meta.user_fields = [f for f in meta.fields if not meta[f][FLAGS] & SYSTEM]
        meta.user_field_count = len(meta.user_fields)
        Record._create_blank_data(meta)
6260
6261
6262 -class List(_Navigation):
6263 """
6264 list of Dbf records, with set-like behavior
6265 """
6266
6267 _desc = ''
6268
6269 - def __init__(self, records=None, desc=None, key=None):
6270 self._list = []
6271 self._set = set()
6272 self._tables = dict()
6273 if key is not None:
6274 self.key = key
6275 if key.__doc__ is None:
6276 key.__doc__ = 'unknown'
6277 key = self.key
6278 self._current = -1
6279 if isinstance(records, self.__class__) and key is records.key:
6280 self._list = records._list[:]
6281 self._set = records._set.copy()
6282 self._current = 0
6283 elif records is not None:
6284 for record in records:
6285 value = key(record)
6286 item = (source_table(record), recno(record), value)
6287 if value not in self._set:
6288 self._set.add(value)
6289 self._list.append(item)
6290 self._current = 0
6291 if desc is not None:
6292 self._desc = desc
6293
6295 self._still_valid_check()
6296 key = self.key
6297 if isinstance(other, (Table, list)):
6298 other = self.__class__(other, key=key)
6299 if isinstance(other, self.__class__):
6300 other._still_valid_check()
6301 result = self.__class__()
6302 result._set = self._set.copy()
6303 result._list[:] = self._list[:]
6304 result._tables = {}
6305 result._tables.update(self._tables)
6306 result.key = self.key
6307 if key is other.key:
6308 for item in other._list:
6309 result._maybe_add(item)
6310 else:
6311 for rec in other:
6312 result._maybe_add((source_table(rec), recno(rec), key(rec)))
6313 return result
6314 return NotImplemented
6315
    def __contains__(self, data):
        self._still_valid_check()
        if not isinstance(data, (Record, RecordTemplate, tuple, dict)):
            raise TypeError("%r is not a record, templace, tuple, nor dict" % (data, ))
        try:
            # fast path: derive the key value and test set membership
            item = self.key(data)
            if not isinstance(item, tuple):
                item = (item, )
            return item in self._set
        except Exception:
            # key function failed on this data -- fall back to a linear scan
            # comparing whole records
            for record in self:
                if record == data:
                    return True
            return False
6330
6332 self._still_valid_check()
6333 if isinstance(key, baseinteger):
6334 item = self._list.pop[key]
6335 self._set.remove(item[2])
6336 elif isinstance(key, slice):
6337 self._set.difference_update([item[2] for item in self._list[key]])
6338 self._list.__delitem__(key)
6339 elif isinstance(key, (Record, RecordTemplate, dict, tuple)):
6340 index = self.index(key)
6341 item = self._list.pop[index]
6342 self._set.remove(item[2])
6343 else:
6344 raise TypeError('%r should be an int, slice, record, template, tuple, or dict -- not a %r' % (key, type(key)))
6345
    def __getitem__(self, key):
        self._still_valid_check()
        if isinstance(key, baseinteger):
            count = len(self._list)
            if not -count <= key < count:
                raise NotFoundError("Record %d is not in list." % key)
            return self._get_record(*self._list[key])
        elif isinstance(key, slice):
            result = self.__class__()
            result._list[:] = self._list[key]
            # NOTE(review): elsewhere _set holds only the key values
            # (item[2]), but here it is filled with the full
            # (table, recno, value) tuples -- confirm whether intentional
            result._set = set(result._list)
            result.key = self.key
            return result
        elif isinstance(key, (Record, RecordTemplate, dict, tuple)):
            index = self.index(key)
            return self._get_record(*self._list[index])
        else:
            raise TypeError('%r should be an int, slice, record, record template, tuple, or dict -- not a %r' % (key, type(key)))
6364
6368
6372
6376
6378 self._still_valid_check()
6379 key = self.key
6380 if isinstance(other, (Table, list)):
6381 other = self.__class__(other, key=key)
6382 if isinstance(other, self.__class__):
6383 other._still_valid_check()
6384 result = other.__class__()
6385 result._set = other._set.copy()
6386 result._list[:] = other._list[:]
6387 result._tables = {}
6388 result._tables.update(self._tables)
6389 result.key = other.key
6390 if key is other.key:
6391 for item in self._list:
6392 result._maybe_add(item)
6393 else:
6394 for rec in self:
6395 result._maybe_add((source_table(rec), recno(rec), key(rec)))
6396 return result
6397 return NotImplemented
6398
    def __repr__(self):
        # NOTE(review): interpolates the class object itself, not its __name__
        self._still_valid_check()
        if self._desc:
            return "%s(key=(%s), desc=%s)" % (self.__class__, self.key.__doc__, self._desc)
        else:
            return "%s(key=(%s))" % (self.__class__, self.key.__doc__)
6405
6407 self._still_valid_check()
6408 key = self.key
6409 if isinstance(other, (Table, list)):
6410 other = self.__class__(other, key=key)
6411 if isinstance(other, self.__class__):
6412 other._still_valid_check()
6413 result = other.__class__()
6414 result._list[:] = other._list[:]
6415 result._set = other._set.copy()
6416 result._tables = {}
6417 result._tables.update(other._tables)
6418 result.key = key
6419 lost = set()
6420 if key is other.key:
6421 for item in self._list:
6422 if item[2] in result._list:
6423 result._set.remove(item[2])
6424 lost.add(item)
6425 else:
6426 for rec in self:
6427 value = key(rec)
6428 if value in result._set:
6429 result._set.remove(value)
6430 lost.add((source_table(rec), recno(rec), value))
6431 result._list = [item for item in result._list if item not in lost]
6432 lost = set(result._tables.keys())
6433 for table, _1, _2 in result._list:
6434 if table in result._tables:
6435 lost.remove(table)
6436 if not lost:
6437 break
6438 for table in lost:
6439 del result._tables[table]
6440 return result
6441 return NotImplemented
6442
6444 self._still_valid_check()
6445 key = self.key
6446 if isinstance(other, (Table, list)):
6447 other = self.__class__(other, key=key)
6448 if isinstance(other, self.__class__):
6449 other._still_valid_check()
6450 result = self.__class__()
6451 result._list[:] = self._list[:]
6452 result._set = self._set.copy()
6453 result._tables = {}
6454 result._tables.update(self._tables)
6455 result.key = key
6456 lost = set()
6457 if key is other.key:
6458 for item in other._list:
6459 if item[2] in result._set:
6460 result._set.remove(item[2])
6461 lost.add(item[2])
6462 else:
6463 for rec in other:
6464 value = key(rec)
6465 if value in result._set:
6466 result._set.remove(value)
6467 lost.add(value)
6468 result._list = [item for item in result._list if item[2] not in lost]
6469 lost = set(result._tables.keys())
6470 for table, _1, _2 in result._list:
6471 if table in result._tables:
6472 lost.remove(table)
6473 if not lost:
6474 break
6475 for table in lost:
6476 del result._tables[table]
6477 return result
6478 return NotImplemented
6479
6487
6488 - def _get_record(self, table=None, rec_no=None, value=None):
6489 if table is rec_no is None:
6490 table, rec_no, value = self._list[self._index]
6491 return table[rec_no]
6492
    def _purge(self, record, old_record_number, offset):
        # Remove `record` (identified by its table and pre-pack record number)
        # from the list, then shift the record numbers of all later entries
        # from the same table down by `offset`.  Returns whether the record
        # was actually present.
        partial = source_table(record), old_record_number
        # scan in (table, recno) order so we can stop early once past the slot
        records = sorted(self._list, key=lambda item: (item[0], item[1]))
        for item in records:
            if partial == item[:2]:
                found = True
                break
            elif partial[0] is item[0] and partial[1] < item[1]:
                # passed the place it would have been -- not in the list
                found = False
                break
        else:
            found = False
        if found:
            self._list.pop(self._list.index(item))
            self._set.remove(item[2])
        # renumber the remaining entries for this table that follow `item`
        start = records.index(item) + found
        for item in records[start:]:
            if item[0] is not partial[0]:       # into the next table; done
                break
            i = self._list.index(item)
            self._set.remove(item[2])
            item = item[0], (item[1] - offset), item[2]
            self._list[i] = item
            self._set.add(item[2])
        return found
6518
    def _still_valid_check(self):
        # a pack renumbers records, invalidating the record numbers we hold;
        # compare each table's pack counter against the one seen at add time
        for table, last_pack in self._tables.items():
            if last_pack != getattr(table, '_pack_count'):
                raise DbfError("table has been packed; list is invalid")

    _nav_check = _still_valid_check
6525
6529
    def clear(self):
        # drop every entry and reset navigation; forgets the tracked tables too
        self._list = []
        self._set = set()
        self._index = -1
        self._tables.clear()
6535
6551
6552 - def index(self, record, start=None, stop=None):
6553 """
6554 returns the index of record between start and stop
6555 start and stop default to the first and last record
6556 """
6557 if not isinstance(record, (Record, RecordTemplate, dict, tuple)):
6558 raise TypeError("x should be a record, template, dict, or tuple, not %r" % type(record))
6559 self._still_valid_check()
6560 if start is None:
6561 start = 0
6562 if stop is None:
6563 stop = len(self)
6564 for i in range(start, stop):
6565 if record == (self[i]):
6566 return i
6567 else:
6568 raise NotFoundError("dbf.List.index(x): x not in List", data=record)
6569
6570 - def insert(self, i, record):
6576
6577 - def key(self, record):
6583
6584 - def pop(self, index=None):
6592
6593 - def query(self, criteria):
6594 """
6595 criteria is a callback that returns a truthy value for matching record
6596 """
6597 return pql(self, criteria)
6598
    def remove(self, data):
        # remove the entry matching `data`; raises NotFoundError (via index)
        # if it is not present
        self._still_valid_check()
        if not isinstance(data, (Record, RecordTemplate, dict, tuple)):
            raise TypeError("%r(%r) is not a record, template, tuple, nor dict" % (type(data), data))
        index = self.index(data)
        record = self[index]
        item = source_table(record), recno(record), self.key(record)
        self._list.remove(item)
        self._set.remove(item[2])
6608
6612
6613 - def sort(self, key=None, reverse=False):
6618
6619
6620 -class Index(_Navigation):
6621 """
6622 non-persistent index for a table
6623 """
6624
    def __init__(self, table, key):
        # build the index immediately: one keyed pass over every record
        self._table = table
        self._values = []       # sorted key values
        self._rec_by_val = []   # record numbers, parallel to _values
        self._records = {}      # record number -> key value
        self.__doc__ = key.__doc__ or 'unknown'
        self._key = key
        self._previous_status = []
        for record in table:
            value = key(record)
            if value is DoNotIndex:
                # the key function asked for this record to be left out
                continue
            rec_num = recno(record)
            if not isinstance(value, tuple):
                # keys are normalized to tuples so comparisons are uniform
                value = (value, )
            vindex = bisect_right(self._values, value)
            self._values.insert(vindex, value)
            self._rec_by_val.insert(vindex, rec_num)
            self._records[rec_num] = value
        # register so the table can notify us of record changes
        table._indexen.add(self)
6645
6647 rec_num = recno(record)
6648 key = self.key(record)
6649 if rec_num in self._records:
6650 if self._records[rec_num] == key:
6651 return
6652 old_key = self._records[rec_num]
6653 vindex = bisect_left(self._values, old_key)
6654 self._values.pop(vindex)
6655 self._rec_by_val.pop(vindex)
6656 del self._records[rec_num]
6657 assert rec_num not in self._records
6658 if key == (DoNotIndex, ):
6659 return
6660 vindex = bisect_right(self._values, key)
6661 self._values.insert(vindex, key)
6662 self._rec_by_val.insert(vindex, rec_num)
6663 self._records[rec_num] = key
6664
    def __contains__(self, data):
        if not isinstance(data, (Record, RecordTemplate, tuple, dict)):
            raise TypeError("%r is not a record, templace, tuple, nor dict" % (data, ))
        try:
            # fast path: derive the key value and test the sorted values list
            value = self.key(data)
            return value in self._values
        except Exception:
            # key function failed on this data -- fall back to a linear scan
            # comparing whole records
            for record in self:
                if record == data:
                    return True
            return False
6676
6678 '''if key is an integer, returns the matching record;
6679 if key is a [slice | string | tuple | record] returns a List;
6680 raises NotFoundError on failure'''
6681 if isinstance(key, baseinteger):
6682 count = len(self._values)
6683 if not -count <= key < count:
6684 raise NotFoundError("Record %d is not in list." % key)
6685 rec_num = self._rec_by_val[key]
6686 return self._table[rec_num]
6687 elif isinstance(key, slice):
6688 result = List()
6689 start, stop, step = key.start, key.stop, key.step
6690 if start is None: start = 0
6691 if stop is None: stop = len(self._rec_by_val)
6692 if step is None: step = 1
6693 if step < 0:
6694 start, stop = stop - 1, -(stop - start + 1)
6695 for loc in range(start, stop, step):
6696 record = self._table[self._rec_by_val[loc]]
6697 result._maybe_add(item=(self._table, self._rec_by_val[loc], result.key(record)))
6698 return result
6699 elif isinstance (key, (basestring, tuple, Record, RecordTemplate)):
6700 if isinstance(key, (Record, RecordTemplate)):
6701 key = self.key(key)
6702 elif isinstance(key, basestring):
6703 key = (key, )
6704 lo = self._search(key, where='left')
6705 hi = self._search(key, where='right')
6706 if lo == hi:
6707 raise NotFoundError(key)
6708 result = List(desc='match = %r' % (key, ))
6709 for loc in range(lo, hi):
6710 record = self._table[self._rec_by_val[loc]]
6711 result._maybe_add(item=(self._table, self._rec_by_val[loc], result.key(record)))
6712 return result
6713 else:
6714 raise TypeError('indices must be integers, match objects must by strings or tuples')
6715
6717 self._table.__enter__()
6718 return self
6719
6721 self._table.__exit__()
6722 return False
6723
6726
6728 return len(self._records)
6729
6731 """
6732 removes all entries from index
6733 """
6734 self._values[:] = []
6735 self._rec_by_val[:] = []
6736 self._records.clear()
6737
6738 - def _key(self, record):
6744
6751
6753 target = target[:len(match)]
6754 if isinstance(match[-1], basestring):
6755 target = list(target)
6756 target[-1] = target[-1][:len(match[-1])]
6757 target = tuple(target)
6758 return target == match
6759
6761 value = self._records.get(rec_num)
6762 if value is not None:
6763 vindex = bisect_left(self._values, value)
6764 del self._records[rec_num]
6765 self._values.pop(vindex)
6766 self._rec_by_val.pop(vindex)
6767
6769 """
6770 reindexes all records
6771 """
6772 for record in self._table:
6773 self(record)
6774
6775 - def _search(self, match, lo=0, hi=None, where=None):
6776 if hi is None:
6777 hi = len(self._values)
6778 if where == 'left':
6779 return bisect_left(self._values, match, lo, hi)
6780 elif where == 'right':
6781 return bisect_right(self._values, match, lo, hi)
6782
6783 - def index(self, record, start=None, stop=None):
6784 """
6785 returns the index of record between start and stop
6786 start and stop default to the first and last record
6787 """
6788 if not isinstance(record, (Record, RecordTemplate, dict, tuple)):
6789 raise TypeError("x should be a record, template, dict, or tuple, not %r" % type(record))
6790 self._nav_check()
6791 if start is None:
6792 start = 0
6793 if stop is None:
6794 stop = len(self)
6795 for i in range(start, stop):
6796 if record == (self[i]):
6797 return i
6798 else:
6799 raise NotFoundError("dbf.Index.index(x): x not in Index", data=record)
6800
6801 - def index_search(self, match, start=None, stop=None, nearest=False, partial=False):
6802 """
6803 returns the index of match between start and stop
6804 start and stop default to the first and last record.
6805 if nearest is true returns the location of where the match should be
6806 otherwise raises NotFoundError
6807 """
6808 self._nav_check()
6809 if not isinstance(match, tuple):
6810 match = (match, )
6811 if start is None:
6812 start = 0
6813 if stop is None:
6814 stop = len(self)
6815 loc = self._search(match, start, stop, where='left')
6816 if loc == len(self._values):
6817 if nearest:
6818 return IndexLocation(loc, False)
6819 raise NotFoundError("dbf.Index.index_search(x): x not in index", data=match)
6820 if self._values[loc] == match \
6821 or partial and self._partial_match(self._values[loc], match):
6822 return IndexLocation(loc, True)
6823 elif nearest:
6824 return IndexLocation(loc, False)
6825 else:
6826 raise NotFoundError("dbf.Index.index_search(x): x not in Index", data=match)
6827
6828 - def key(self, record):
6829 result = self._key(record)
6830 if not isinstance(result, tuple):
6831 result = (result, )
6832 return result
6833
6834 - def query(self, criteria):
6835 """
6836 criteria is a callback that returns a truthy value for matching record
6837 """
6838 self._nav_check()
6839 return pql(self, criteria)
6840
6841 - def search(self, match, partial=False):
6842 """
6843 returns dbf.List of all (partially) matching records
6844 """
6845 self._nav_check()
6846 result = List()
6847 if not isinstance(match, tuple):
6848 match = (match, )
6849 loc = self._search(match, where='left')
6850 if loc == len(self._values):
6851 return result
6852 while loc < len(self._values) and self._values[loc] == match:
6853 record = self._table[self._rec_by_val[loc]]
6854 result._maybe_add(item=(self._table, self._rec_by_val[loc], result.key(record)))
6855 loc += 1
6856 if partial:
6857 while loc < len(self._values) and self._partial_match(self._values[loc], match):
6858 record = self._table[self._rec_by_val[loc]]
6859 result._maybe_add(item=(self._table, self._rec_by_val[loc], result.key(record)))
6860 loc += 1
6861 return result
6862
6865 """
6866 establishes a relation between two dbf tables (not persistent)
6867 """
6868
6869 relations = {}
6870
6871 - def __new__(cls, src, tgt, src_names=None, tgt_names=None):
6872 if (len(src) != 2 or len(tgt) != 2):
6873 raise DbfError("Relation should be called with ((src_table, src_field), (tgt_table, tgt_field))")
6874 if src_names and len(src_names) !=2 or tgt_names and len(tgt_names) != 2:
6875 raise DbfError('src_names and tgt_names, if specified, must be ("table","field")')
6876 src_table, src_field = src
6877 tgt_table, tgt_field = tgt
6878 try:
6879 if isinstance(src_field, baseinteger):
6880 table, field = src_table, src_field
6881 src_field = table.field_names[field]
6882 else:
6883 src_table.field_names.index(src_field)
6884 if isinstance(tgt_field, baseinteger):
6885 table, field = tgt_table, tgt_field
6886 tgt_field = table.field_names[field]
6887 else:
6888 tgt_table.field_names.index(tgt_field)
6889 except (IndexError, ValueError):
6890 raise DbfError('%r not in %r' % (field, table)) from None
6891 if src_names:
6892 src_table_name, src_field_name = src_names
6893 else:
6894 src_table_name, src_field_name = src_table.filename, src_field
6895 if src_table_name[-4:].lower() == '.dbf':
6896 src_table_name = src_table_name[:-4]
6897 if tgt_names:
6898 tgt_table_name, tgt_field_name = tgt_names
6899 else:
6900 tgt_table_name, tgt_field_name = tgt_table.filename, tgt_field
6901 if tgt_table_name[-4:].lower() == '.dbf':
6902 tgt_table_name = tgt_table_name[:-4]
6903 relation = cls.relations.get(((src_table, src_field), (tgt_table, tgt_field)))
6904 if relation is not None:
6905 return relation
6906 obj = object.__new__(cls)
6907 obj._src_table, obj._src_field = src_table, src_field
6908 obj._tgt_table, obj._tgt_field = tgt_table, tgt_field
6909 obj._src_table_name, obj._src_field_name = src_table_name, src_field_name
6910 obj._tgt_table_name, obj._tgt_field_name = tgt_table_name, tgt_field_name
6911 obj._tables = dict()
6912 cls.relations[((src_table, src_field), (tgt_table, tgt_field))] = obj
6913 return obj
6914
6922
6932
6935
6943
6946
6949
6950 @property
6952 "name of source table"
6953 return yo._src_table
6954
6955 @property
6957 "name of source field"
6958 return yo._src_field
6959
6960 @property
6962 return yo._src_table_name
6963
6964 @property
6966 return yo._src_field_name
6967
6968 @property
6970 "name of target table"
6971 return yo._tgt_table
6972
6973 @property
6975 "name of target field"
6976 return yo._tgt_field
6977
6978 @property
6980 return yo._tgt_table_name
6981
6982 @property
6984 return yo._tgt_field_name
6985
6986 @LazyAttr
6988 def index(record, field=yo._tgt_field):
6989 return record[field]
6990 index.__doc__ = "%s:%s --> %s:%s" % (yo.src_table_name, yo.src_field_name, yo.tgt_table_name, yo.tgt_field_name)
6991 yo.index = yo._tgt_table.create_index(index)
6992 source = dbf.List(yo._src_table, key=lambda rec, field=yo._src_field: rec[field])
6993 target = dbf.List(yo._tgt_table, key=lambda rec, field=yo._tgt_field: rec[field])
6994 if len(source) != len(yo._src_table):
6995 yo._tables[yo._src_table] = 'many'
6996 else:
6997 yo._tables[yo._src_table] = 'one'
6998 if len(target) != len(yo._tgt_table):
6999 yo._tables[yo._tgt_table] = 'many'
7000 else:
7001 yo._tables[yo._tgt_table] = 'one'
7002 return yo.index
7003
7005 yo.index
7006 try:
7007 if isinstance(table, basestring):
7008 table = (yo._src_table, yo._tgt_table)[yo._tgt_table_name == table]
7009 return yo._tables[table]
7010 except IndexError:
7011 raise NotFoundError("table %s not in relation" % table) from None
7012
7016
7017
7018
7019
# table-type abbreviation -> Table subclass implementing that dbf flavor
table_types = {
    'db3' : Db3Table,
    'clp' : ClpTable,
    'fp' : FpTable,
    'vfp' : VfpTable,
    }
7026
# dbf version byte -> human-readable product description
version_map = {
    0x02 : 'FoxBASE',
    0x03 : 'dBase III Plus',
    0x04 : 'dBase IV',
    0x05 : 'dBase V',
    0x30 : 'Visual FoxPro',
    0x31 : 'Visual FoxPro (auto increment field)',
    0x32 : 'Visual FoxPro (VarChar, VarBinary, or BLOB enabled)',
    0x43 : 'dBase IV SQL table files',
    0x63 : 'dBase IV SQL system files',
    0x83 : 'dBase III Plus w/memos',
    0x8b : 'dBase IV w/memos',
    0x8e : 'dBase IV w/SQL table',
    0xf5 : 'FoxPro w/memos'}
7041
# codepage byte -> (python codec name or None if unsupported, description)
code_pages = {
    0x00 : ('ascii', "plain ol' ascii"),
    0x01 : ('cp437', 'U.S. MS-DOS'),
    0x02 : ('cp850', 'International MS-DOS'),
    0x03 : ('cp1252', 'Windows ANSI'),
    0x04 : ('mac_roman', 'Standard Macintosh'),
    0x08 : ('cp865', 'Danish OEM'),
    0x09 : ('cp437', 'Dutch OEM'),
    0x0A : ('cp850', 'Dutch OEM (secondary)'),
    0x0B : ('cp437', 'Finnish OEM'),
    0x0D : ('cp437', 'French OEM'),
    0x0E : ('cp850', 'French OEM (secondary)'),
    0x0F : ('cp437', 'German OEM'),
    0x10 : ('cp850', 'German OEM (secondary)'),
    0x11 : ('cp437', 'Italian OEM'),
    0x12 : ('cp850', 'Italian OEM (secondary)'),
    0x13 : ('cp932', 'Japanese Shift-JIS'),
    0x14 : ('cp850', 'Spanish OEM (secondary)'),
    0x15 : ('cp437', 'Swedish OEM'),
    0x16 : ('cp850', 'Swedish OEM (secondary)'),
    0x17 : ('cp865', 'Norwegian OEM'),
    0x18 : ('cp437', 'Spanish OEM'),
    0x19 : ('cp437', 'English OEM (Britain)'),
    0x1A : ('cp850', 'English OEM (Britain) (secondary)'),
    0x1B : ('cp437', 'English OEM (U.S.)'),
    0x1C : ('cp863', 'French OEM (Canada)'),
    0x1D : ('cp850', 'French OEM (secondary)'),
    0x1F : ('cp852', 'Czech OEM'),
    0x22 : ('cp852', 'Hungarian OEM'),
    0x23 : ('cp852', 'Polish OEM'),
    0x24 : ('cp860', 'Portugese OEM'),
    0x25 : ('cp850', 'Potugese OEM (secondary)'),
    0x26 : ('cp866', 'Russian OEM'),
    0x37 : ('cp850', 'English OEM (U.S.) (secondary)'),
    0x40 : ('cp852', 'Romanian OEM'),
    0x4D : ('cp936', 'Chinese GBK (PRC)'),
    0x4E : ('cp949', 'Korean (ANSI/OEM)'),
    0x4F : ('cp950', 'Chinese Big 5 (Taiwan)'),
    0x50 : ('cp874', 'Thai (ANSI/OEM)'),
    0x57 : ('cp1252', 'ANSI'),
    0x58 : ('cp1252', 'Western European ANSI'),
    0x59 : ('cp1252', 'Spanish ANSI'),
    0x64 : ('cp852', 'Eastern European MS-DOS'),
    0x65 : ('cp866', 'Russian MS-DOS'),
    0x66 : ('cp865', 'Nordic MS-DOS'),
    0x67 : ('cp861', 'Icelandic MS-DOS'),
    0x68 : (None, 'Kamenicky (Czech) MS-DOS'),
    0x69 : (None, 'Mazovia (Polish) MS-DOS'),
    0x6a : ('cp737', 'Greek MS-DOS (437G)'),
    0x6b : ('cp857', 'Turkish MS-DOS'),
    0x78 : ('cp950', 'Traditional Chinese (Hong Kong SAR, Taiwan) Windows'),
    0x79 : ('cp949', 'Korean Windows'),
    0x7a : ('cp936', 'Chinese Simplified (PRC, Singapore) Windows'),
    0x7b : ('cp932', 'Japanese Windows'),
    0x7c : ('cp874', 'Thai Windows'),
    0x7d : ('cp1255', 'Hebrew Windows'),
    0x7e : ('cp1256', 'Arabic Windows'),
    0xc8 : ('cp1250', 'Eastern European Windows'),
    0xc9 : ('cp1251', 'Russian Windows'),
    0xca : ('cp1254', 'Turkish Windows'),
    0xcb : ('cp1253', 'Greek Windows'),
    0x96 : ('mac_cyrillic', 'Russian Macintosh'),
    0x97 : ('mac_latin2', 'Macintosh EE'),
    0x98 : ('mac_greek', 'Greek Macintosh'),
    0xf0 : ('utf8', '8-bit unicode'),
    }
7108
7109
# resolve module-level default_codepage to a python codec name, falling back
# to ascii -- NOTE(review): assumes the incoming value is a code_pages key
# (codepage byte); any other value silently maps to ascii -- confirm upstream
default_codepage = code_pages.get(default_codepage, code_pages.get(0x00))[0]
7111
7112
7113 -def _nop(value):
7114 """
7115 returns parameter unchanged
7116 """
7117 return value
7118
7120 """
7121 ensures each tuple is the same length, using filler[-missing] for the gaps
7122 """
7123 final = []
7124 for t in tuples:
7125 if len(t) < length:
7126 final.append( tuple([item for item in t] + filler[len(t)-length:]) )
7127 else:
7128 final.append(t)
7129 return tuple(final)
7130
7132 if cp not in code_pages:
7133 for code_page in sorted(code_pages.keys()):
7134 sd, ld = code_pages[code_page]
7135 if cp == sd or cp == ld:
7136 if sd is None:
7137 raise DbfError("Unsupported codepage: %s" % ld)
7138 cp = code_page
7139 break
7140 else:
7141 raise DbfError("Unsupported codepage: %s" % cp)
7142 sd, ld = code_pages[cp]
7143 return cp, sd, ld
7144
7149 """
7150 under development
7151 """
7152
7153 version = 'dBase IV w/memos (non-functional)'
7154 _versionabbr = 'db4'
7155
7156 @MutableDefault
7158 return {
7159 CHAR: {'Type':'Character', 'Retrieve':retrieve_character, 'Update':update_character, 'Blank':lambda x: b' ' * x, 'Init':add_vfp_character},
7160 CURRENCY: {'Type':'Currency', 'Retrieve':retrieve_currency, 'Update':update_currency, 'Blank':Decimal, 'Init':add_vfp_currency},
7161 DOUBLE: {'Type':'Double', 'Retrieve':retrieve_double, 'Update':update_double, 'Blank':float, 'Init':add_vfp_double},
7162 FLOAT: {'Type':'Float', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':float, 'Init':add_vfp_numeric},
7163 NUMERIC: {'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':int, 'Init':add_vfp_numeric},
7164 INTEGER: {'Type':'Integer', 'Retrieve':retrieve_integer, 'Update':update_integer, 'Blank':int, 'Init':add_vfp_integer},
7165 LOGICAL: {'Type':'Logical', 'Retrieve':retrieve_logical, 'Update':update_logical, 'Blank':Logical, 'Init':add_logical},
7166 DATE: {'Type':'Date', 'Retrieve':retrieve_date, 'Update':update_date, 'Blank':Date, 'Init':add_date},
7167 DATETIME: {'Type':'DateTime', 'Retrieve':retrieve_vfp_datetime, 'Update':update_vfp_datetime, 'Blank':DateTime, 'Init':add_vfp_datetime},
7168 MEMO: {'Type':'Memo', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: b' ' * x, 'Init':add_memo},
7169 GENERAL: {'Type':'General', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: b' ' * x, 'Init':add_memo},
7170 PICTURE: {'Type':'Picture', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: b' ' * x, 'Init':add_memo},
7171 _NULLFLAG: {'Type':'_NullFlags', 'Retrieve':unsupported_type, 'Update':unsupported_type, 'Blank':int, 'Init':None} }
7172
7173 _memoext = '.dbt'
7174 _memotypes = ('G', 'M', 'P')
7175 _memoClass = _VfpMemo
7176 _yesMemoMask = 0x8b
7177 _noMemoMask = 0x04
7178 _fixed_fields = ('B', 'D', 'G', 'I', 'L', 'M', 'P', 'T', 'Y')
7179 _variable_fields = ('C', 'F', 'N')
7180 _binary_fields = ('G', 'P')
7181 _character_fields = ('C', 'M')
7182 _decimal_fields = ('F', 'N')
7183 _numeric_fields = ('B', 'F', 'I', 'N', 'Y')
7184 _currency_fields = ('Y',)
7185 _supported_tables = (0x04, 0x8b)
7186 _dbfTableHeader = [0] * 32
7187 _dbfTableHeader[0] = 0x8b
7188 _dbfTableHeader[10] = 0x01
7189 _dbfTableHeader[29] = 0x03
7190 _dbfTableHeader = bytes(_dbfTableHeader)
7191 _dbfTableHeaderExtra = b''
7192
7216
7225
7227 """
7228 marks record as deleted
7229 """
7230 template = isinstance(record, RecordTemplate)
7231 if not template and record._meta.status == CLOSED:
7232 raise DbfError("%s is closed; cannot delete record" % record._meta.filename)
7233 record_in_flux = not record._write_to_disk
7234 if not template and not record_in_flux:
7235 record._start_flux()
7236 try:
7237 record._data[0] = ASTERISK
7238 if not template:
7239 record._dirty = True
7240 except:
7241 if not template and not record_in_flux:
7242 record._rollback_flux()
7243 raise
7244 if not template and not record_in_flux:
7245 record._commit_flux()
7246
def export(table_or_records, filename=None, field_names=None, format='csv', header=True, dialect='dbf', encoding=None):
    """
    writes the records using CSV or tab-delimited format, using the filename
    given if specified, otherwise the table name
    if table_or_records is a collection of records (not an actual table) they
    should all be of the same format

    format: 'csv', 'tab', or 'fixed' (fixed-width, written as .txt plus a
            *_layout.txt file describing the columns)
    header: write field names as the first row ('csv'/'tab')
    encoding: defaults to the table's codepage
    returns the number of records written

    raises DbfError for an unknown format
    """
    table = source_table(table_or_records[0])
    if filename is None:
        filename = table.filename
    if field_names is None:
        field_names = table.field_names
    if isinstance(field_names, basestring):
        field_names = [f.strip() for f in field_names.split(',')]
    format = format.lower()
    if format not in ('csv', 'tab', 'fixed'):
        raise DbfError("export format: csv, tab, or fixed -- not %s" % format)
    if format == 'fixed':
        format = 'txt'
    if encoding is None:
        encoding = table.codepage.name
    # fail fast on an unknown encoding name before creating any files
    encoder = codecs.getencoder(encoding)
    header_names = field_names
    base, ext = os.path.splitext(filename)
    if ext.lower() in ('', '.dbf'):
        filename = base + "." + format
    # fd must pre-exist so the finally clause is safe even if open() fails
    # (previously a failed open raised NameError from fd.close())
    fd = None
    try:
        if format == 'csv':
            fd = open(filename, 'w', encoding=encoding)
            csvfile = csv.writer(fd, dialect=dialect)
            if header:
                csvfile.writerow(header_names)
            for record in table_or_records:
                csvfile.writerow([record[fieldname] for fieldname in field_names])
        elif format == 'tab':
            fd = open(filename, 'w', encoding=encoding)
            if header:
                fd.write('\t'.join(header_names) + '\n')
            for record in table_or_records:
                fields = [str(record[fieldname]) for fieldname in field_names]
                fd.write('\t'.join(fields) + '\n')
        else:
            fd = open(filename, 'w', encoding=encoding)
            sizes = []
            # companion layout file documents the fixed-width columns;
            # `with` guarantees it is closed even if a field lookup fails
            # part-way through (previously it leaked on error)
            with open("%s_layout.txt" % os.path.splitext(filename)[0], 'w', encoding=encoding) as layout:
                layout.write("%-15s Size\n" % "Field Name")
                layout.write("%-15s ----\n" % ("-" * 15))
                for field in field_names:
                    size = table.field_info(field).length
                    sizes.append(size)
                    layout.write("%-15s %3d\n" % (field, size))
                layout.write('\nTotal Records in file: %d\n' % len(table_or_records))
            for record in table_or_records:
                fields = []
                for i, fieldname in enumerate(field_names):
                    fields.append("%-*s" % (sizes[i], record[fieldname]))
                fd.write(''.join(fields) + '\n')
    finally:
        if fd is not None:
            fd.close()
        fd = None
    return len(table_or_records)
7327
7341
7343 """
7344 marked for deletion?
7345 """
7346 return record._data[0] == ASTERISK
7347
7349 """
7350 physical record number
7351 """
7352 return record._recnum
7353
def reset(record, keep_fields=None):
    """
    sets record's fields back to original, except for fields in keep_fields
    """
    template = isinstance(record, RecordTemplate)
    record_in_flux = False
    if not template:
        record_in_flux = not record._write_to_disk
        if record._meta.status == CLOSED:
            raise DbfError("%s is closed; cannot modify record" % record._meta.filename)
    if keep_fields is None:
        keep_fields = []
    # save the to-be-kept values, blank the whole record, then restore them
    saved = {field: record[field] for field in keep_fields}
    record._data[:] = record._meta.blankrecord[:]
    for field, value in saved.items():
        record[field] = value
    if not template:
        if record._write_to_disk:
            record._write()
        else:
            record._dirty = True
7378
7380 """
7381 table associated with table | record | index
7382 """
7383 table = thingie._meta.table()
7384 if table is None:
7385 raise DbfError("table is no longer available")
7386 return table
7387
7389 """
7390 marks record as active
7391 """
7392 template = isinstance(record, RecordTemplate)
7393 if not template and record._meta.status == CLOSED:
7394 raise DbfError("%s is closed; cannot undelete record" % record._meta.filename)
7395 record_in_flux = not record._write_to_disk
7396 if not template and not record_in_flux:
7397 record._start_flux()
7398 try:
7399 record._data[0] = SPACE
7400 if not template:
7401 record._dirty = True
7402 except:
7403 if not template and not record_in_flux:
7404 record._rollback_flux()
7405 raise
7406 if not template and not record_in_flux:
7407 record._commit_flux()
7408 -def write(record, **kwargs):
7420
def Process(records, start=0, stop=None, filter=None):
    """commits each record to disk before returning the next one; undoes all changes to that record if exception raised
    if records is a table, it will be opened and closed if necessary
    filter function should return True to skip record, False to keep"""
    # remember whether we opened the table ourselves so the finally clause
    # only closes what this generator opened
    already_open = True
    if isinstance(records, Table):
        already_open = records.status != CLOSED
        if not already_open:
            records.open()
    try:
        if stop is None:
            stop = len(records)
        for record in records[start:stop]:
            if filter is not None and filter(record):
                continue
            try:
                # put the record in flux and hand it to the caller; commit on
                # normal resumption, roll back if an exception is thrown into
                # the generator, then re-raise
                record._start_flux()
                yield record
            except:
                record._rollback_flux()
                raise
            else:
                record._commit_flux()
    finally:
        if not already_open:
            records.close()
7447
def Templates(records, start=0, stop=None, filter=None):
    """
    returns a template of each record instead of the record itself
    if records is a table, it will be opened and closed if necessary
    """
    # only close the table on the way out if we opened it here
    needs_closing = False
    if isinstance(records, Table) and records.status == CLOSED:
        needs_closing = True
        records.open()
    try:
        if stop is None:
            stop = len(records)
        for record in records[start:stop]:
            if filter is not None and filter(record):
                continue
            yield create_template(record)
    finally:
        if needs_closing:
            records.close()
7468
7470 """
7471 returns integers 0 - len(sequence)
7472 """
7473 for i in range(len(sequence)):
7474 yield i
7475
7486
7510
7512 """
7513 adds fields to an existing table
7514 """
7515 table = Table(table_name)
7516 table.open()
7517 try:
7518 table.add_fields(field_specs)
7519 finally:
7520 table.close()
7521
7523 """
7524 deletes fields from an existing table
7525 """
7526 table = Table(table_name)
7527 table.open()
7528 try:
7529 table.delete_fields(field_names)
7530 finally:
7531 table.close()
7532
7534 """
7535 prints the first record of a table
7536 """
7537 table = Table(table_name)
7538 table.open()
7539 try:
7540 print(str(table[0]))
7541 finally:
7542 table.close()
7543
def from_csv(csvfile, to_disk=False, filename=None, field_names=None, extra_fields=None,
        dbf_type='db3', memo_size=64, min_field_size=1,
        encoding=None, errors=None):
    """
    creates a Character table from a csv file
    to_disk will create a table with the same name
    filename will be used if provided
    field_names default to f0, f1, f2, etc, unless specified (list)
    extra_fields can be used to add additional fields -- should be normal field specifiers (list)
    """
    with codecs.open(csvfile, 'r', encoding='latin-1', errors=errors) as fd:
        reader = csv.reader(fd)
        if not field_names:
            field_names = ['f0 M']
        else:
            if isinstance(field_names, basestring):
                field_names = field_names.split()
            if ' ' not in field_names[0]:
                # bare names given -- make each one a Memo field spec
                field_names = ['%s M' % fn for fn in field_names]
        if filename:
            to_disk = True
        else:
            filename = os.path.splitext(csvfile)[0]
        if to_disk:
            csv_table = Table(filename, [field_names[0]], dbf_type=dbf_type, memo_size=memo_size, codepage=encoding)
        else:
            csv_table = Table(':memory:', [field_names[0]], dbf_type=dbf_type, memo_size=memo_size, codepage=encoding, on_disk=False)
        csv_table.open()
        fields_so_far = 1
        while True:
            try:
                row = next(reader)
            except UnicodeEncodeError:
                row = ['']
            except StopIteration:
                break
            # grow the table's field list to fit the widest row seen so far
            while fields_so_far < len(row):
                if fields_so_far == len(field_names):
                    field_names.append('f%d M' % fields_so_far)
                csv_table.add_fields(field_names[fields_so_far])
                fields_so_far += 1
            csv_table.append(tuple(row))
        if extra_fields:
            csv_table.add_fields(extra_fields)
        csv_table.close()
        return csv_table
7590
7592 """
7593 returns the list of field names of a table
7594 """
7595 table = Table(table_name)
7596 return table.field_names
7597
def info(table_name):
    """
    prints table info
    """
    print(str(Table(table_name)))
7604
7606 """
7607 renames a field in a table
7608 """
7609 table = Table(table_name)
7610 try:
7611 table.rename_field(oldfield, newfield)
7612 finally:
7613 table.close()
7614
7616 """
7617 returns the definition of a field (or all fields)
7618 """
7619 table = Table(table_name)
7620 return table.structure(field)
7621
7623 """
7624 just what it says ;)
7625 """
7626 for index, dummy in enumerate(records):
7627 chars = dummy._data
7628 print("%2d: " % (index,))
7629 for char in chars[1:]:
7630 print(" %2x " % (char,))
7631 print()
7632
7633
7634
7635
def gather(record, data, drop=False):
    """
    saves data into a record's fields; writes to disk if not in flux
    keys with no matching field will raise a FieldMissingError
    exception unless drop_missing == True;
    if an Exception occurs the record is restored before reraising
    """
    if isinstance(record, Record) and record._meta.status == CLOSED:
        raise DbfError("%s is closed; cannot modify record" % record._meta.filename)
    in_flux = not record._write_to_disk
    if not in_flux:
        record._start_flux()
    try:
        valid = field_names(record)
        for key in field_names(data):
            value = data[key]
            if key not in valid:
                if drop:
                    continue
                raise FieldMissingError(key)
            record[key] = value
    except:
        # restore the record before letting the error propagate
        if not in_flux:
            record._rollback_flux()
        raise
    if not in_flux:
        record._commit_flux()
7663
def scan(table, direction='forward', filter=lambda rec: True):
    """
    moves record pointer forward 1; returns False if Eof/Bof reached
    table must be derived from _Navigation or have skip() method
    """
    if direction == 'forward':
        step, sentinel = +1, Eof
    elif direction == 'reverse':
        step, sentinel = -1, Bof
    else:
        raise TypeError("direction should be 'forward' or 'reverse', not %r" % direction)
    try:
        # keep skipping until the filter accepts a record or we run off
        # either end of the table
        while True:
            table.skip(step)
            if filter(table.current_record):
                return True
    except sentinel:
        return False
7684
7686 """
7687 returns as_type() of [fieldnames and] values.
7688 """
7689 if isinstance(as_type, types.FunctionType):
7690 return as_type(record)
7691 elif issubclass(as_type, _mappings):
7692 return as_type(zip(field_names(record), record))
7693 else:
7694 return as_type(record)
7695