[Python-modules-commits] [dbf] 02/07: Imported Upstream version 0.96.003

Sandro Tosi morph at moszumanska.debian.org
Wed Nov 4 01:45:00 UTC 2015


This is an automated email from the git hooks/post-receive script.

morph pushed a commit to branch master
in repository dbf.

commit 25ba7216958f33c10b5927c7aa9cbc9829d1c330
Author: Sandro Tosi <morph at debian.org>
Date:   Wed Nov 4 01:38:12 2015 +0000

    Imported Upstream version 0.96.003
---
 PKG-INFO                                           |   49 +-
 README                                             |   39 -
 dbf/README.md                                      |  128 +
 dbf/WHATSNEW                                       |  293 +
 dbf/__init__.py                                    |  298 +-
 dbf/_io.py                                         |  257 -
 dbf/dates.py                                       |  544 --
 dbf/exceptions.py                                  |   36 -
 dbf/html/api-objects.txt                           | 1528 ----
 dbf/html/class-tree.html                           |  368 -
 dbf/html/crarr.png                                 |  Bin 340 -> 0 bytes
 dbf/html/dbf-module.html                           |  564 --
 dbf/html/dbf-pysrc.html                            |  703 --
 dbf/html/dbf._io-module.html                       |  891 ---
 dbf/html/dbf._io-pysrc.html                        |  461 --
 dbf/html/dbf.dates-module.html                     |  148 -
 dbf/html/dbf.dates-pysrc.html                      | 1271 ---
 dbf/html/dbf.dates.Date-class.html                 |  750 --
 dbf/html/dbf.dates.DateTime-class.html             |  788 --
 dbf/html/dbf.dates.Time-class.html                 |  616 --
 dbf/html/dbf.exceptions-module.html                |  185 -
 dbf/html/dbf.exceptions-pysrc.html                 |  390 -
 dbf/html/dbf.exceptions.Bof-class.html             |  279 -
 dbf/html/dbf.exceptions.DataOverflow-class.html    |  250 -
 dbf/html/dbf.exceptions.DbfError-class.html        |  190 -
 dbf/html/dbf.exceptions.DbfWarning-class.html      |  190 -
 dbf/html/dbf.exceptions.DoNotIndex-class.html      |  272 -
 dbf/html/dbf.exceptions.Eof-class.html             |  279 -
 dbf/html/dbf.exceptions.FieldMissing-class.html    |  260 -
 dbf/html/dbf.exceptions.NonUnicode-class.html      |  248 -
 dbf/html/dbf.old-module.html                       |  539 --
 dbf/html/dbf.old-pysrc.html                        |  686 --
 dbf/html/dbf.old._io-module.html                   |  891 ---
 dbf/html/dbf.old._io-pysrc.html                    |  457 --
 dbf/html/dbf.old.dates-module.html                 |  149 -
 dbf/html/dbf.old.dates-pysrc.html                  | 1272 ---
 dbf/html/dbf.old.dates.Date-class.html             |  751 --
 dbf/html/dbf.old.dates.DateTime-class.html         |  789 --
 dbf/html/dbf.old.dates.Time-class.html             |  617 --
 dbf/html/dbf.old.exceptions-module.html            |  186 -
 dbf/html/dbf.old.exceptions-pysrc.html             |  391 -
 dbf/html/dbf.old.exceptions.Bof-class.html         |  280 -
 .../dbf.old.exceptions.DataOverflow-class.html     |  251 -
 dbf/html/dbf.old.exceptions.DbfError-class.html    |  191 -
 dbf/html/dbf.old.exceptions.DbfWarning-class.html  |  191 -
 dbf/html/dbf.old.exceptions.DoNotIndex-class.html  |  273 -
 dbf/html/dbf.old.exceptions.Eof-class.html         |  280 -
 .../dbf.old.exceptions.FieldMissing-class.html     |  261 -
 dbf/html/dbf.old.exceptions.NonUnicode-class.html  |  249 -
 dbf/html/dbf.old.tables-module.html                |  533 --
 dbf/html/dbf.old.tables-pysrc.html                 | 6499 ---------------
 dbf/html/dbf.old.tables.Db3Table-class.html        |  592 --
 dbf/html/dbf.old.tables.DbfCsv-class.html          |  215 -
 dbf/html/dbf.old.tables.DbfTable-class.html        | 1512 ----
 .../dbf.old.tables.DbfTable.DbfIterator-class.html |  265 -
 .../dbf.old.tables.DbfTable._DbfLists-class.html   |  280 -
 .../dbf.old.tables.DbfTable._Indexen-class.html    |  280 -
 .../dbf.old.tables.DbfTable._MetaData-class.html   |  296 -
 dbf/html/dbf.old.tables.DbfTable._Table-class.html |  285 -
 ...dbf.old.tables.DbfTable._TableHeader-class.html |  330 -
 dbf/html/dbf.old.tables.FpTable-class.html         |  615 --
 dbf/html/dbf.old.tables.Index-class.html           |  575 --
 .../dbf.old.tables.Index.IndexIterator-class.html  |  267 -
 dbf/html/dbf.old.tables.List-class.html            |  793 --
 dbf/html/dbf.old.tables.VfpTable-class.html        |  615 --
 dbf/html/dbf.old.tables._Db3Memo-class.html        |  319 -
 dbf/html/dbf.old.tables._Db4Table-class.html       |  540 --
 dbf/html/dbf.old.tables._DbfMemo-class.html        |  328 -
 dbf/html/dbf.old.tables._DbfRecord-class.html      |  840 --
 dbf/html/dbf.old.tables._VfpMemo-class.html        |  319 -
 dbf/html/dbf.old.tables.property-class.html        |  342 -
 dbf/html/dbf.tables-module.html                    |  603 --
 dbf/html/dbf.tables-pysrc.html                     | 6792 ----------------
 dbf/html/dbf.tables.Db3Table-class.html            |  589 --
 dbf/html/dbf.tables.DbfCsv-class.html              |  214 -
 dbf/html/dbf.tables.DbfTable-class.html            | 1514 ----
 .../dbf.tables.DbfTable.DbfIterator-class.html     |  264 -
 dbf/html/dbf.tables.DbfTable._DbfLists-class.html  |  279 -
 dbf/html/dbf.tables.DbfTable._Indexen-class.html   |  279 -
 dbf/html/dbf.tables.DbfTable._MetaData-class.html  |  295 -
 dbf/html/dbf.tables.DbfTable._Table-class.html     |  284 -
 .../dbf.tables.DbfTable._TableHeader-class.html    |  329 -
 dbf/html/dbf.tables.FpTable-class.html             |  614 --
 dbf/html/dbf.tables.Index-class.html               |  611 --
 dbf/html/dbf.tables.Index.IndexIterator-class.html |  266 -
 dbf/html/dbf.tables.List-class.html                |  794 --
 dbf/html/dbf.tables.VfpTable-class.html            |  614 --
 dbf/html/dbf.tables._Db3Memo-class.html            |  318 -
 dbf/html/dbf.tables._Db4Table-class.html           |  539 --
 dbf/html/dbf.tables._DbfMemo-class.html            |  327 -
 dbf/html/dbf.tables._DbfRecord-class.html          |  839 --
 dbf/html/dbf.tables._VfpMemo-class.html            |  318 -
 dbf/html/dbf.tables.property-class.html            |  341 -
 dbf/html/epydoc.css                                |  322 -
 dbf/html/epydoc.js                                 |  293 -
 dbf/html/help.html                                 |  264 -
 dbf/html/identifier-index.html                     | 2875 -------
 dbf/html/index.html                                |  564 --
 dbf/html/module-tree.html                          |  116 -
 dbf/html/redirect.html                             |   38 -
 dbf/tables.py                                      | 2566 ------
 dbf/tests.py                                       | 4903 ++++++++++++
 dbf/ver_2.py                                       | 8286 ++++++++++++++++++++
 dbf/ver_32.py                                      | 7711 ++++++++++++++++++
 dbf/ver_33.py                                      | 7713 ++++++++++++++++++
 setup.py                                           |   33 +-
 106 files changed, 29157 insertions(+), 61169 deletions(-)

diff --git a/PKG-INFO b/PKG-INFO
index e3d58c0..cfaf7c2 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,21 +1,28 @@
-Metadata-Version: 1.1
-Name: dbf
-Version: 0.88.16
-Summary: Pure python package for reading/writing dBase, FoxPro, and Visual FoxPro .dbf files (including memos)
-Home-page: http://groups.google.com/group/python-dbase
-Author: Ethan Furman
-Author-email: ethan at stoneleaf.us
-License: BSD License
-Description: 
-        Currently supports dBase III, and FoxPro - Visual FoxPro 6 tables. Text is returned as unicode, and codepage settings in tables are honored. Documentation needs work, but author is very responsive to e-mails.
-        
-        Not supported: index files, null fields, auto-incrementing fields.
-        
-        
-Platform: UNKNOWN
-Classifier: Development Status :: 4 - Beta
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: BSD License
-Classifier: Programming Language :: Python
-Classifier: Topic :: Database
-Provides: dbf
+Metadata-Version: 1.1
+Name: dbf
+Version: 0.96.003
+Summary: Pure python package for reading/writing dBase, FoxPro, and Visual FoxPro .dbf files (including memos)
+Home-page: https://pypi.python.org/pypi/dbf
+Author: Ethan Furman
+Author-email: ethan at stoneleaf.us
+License: BSD License
+Description: 
+        Currently supports dBase III, FoxPro, and Visual FoxPro tables. Text is returned as unicode, and codepage settings in tables are honored. Memos and Null fields are supported.  Documentation needs work, but author is very responsive to e-mails.
+        
+        Not supported: index files (but can create temporary non-file indexes), auto-incrementing fields, and Varchar fields.
+        
+        Installation:  `pip install dbf`
+        
+        There may be messages about byte-compiling failures -- you can safely ignore them (this is a multi-version release, and Python 2 and 3 each reject some of the other's syntax).
+        
+Platform: UNKNOWN
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python
+Classifier: Topic :: Database
+Classifier: Programming Language :: Python :: 2.5
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Provides: dbf
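
The updated description above says text comes back as unicode and that codepage settings in the table header are honored. As a rough illustration of that claim (not taken from the package itself; the path below is a placeholder), reading an existing table looks roughly like this:

    # Reading sketch based on the description above; the path is a placeholder.
    import dbf

    table = dbf.Table('/path/to/legacy.dbf')   # table type is inferred from the header byte
    table.open()
    print(table.field_names)                   # field names defined in the header
    for record in table:
        print(list(record))                    # Character/Memo values arrive as unicode
    table.close()
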
diff --git a/README b/README
deleted file mode 100644
index c92e9e5..0000000
--- a/README
+++ /dev/null
@@ -1,39 +0,0 @@
-sample table & data:
-
-  sample = dbf.table('/temp/sample', "name C(30), age N(3.0), wisdom M")
-
-  record = sample.append()
-  record['name'] = 'Ethan'
-  record['age'] = 37
-  record['wisdom'] = 'Python rules!'
-
-  record = {'name':'Allen', 'age':51, 'wisdom':'code smarter, not harder'}
-  sample.append(record)
-
-  sample.append()
-  record = sample[-1]
-  record.name = 'Alexis'
-  record.age = 29
-  record.wisdom = 'take a break!  refresh the little grey cells!'
-
-retrieving data to store it somewhere else:
-  source = dbf.table('/some/path/to/file.dbf')
-  for record in source:
-    data = record.scatterFields()   # creates dictionary {fieldname:value, fieldname:value, ...}
-    data = list(record)             # creates list of values in field order
-    # do something with the data
-
-
-Important notes:
-
-* When accessing a text field, the returned data does not include trailing blanks...
-    for record in sample:
-       print '"' + record.name + '"'
-  prints:           NOT:
-    "Ethan"             "Ethan                         "
-    "Allen"             "Allen                         "
-    "Alexis"            "Alexis                        "
-  keep this in mind when doing comparisons.
-
-Things to do:
-    Better documentation.
diff --git a/dbf/README.md b/dbf/README.md
new file mode 100644
index 0000000..ae4e684
--- /dev/null
+++ b/dbf/README.md
@@ -0,0 +1,128 @@
+dbf
+===
+
+dbf (also known as python dbase) is a module for reading/writing
+dBase III, FP, VFP, and Clipper .dbf database files.  It's
+an ancient format that still finds lots of use (the most common
+I'm aware of is retrieving legacy data so it can be stored in a
+newer database system; other uses include GIS, stand-alone programs
+such as Family History, Personal Finance, etc.).
+
+Highlights
+----------
+
+Table -- represents a single .dbf/.dbt (or .fpt) file combination
+and provides access to records; supports the sequence and 'with'
+protocols.  Temporary tables can also live entirely in memory.
+
+Record -- represents a single record/row in the table, with field access
+returning native or custom data types; supports the sequence, mapping,
+attribute access (with the field names as the attributes), and 'with'
+protocols.  Updates to a record object are reflected on disk either
+immediately (using gather() or write()), or at the end of a 'with'
+statement.
+
+Index -- nonpersistent index for a table.
+
+Fields::
+
+    dBase III (Null not supported)
+
+        Character --> unicode
+        Date      --> datetime.date or None
+        Logical   --> bool or None
+        Memo      --> unicode or None
+        Numeric   --> int/float depending on field definition or None
+
+        Float     --> same as numeric
+
+    Clipper (Null not supported)
+
+        Character --> unicode  (character fields can be up to 65,519)
+
+    Foxpro (Null supported)
+
+        General   --> str (treated as binary)
+        Picture   --> str (treated as binary)
+
+    Visual Foxpro (Null supported)
+
+        Currency  --> decimal.Decimal
+        douBle    --> float
+        Integer   --> int
+        dateTime  --> datetime.datetime
+
+    If a field is uninitialized (Date, Logical, Numeric, Memo, General,
+    Picture) then None is returned for the value.
+
+Custom data types::
+
+    Null     -->  used to support Null values
+
+    Char     -->  unicode type that auto-trims trailing whitespace, and
+                  ignores trailing whitespace for comparisons
+
+    Date     -->  date object that allows for no date
+
+    DateTime -->  datetime object that allows for no datetime
+
+    Time     -->  time object that allows for no time
+
+    Logical  -->  adds an Unknown state to bool: instead of True/False/None,
+                  values are Truth, Falsth, and Unknown, with appropriate
+                  tri-state logic; just as bool(None) is False, bool(Unknown)
+                  is also False; the numerical values of Falsth, Truth, and
+                  Unknown are 0, 1, and 2
+
+    Quantum  -->  similar to Logical, but implements boolean algebra (I think).
+                  Has states of Off, On, and Other.  Other has neither a boolean
+                  nor a numerical value, and attempts to use it as such will
+                  raise an exception
+
+
+Whirlwind Tour
+--------------
+
+    import datetime
+    import dbf
+
+    table = dbf.Table(
+            filename='test',
+            field_specs='name C(25); age N(3,0); birth D; qualified L',
+            on_disk=False,
+            )
+    table.open()
+
+    for datum in (
+            ('Spanky', 7, dbf.Date.fromymd('20010315'), False),
+            ('Spunky', 23, dbf.Date(1989, 07, 23), True),
+            ('Sparky', 99, dbf.Date(), dbf.Unknown),
+            ):
+        table.append(datum)
+
+    for record in table:
+        print record
+        print '--------'
+        print record[0:3]
+        print record['name':'qualified']
+        print [record.name, record.age, record.birth]
+        print '--------'
+
+    custom = table.new(
+            filename='test_on_disk',
+            default_data_types=dict(C=dbf.Char, D=dbf.Date, L=dbf.Logical),
+            )
+
+    with custom:    # automatically opened and closed
+        for record in table:
+            custom.append(record)
+        for record in custom:
+            dbf.write(record, name=record.name.upper())
+            print record
+            print '--------'
+            print record[0:3]
+            print record['name':'qualified']
+            print [record.name, record.age, record.birth]
+            print '--------'
+
+    table.close()
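
The Whirlwind Tour above is written in Python 2 syntax (print statements, the octal-style literal 07); since this release also targets Python 3, here is a minimal sketch of the same flow in Python 3 syntax, assuming only the API shown in the README above:

    # Python 3 rendering of the tour above; field specs and values copied from the README.
    import dbf

    table = dbf.Table(
            filename='test',
            field_specs='name C(25); age N(3,0); birth D; qualified L',
            on_disk=False,              # temporary table living entirely in memory
            )
    table.open()

    for datum in (
            ('Spanky', 7, dbf.Date.fromymd('20010315'), False),
            ('Spunky', 23, dbf.Date(1989, 7, 23), True),
            ('Sparky', 99, dbf.Date(), dbf.Unknown),
            ):
        table.append(datum)

    for record in table:
        print(record['name':'qualified'])               # slicing by field name
        print([record.name, record.age, record.birth])  # attribute access

    table.close()
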
diff --git a/dbf/WHATSNEW b/dbf/WHATSNEW
new file mode 100644
index 0000000..9b3211f
--- /dev/null
+++ b/dbf/WHATSNEW
@@ -0,0 +1,293 @@
+What's New
+==========
+
+0.96.000
+--------
+
+add support for Python 3
+convert from module to package layout
+remove pql
+fix Time and DateTime signatures: microsec --> microseconds
+fix Time and DateTime .now() to truncate microseconds to millisecond precision
+
+
+0.95.014
+--------
+
+use a sparse container for the table -- should make very large dbf files usable
+
+
+0.95.013
+--------
+
+Null fields are properly ignored if the table doesn't support them
+
+
+0.95.012
+--------
+
+adjust setup.py to require enum34
+
+add custom data types to xmlrpclib.Marshaller (marshalled as the underlying
+type)
+
+add support for storing numbers as scientific notation
+
+fixed DateTime.now() and Time.now() to only return milliseconds
+
+
+0.95.008
+--------
+
+fix Period.__contains__
+
+add new default_data_type to Table.__init__ of 'enhanced' which selects all
+the custom data types (Char, Logical, Date, DateTime)
+
+add string input type to Date, Time, and DateTime
+
+
+0.95.007
+--------
+
+Add .fromfloat() to Time
+
+Add .tofloat() to Time
+
+Add Period for matching various time periods 
+
+
+0.95.006
+--------
+
+Add .format() and .replace() to Date, Time, and DateTime
+
+Add nesting to Table context manager
+
+Add enumerations IsoDay, RelativeDay, IsoMonth, RelativeMonth
+
+
+0.95.003
+--------
+
+Fixed issue with incorrect memo size in base file (thanks, Glenn!)
+
+Memo file extensions now have the same casing as the dbf file's, and are
+searched for that way (thanks again, Glenn!)
+
+
+0.95.002
+--------
+
+Fixed the version number in this file for the last release. :/
+
+string slices now work for RecordTemplate
+
+
+0.95.001
+--------
+
+Miscellaneous bugs squashed.
+
+backup tables are created in the same location as the original table if none of
+TMP, TEMP, or DBF_TEMP is defined in the environment
+
+delete() and undelete() now support RecordTemplate
+
+Process() and Templates() now support start, stop, and filter to allow finer
+control of which records will be returned.
+
+Added Relation, which makes linking two tables together on a common field easier.
+Not persistent.
+
+xBase-Compatibility Break: added utf8 codepage (0xf0).
+
+Backwards-Compatibility Break: reverted Logical.__eq__ to return True if Logical
+is True, False otherwise; this properly mimics the behavior of using True/False/None
+directly.  If the previous behavior is desired, use Quantum instead (it uses the
+states On/Off/Other), or use `if some_var is Unknown: ... ; elif some_var ... ; else ... `.
+
+Many thanks to all who have helped with ideas and bug fixes.
+
+
+0.94.004
+--------
+
+Templates now use same (custom) data types as table they are created
+from.
+
+Added Index.index_search(match, start=None, stop=None, nearest=False, partial=False)
+which returns the index of the first match.  If nearest is False and nothing is found
+a NotFoundError is raised, otherwise the index of where the match would be is
+returned
+
+Added IndexLocation, which is a subclass of long and is returned by Index.index_search.
+Unlike normal numbers where 0 == False and anything else == True, IndexLocation is True if
+the number represents a found match, and False if the number represents where a match
+should be (a False value will only be returned if nearest == True).
+
+Backwards-Compatibility Break: memory-only tables are now specified with on_disk=False
+instead of bracketing the filename with ':'.  Removed dbf.codepage() and dbf.encoding()
+as users can directly access dbf.default_codepage and dbf.input_decoding.
+
+Backwards-Compatibility Break: .use_deleted no longer used (it disappeared sometime
+between .90.0 and now). Rationale: the deleted flag is just that: a flag.  The record is
+still present and still available.  If you don't want to use it, either check if the
+record has been deleted (dbf.is_deleted(record)) or create an index that doesn't include
+the deleted records... or pack the table and actually remove the records for good.
+
+
+0.94.003
+--------
+
+Minor bug fixes, more documentation.
+
+
+0.94.001
+--------
+
+Added support for Clipper's large Character fields (up to 65,519)
+
+More code clean-up and slight breakage::
+
+    - _Dbf* has had '_Dbf' removed  (_DbfRecord --> Record)
+    - DbfTable --> Table (Table factory function removed)
+
+
+0.94.000
+--------
+
+Massive backwards incompatible changes.
+
+export() method removed from Tables and made into a normal function.
+
+All non-underscore methods removed from the record class and made into
+normal functions::
+
+    - delete_record --> delete
+    - field_names --> field_names
+    - gather_records --> gather
+    - has_been_deleted --> is_deleted
+    - record_number --> recno
+    - reset_record --> reset
+    - scatter_records --> scatter
+    - undelete_record --> undelete
+    - write_record --> write
+
+Transaction methods removed entirely.
+
+Can use strings as start/stop of slices: `record['name':'age']`
+
+Record templates now exist, and are much like records except that they are
+not directly tied to a table and can be freely modified.  They can be created
+by either the `dbf.create_template` function or the `table.create_template` method.
+
+scatter() now returns a RecordTemplate instead of a dict, but the as_type parameter
+can be used to get dicts (or tuples, lists, whatever)
+
+
+0.93.020
+--------
+    Finished changes so other Python implementations should work (PyPy
+    definitely does).
+
+    Table signature changed -- `read_only`, `meta_only`, and `keep_memos`
+    dropped.
+
+    tables now have a `status` attribute which will be one of `closed`,
+    `read_only`, or `read_write`
+
+    `.append` no longer returns the newly added record (use table[-1] if you need it)
+    `.find` method removed (use `.query` instead);
+    `.sql` method removed (use `.query` instead);
+    `.size` renamed to `.field_size`;
+    `.type` renamed to `.field_type` (which returns a FieldType named tuple);
+
+    the way to change records has changed:
+
+        to update any/all fields at once:
+            record.write_record(field1=..., field2=...)
+
+            or
+
+            record.gather_fields(dict)
+
+        to update one field at a time:
+            2.6, 2.7 (2.5 using `from __future__ import with_statement`)
+            with record:
+                record.field1 = ...
+                record.field2 = ...
+
+            or
+
+            for record in dbf.Process(table | records):
+                record.field1 = ...
+                record.field2 = ...
+
+    attempting to change a field outside of these two methods will raise a
+    `DbfError`.
+
+    Changing behavior based on a transaction:
+
+        record.gather_fields()
+
+            if a transaction is not running this will write to disk
+            (no changes made if error occurs, exception reraised)
+
+            if a transaction is running, and an error occurs, the calling code
+            is responsible for calling .rollback_transaction() or otherwise
+            handling the problem (exception is reraised)
+
+        record.reset_record()
+
+            if a transaction is not running the changes are written to disk
+
+            if a transaction is running the changes are not written to disk
+
+    `xxx in table` and `xxx in record` used to be a field-name check - it is
+    now a record / value check; use `xxx in table.field_names` and
+    `xxx in record.field_names` to do the field-name check.
+
+    added equality/inequality check for records, which can be compared against
+    other records / dicts / tuples (field/key order does not matter for
+    record-record or record-dict checks).
+
+
+0.93.011
+--------
+    `with` will work now.  Really.
+
+    Started making changes so dbf will work with the non-CPython
+    implementations (at this point it is not reliable).
+
+
+0.93.010
+--------
+    Table now returns a closed database; .open() must now be called before
+    accessing the records.
+    Note: fields, number of records, table type, and other metadata are
+    available on closed tables.
+
+    Finished adding support for 'F' (aka 'N') field types in dBase III tables;
+    this is a practicality beats purity issue as the F type is not part of the
+    db3 standard, but is exactly the same as N and other programs will use it
+    instead of N when creating db3 tables.
+
+
+0.93.000
+--------
+    PEP 8 changes (yo --> self, someMethod --> some_method)
+
+
+0.92.002
+--------
+    added more support for the Null type in the other custom data types
+
+
+0.91.001
+--------
+    Removed __del__ from dbf records; consequently they no longer autosave when
+    going out of scope.  Either call .write_record() explicitly, or use the new
+    Write iterator which will call .write_record for you.
+
+    Finished adding Null support (not supported in db3 tables)
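
The 0.93.020 entry above lists the only two supported ways to modify fields: inside a 'with record:' block, or through the Process() iterator. A short sketch of both idioms, assuming a table and field names ('name', 'age') taken from the README example earlier in this commit:

    # The two update idioms described in the 0.93.020 notes; `table` and the
    # field names are assumed from the README example above.
    import dbf

    # 1) explicit 'with' block -- changes are written to disk when the block ends
    for record in table:
        with record:
            record.name = record.name.upper()

    # 2) Process() wraps each record in the same context manager for you
    for record in dbf.Process(table):
        record.age = record.age + 1
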
diff --git a/dbf/__init__.py b/dbf/__init__.py
index 53df532..da51f44 100644
--- a/dbf/__init__.py
+++ b/dbf/__init__.py
@@ -1,226 +1,72 @@
-"""
-Copyright
-=========
-    - Copyright: 2008-2009 Ad-Mail, Inc -- All rights reserved.
-    - Author: Ethan Furman
-    - Contact: ethan at stoneleaf.us
-    - Organization: Ad-Mail, Inc.
-    - Version: 0.88.016 as of 06 Dec 2010
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-    - Redistributions of source code must retain the above copyright
-      notice, this list of conditions and the following disclaimer.
-    - Redistributions in binary form must reproduce the above copyright
-      notice, this list of conditions and the following disclaimer in the
-      documentation and/or other materials provided with the distribution.
-    - Neither the name of Ad-Mail, Inc nor the
-      names of its contributors may be used to endorse or promote products
-      derived from this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY Ad-Mail, Inc ''AS IS'' AND ANY
-EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL Ad-Mail, Inc BE LIABLE FOR ANY
-DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-B{I{Summary}}
-
-Python package for reading/writing dBase III and VFP 6 tables and memos
-
-The entire table is read into memory, and all operations occur on the in-memory
-table, with data changes being written to disk as they occur.
-
-Goals:  programming style with databases
-    - C{table = dbf.table('table name' [, fielddesc[, fielddesc[, ....]]])}
-        - fielddesc examples:  C{name C(30); age N(3,0); wisdom M; marriage D}
-    - C{record = [ table.current() | table[int] | table.append() | table.[next|prev|top|bottom|goto]() ]}
-    - C{record.field | record['field']} accesses the field
-
-NOTE:  Of the VFP data types, auto-increment and null settings are not implemented.
-"""
-import os
-import csv
-
-from dbf.dates import Date, DateTime, Time
-from dbf.exceptions import DbfWarning, Bof, Eof, DbfError, DataOverflow, FieldMissing, DoNotIndex
-from dbf.tables import DbfTable, Db3Table, VfpTable, FpTable, List, DbfCsv
-from dbf.tables import sql, ascii, codepage, encoding, version_map
-
-version = (0, 88, 16)
-
-default_type = 'db3'    # default format if none specified
-sql_user_functions = {}      # user-defined sql functions
-
-__docformat__ = 'epytext'
-
-def Table(filename, field_specs='', memo_size=128, ignore_memos=False, \
-          read_only=False, keep_memos=False, meta_only=False, dbf_type=None, codepage=None):
-    "returns an open table of the correct dbf_type, or creates it if field_specs is given"
-    #- print "dbf.Table(%s)" % ', '.join(['%r' % arg for arg in (filename, field_specs, dbf_type, codepage)])
-    if field_specs and dbf_type is None:
-        dbf_type = default_type
-    if dbf_type is not None:
-        dbf_type = dbf_type.lower()
-        if dbf_type == 'db3':
-            return Db3Table(filename, field_specs, memo_size, ignore_memos, read_only, keep_memos, meta_only, codepage)
-        elif dbf_type == 'fp':
-            return FpTable(filename, field_specs, memo_size, ignore_memos, read_only, keep_memos, meta_only, codepage)
-        elif dbf_type == 'vfp':
-            return VfpTable(filename, field_specs, memo_size, ignore_memos, read_only, keep_memos, meta_only, codepage)
-        elif dbf_type == 'dbf':
-            return DbfTable(filename, field_specs, memo_size, ignore_memos, read_only, keep_memos, meta_only, codepage)
-        else:
-            raise DbfError("Unknown table type: %s" % dbf_type)
-    else:
-        possibles = guess_table_type(filename)
-        if len(possibles) == 1:
-            return possibles[0][2](filename, field_specs, memo_size, ignore_memos, \
-                                 read_only, keep_memos, meta_only)
-        else:
-            for type, desc, cls in possibles:
-                if type == default_type:
-                    return cls(filename, field_specs, memo_size, ignore_memos, \
-                                 read_only, keep_memos, meta_only)
-            else:
-                types = ', '.join(["%s" % item[1] for item in possibles])
-                abbrs = '[' + ' | '.join(["%s" % item[0] for item in possibles]) + ']'
-                raise DbfError("Table could be any of %s.  Please specify %s when opening" % (types, abbrs))
-def index(sequence):
-    "returns integers 0 - len(sequence)"
-    for i in xrange(len(sequence)):
-        yield i    
-def guess_table_type(filename):
-    reported = table_type(filename)
-    possibles = []
-    version = reported[0]
-    for tabletype in (Db3Table, FpTable, VfpTable):
-        if version in tabletype._supported_tables:
-            possibles.append((tabletype._versionabbv, tabletype._version, tabletype))
-    if not possibles:
-        raise DbfError("Tables of type %s not supported" % str(reported))
-    return possibles
-def table_type(filename):
-    "returns text representation of a table's dbf version"
-    base, ext = os.path.splitext(filename)
-    if ext == '':
-        filename = base + '.dbf'
-    if not os.path.exists(filename):
-        raise DbfError('File %s not found' % filename)
-    fd = open(filename)
-    version = fd.read(1)
-    fd.close()
-    fd = None
-    if not version in version_map:
-        raise DbfError("Unknown dbf type: %s (%x)" % (version, ord(version)))
-    return version, version_map[version]
-
-def add_fields(table, field_specs):
-    "adds fields to an existing table"
-    table = Table(table)
-    try:
-        table.add_fields(field_specs)
-    finally:
-        table.close()
-def delete_fields(table, field_names):
-    "deletes fields from an existing table"
-    table = Table(table)
-    try:
-        table.delete_fields(field_names)
-    finally:
-        table.close()
-def export(table, filename='', fields='', format='csv', header=True):
-    "creates a csv or tab-delimited file from an existing table"
-    if fields is None:
-        fields = []
-    table = Table(table)
-    try:
-        table.export(filename=filename, field_specs=fields, format=format, header=header)
-    finally:
-        table.close()
-def first_record(table):
-    "prints the first record of a table"
-    table = Table(table)
-    try:
-        print str(table[0])
-    finally:
-        table.close()
-def from_csv(csvfile, to_disk=False, filename=None, field_names=None, extra_fields=None, dbf_type='db3', memo_size=64, min_field_size=1):
-    """creates a Character table from a csv file
-    to_disk will create a table with the same name
-    filename will be used if provided
-    field_names default to f0, f1, f2, etc, unless specified (list)
-    extra_fields can be used to add additional fields -- should be normal field specifiers (list)"""
-    reader = csv.reader(open(csvfile))
-    if field_names:
-        field_names = ['%s M' % fn for fn in field_names]
-    else:
-        field_names = ['f0 M']
-    mtable = Table(':memory:', [field_names[0]], dbf_type=dbf_type, memo_size=memo_size)
-    fields_so_far = 1
-    for row in reader:
-        while fields_so_far < len(row):
-            if fields_so_far == len(field_names):
-                field_names.append('f%d M' % fields_so_far)
-            mtable.add_fields(field_names[fields_so_far])
-            fields_so_far += 1
-        mtable.append(tuple(row))
-    if filename:
-        to_disk = True
-    if not to_disk:
-        if extra_fields:
-            mtable.add_fields(extra_fields)
-    else:
-        if not filename:
-            filename = os.path.splitext(csvfile)[0]
-        length = [min_field_size] * len(field_names)
-        for record in mtable:
-            for i in index(record.field_names):
-                length[i] = max(length[i], len(record[i]))
-        fields = mtable.field_names
-        fielddef = []
-        for i in index(length):
-            if length[i] < 255:
-                fielddef.append('%s C(%d)' % (fields[i], length[i]))
-            else:
-                fielddef.append('%s M' % (fields[i]))
-        if extra_fields:
-            fielddef.extend(extra_fields)
-        csvtable = Table(filename, fielddef, dbf_type=dbf_type)
-        for record in mtable:
-            csvtable.append(record.scatter_fields())
-        return csvtable
-    return mtable
-def get_fields(table):
-    "returns the list of field names of a table"
-    table = Table(table)
-    return table.field_names
-def info(table):
-    "prints table info"
-    table = Table(table)
-    print str(table)
-def rename_field(table, oldfield, newfield):
-    "renames a field in a table"
-    table = Table(table)
-    try:
-        table.rename_field(oldfield, newfield)
-    finally:
-        table.close()
-def structure(table, field=None):
-    "returns the definition of a field (or all fields)"
-    table = Table(table)
-    return table.structure(field)
-def hex_dump(records):
-    "just what it says ;)"
-    for index,dummy in enumerate(records):
-        chars = dummy._data
-        print "%2d: " % index,
-        for char in chars[1:]:
-            print " %2x " % ord(char),
-        print
-
+import sys as _sys
+
+version = (0, 96, 3)
+
+py_ver = _sys.version_info[:2]
+if py_ver >= (3, 3):
+    from dbf import ver_33 as _dbf
+elif py_ver[:2] == (3, 2):
+    from dbf import ver_32 as _dbf
+elif (2, 5) <= py_ver[:2] < (3, 0):
+    from dbf import ver_2 as _dbf
+else:
+    raise ImportError('dbf does not support Python %d.%d' % py_ver[:2])
+
+del py_ver
+
+__all__ = (
+        'Table', 'Record', 'List', 'Index', 'Relation', 'Iter', 'Date', 'DateTime', 'Time',
+        'CodePage', 'create_template', 'delete', 'field_names', 'gather', 'is_deleted',
+        'recno', 'source_table', 'reset', 'scatter', 'undelete',
+        'DbfError', 'DataOverflowError', 'BadDataError', 'FieldMissingError',
+        'FieldSpecError', 'NonUnicodeError', 'NotFoundError',
+        'DbfWarning', 'Eof', 'Bof', 'DoNotIndex',
+        'Null', 'Char', 'Date', 'DateTime', 'Time', 'Logical', 'Quantum',
+        'NullDate', 'NullDateTime', 'NullTime', 'Vapor', 'Period',
+        'Process', 'Templates',
+        'Truth', 'Falsth', 'Unknown', 'NoneType', 'Decimal', 'IndexLocation',
+        'guess_table_type', 'table_type',
+        'add_fields', 'delete_fields', 'get_fields', 'rename_field',
+        'export', 'first_record', 'from_csv', 'info', 'structure',
+        )
+
+module = globals()
+
+for name in dir(_dbf):
+    if name.startswith('__') or name == 'module':
+        continue
+    module[name] = getattr(_dbf, name)
+
+
+# from dbf.api import *
+
+class fake_module(object):
+
+    def __init__(self, name, *args):
+        self.name = name
+        self.__all__ = []
+        all_objects = globals()
+        for name in args:
+            self.__dict__[name] = all_objects[name]
+            self.__all__.append(name)
+
+    def register(self):
+        _sys.modules["%s.%s" % (__name__, self.name)] = self
+
+fake_module('api',
+    'Table', 'Record', 'List', 'Index', 'Relation', 'Iter', 'Null', 'Char', 'Date', 'DateTime', 'Time',
+    'Logical', 'Quantum', 'CodePage', 'create_template', 'delete', 'field_names', 'gather', 'is_deleted',
+    'recno', 'source_table', 'reset', 'scatter', 'undelete',
+    'NullDate', 'NullDateTime', 'NullTime', 'NoneType', 'NullType', 'Decimal', 'Vapor', 'Period',
+    'Truth', 'Falsth', 'Unknown', 'On', 'Off', 'Other',
+    'DbfError', 'DataOverflowError', 'BadDataError', 'FieldMissingError',
+    'FieldSpecError', 'NonUnicodeError', 'NotFoundError',
+    'DbfWarning', 'Eof', 'Bof', 'DoNotIndex', 'IndexLocation',
+    'Process', 'Templates',
+    ).register()
+
+dbf = fake_module('dbf', *__all__)
+setattr(_dbf, 'dbf', dbf)
+del dbf
+del _dbf
+
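
The new __init__.py above picks a version-specific implementation module at import time and then plants synthetic submodules (dbf.api, dbf.dbf) directly into sys.modules via the fake_module helper. A stripped-down sketch of that registration technique, using types.ModuleType instead of the custom class above ('mypkg' and the exported names are placeholders, not part of dbf):

    # Stand-alone illustration of the sys.modules trick used by fake_module above.
    # 'mypkg' and the exported names are placeholders for this sketch.
    import sys
    import types

    def register_api(parent_name, **exports):
        # build a synthetic submodule and make `from <parent>.api import ...` work
        mod = types.ModuleType('%s.api' % parent_name)
        for name, obj in exports.items():
            setattr(mod, name, obj)
        mod.__all__ = sorted(exports)
        sys.modules[mod.__name__] = mod
        return mod

    # register_api('mypkg', Table=Table, Record=Record)
    # afterwards (with 'mypkg' already imported):  from mypkg.api import Table
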
diff --git a/dbf/_io.py b/dbf/_io.py
deleted file mode 100644
index 1d62140..0000000
--- a/dbf/_io.py
+++ /dev/null
@@ -1,257 +0,0 @@
-"""Routines for saving, retrieving, and creating fields"""
-
-import struct
-from decimal import Decimal
-from dbf.exceptions import DbfError, DataOverflow
-from dbf.dates import Date, DateTime, Time
-from math import floor
-
-
-# Constants
-VFPTIME = 1721425
-
-def packShortInt(value, bigendian=False):
-        "Returns a two-bye integer from the value, or raises DbfError"
-        # 256 / 65,536
-        if value > 65535:
-            raise DateOverflow("Maximum Integer size exceeded.  Possible: 65535.  Attempted: %d" % value)
-        if bigendian:
-            return struct.pack('>H', value)
-        else:
-            return struct.pack('<H', value)
-def packLongInt(value, bigendian=False):
-        "Returns a four-bye integer from the value, or raises DbfError"
-        # 256 / 65,536 / 16,777,216
-        if value > 4294967295:
-            raise DateOverflow("Maximum Integer size exceeded.  Possible: 4294967295.  Attempted: %d" % value)
-        if bigendian:
-            return struct.pack('>L', value)
-        else:
-            return struct.pack('<L', value)
-def packDate(date):
-        "Returns a group of three bytes, in integer form, of the date"
-        return "%c%c%c" % (date.year-1900, date.month, date.day)
-def packStr(string):
-        "Returns an 11 byte, upper-cased, null padded string suitable for field names; raises DbfError if the string is bigger than 10 bytes"
-        if len(string) > 10:
-            raise DbfError("Maximum string size is ten characters -- %s has %d characters" % (string, len(string)))
-        return struct.pack('11s', string.upper())       
-def unpackShortInt(bytes, bigendian=False):
-        "Returns the value in the two-byte integer passed in"
-        if bigendian:
-            return struct.unpack('>H', bytes)[0]
-        else:
-            return struct.unpack('<H', bytes)[0]
-def unpackLongInt(bytes, bigendian=False):
... 90094 lines suppressed ...

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/python-modules/packages/dbf.git


