/[thuban]/branches/WIP-pyshapelib-bramz/libraries/pyshapelib/dbflibmodule.c

Diff of /branches/WIP-pyshapelib-bramz/libraries/pyshapelib/dbflibmodule.c


Removed from v.2453: trunk/thuban/libraries/pyshapelib/dbflib.i (revision 2453 by bh, Mon Dec 13 17:57:04 2004 UTC)
Added in v.2744:     branches/WIP-pyshapelib-bramz/libraries/pyshapelib/dbflibmodule.c (revision 2744 by bramz, Thu Mar 15 13:48:58 2007 UTC)
Added in v.2744:

#include "pyshapelib_common.h"

Removed from v.2453:

/* SWIG (www.swig.org) interface file for the dbf interface of shapelib
  *  
  * At the moment (Dec 2000) this file is only useful to generate Python  
  * bindings. Invoke swig as follows:  
  *  
  *      swig -python -shadow dbflib.i  
  *  
  * to generate dbflib_wrap.c and dbflib.py. dbflib_wrap.c defines a  
 * bunch of Python-functions that wrap the appropriate dbflib functions
  * and dbflib.py contains an object oriented wrapper around  
  * dbflib_wrap.c.  
  *  
  * This module defines one object type: DBFFile.  
  */  
   
 /* this is the dbflib module */  
 %module dbflib  
   
 /* first a %{,%} block. These blocks are copied verbatim to the  
  * dbflib_wrap.c file and are not parsed by SWIG. This is the place to  
  * import headerfiles and define helper-functions that are needed by the  
  * automatically generated wrappers.  
  */  
2    
Added in v.2744:

/* --- DBFFile ------------------------------------------------------------------------------------------------------- */

typedef struct {
        PyObject_HEAD
        DBFHandle handle;
} DBFFileObject;

Removed from v.2453:

%{
#include "shapefil.h"

 /* Read one attribute from the dbf handle and return it as a new python object  
  *  
  * If an error occurs, set the appropriate Python exception and return  
  * NULL.  
  *  
  * Assume that the values of the record and field arguments are valid.  
  * The name argument will be passed to DBFGetFieldInfo as is and should  
  * thus be either NULL or a pointer to an array of at least 12 chars  
  */  
 static PyObject *  
 do_read_attribute(DBFInfo * handle, int record, int field, char * name)  
 {  
     int type, width;  
     PyObject *value;  
   
     type = DBFGetFieldInfo(handle, field, name, &width, NULL);  
     /* For strings NULL and the empty string are indistinguishable  
      * in DBF files. We prefer empty strings instead for backwards  
      * compatibility reasons because older wrapper versions returned  
       * empty strings as empty strings.
      */  
     if (type != FTString && DBFIsAttributeNULL(handle, record, field))  
     {  
         value = Py_None;  
         Py_INCREF(value);  
     }  
     else  
     {  
         switch (type)  
         {  
         case FTString:  
         {  
             const char * temp = DBFReadStringAttribute(handle, record, field);  
             if (temp)  
             {  
                 value = PyString_FromString(temp);  
             }  
             else  
             {  
                 PyErr_Format(PyExc_IOError,  
                              "Can't read value for row %d column %d",  
                              record, field);  
                 value = NULL;  
             }  
             break;  
         }  
         case FTInteger:  
             value = PyInt_FromLong(DBFReadIntegerAttribute(handle, record,  
                                                            field));  
             break;  
         case FTDouble:  
             value = PyFloat_FromDouble(DBFReadDoubleAttribute(handle, record,  
                                                               field));  
             break;  
         default:  
             PyErr_Format(PyExc_TypeError, "Invalid field data type %d",  
                          type);  
             value = NULL;  
         }  
     }  
     if (!value)  
         return NULL;  
10    
     return value;  
 }      
11    
Added in v.2744:

/* allocator
*/
static PyObject* dbffile_new(PyTypeObject* type, PyObject* args, PyObject* kwds)
{
        DBFFileObject* self;
        self = (DBFFileObject*) type->tp_alloc(type, 0);
        self->handle = NULL;
        return (PyObject*) self;
}

Removed from v.2453:

/* the read_attribute method. Return the value of the given record and
 * field as a python object of the appropriate type.
 *
 * In case of error, set a python exception and return NULL. Since that
 * value will be returned to the python interpreter as is, the
 * interpreter should recognize the exception.
 */

static PyObject *
 DBFInfo_read_attribute(DBFInfo * handle, int record, int field)  
 {  
     if (record < 0 || record >= DBFGetRecordCount(handle))  
     {  
         PyErr_Format(PyExc_ValueError,  
                      "record index %d out of bounds (record count: %d)",  
                      record, DBFGetRecordCount(handle));  
         return NULL;  
     }  
21    
     if (field < 0 || field >= DBFGetFieldCount(handle))  
     {  
         PyErr_Format(PyExc_ValueError,  
                      "field index %d out of bounds (field count: %d)",  
                      field, DBFGetFieldCount(handle));  
         return NULL;  
     }  
22    
     return do_read_attribute(handle, record, field, NULL);  
 }  
       
23    
Added in v.2744:

/* deallocator
*/
static void dbffile_dealloc(DBFFileObject* self)
{
        DBFClose(self->handle);
        self->handle = NULL;
        self->ob_type->tp_free((PyObject*)self);
}

Removed from v.2453:

/* the read_record method. Return the record record as a dictionary
 * whose keys are the names of the fields and whose values have the
 * appropriate Python type.
 *
 * In case of error, set a python exception and return NULL. Since that
 * value will be returned to the python interpreter as is, the
 * interpreter should recognize the exception.
 */
   
 static PyObject *  
 DBFInfo_read_record(DBFInfo * handle, int record)  
 {  
     int num_fields;  
     int i;  
     int type, width;  
     char name[12];  
     PyObject *dict;  
     PyObject *value;  
   
     if (record < 0 || record >= DBFGetRecordCount(handle))  
     {  
         PyErr_Format(PyExc_ValueError,  
                      "record index %d out of bounds (record count: %d)",  
                      record, DBFGetRecordCount(handle));  
         return NULL;  
     }  
32    
     dict = PyDict_New();  
     if (!dict)  
         return NULL;  
           
     num_fields = DBFGetFieldCount(handle);  
     for (i = 0; i < num_fields; i++)  
     {  
         value = do_read_attribute(handle, record, i, name);  
         if (!value)  
             goto fail;  
   
         PyDict_SetItemString(dict, name, value);  
         Py_DECREF(value);  
     }  
   
     return dict;  
   
  fail:  
     Py_XDECREF(dict);  
     return NULL;  
 }  
   
 /* the write_record method. Write the record record given either as a
  * dictionary or a sequence (i.e. a list or a tuple).  
  *  
  * If it's a dictionary the keys must be the names of the fields and  
  * their value must have a suitable type. Only the fields actually  
  * contained in the dictionary are written. Fields for which there's no  
  * item in the dict are not modified.  
  *  
  * If it's a sequence, all fields must be present in the right order.  
  *  
  * In case of error, set a python exception and return NULL. Since that  
  * value will be returned to the python interpreter as is, the  
  * interpreter should recognize the exception.  
  *  
  * The method is implemented with two c-functions, write_field to write  
  * a single field and DBFInfo_write_record as the front-end.  
  */  
33    
34    
Added in v.2744:

/* constructor
*/
static int dbffile_init(DBFFileObject* self, PyObject* args, PyObject* kwds)
{
        char* file;
        char* mode = "rb";
        if (kwds != NULL && PyDict_Size(kwds) > 0)
        {
                PyErr_Format(PyExc_TypeError, "dbflib.DBFFile.__init__ takes no keyword arguments");
                return -1;
        }

        if (!PyArg_ParseTuple(args, "et|s:__init__", Py_FileSystemDefaultEncoding, &file, &mode)) return -1;

        self->handle = DBFOpen(file, mode);
        PyMem_Free(file);

        return self->handle ? 0 : -1;
}

Removed from v.2453:

/* write a single field of a record. */
static int
write_field(DBFHandle handle, int record, int field, int type,
            PyObject * value)
{
    char * string_value;
    int int_value;
     double double_value;  
   
     if (value == Py_None)  
     {  
         if (!DBFWriteNULLAttribute(handle, record, field))  
         {  
             PyErr_Format(PyExc_IOError,  
                          "can't write NULL field %d of record %d",  
                          field, record);  
             return 0;  
         }  
     }  
     else  
     {  
         switch (type)  
         {  
         case FTString:  
             string_value = PyString_AsString(value);  
             if (!string_value)  
                 return 0;  
             if (!DBFWriteStringAttribute(handle, record, field, string_value))  
             {  
                 PyErr_Format(PyExc_IOError,  
                              "can't write field %d of record %d",  
                              field, record);  
                 return 0;  
             }  
             break;  
   
         case FTInteger:  
             int_value = PyInt_AsLong(value);  
             if (int_value == -1 && PyErr_Occurred())  
                 return 0;  
             if (!DBFWriteIntegerAttribute(handle, record, field, int_value))  
             {  
                 PyErr_Format(PyExc_IOError,  
                              "can't write field %d of record %d",  
                              field, record);  
                 return 0;  
             }  
             break;  
   
         case FTDouble:  
             double_value = PyFloat_AsDouble(value);  
             if (double_value == -1 && PyErr_Occurred())  
                 return 0;  
             if (!DBFWriteDoubleAttribute(handle, record, field, double_value))  
             {  
                 PyErr_Format(PyExc_IOError,  
                              "can't write field %d of record %d",  
                              field, record);  
                 return 0;  
             }  
             break;  
   
         default:  
             PyErr_Format(PyExc_TypeError, "Invalid field data type %d", type);  
             return 0;  
         }  
     }  
   
     return 1;  
 }  
   
 static  
 PyObject *  
 DBFInfo_write_record(DBFHandle handle, int record, PyObject *record_object)  
 {  
     int num_fields;  
     int i, length;  
     int type, width;  
     char name[12];  
     PyObject * value = NULL;  
   
     num_fields = DBFGetFieldCount(handle);  
   
     /* We used to use PyMapping_Check to test whether record_object is a  
      * dictionary like object instead of PySequence_Check to test  
      * whether it's a sequence. Unfortunately in Python 2.3  
      * PyMapping_Check returns true for lists and tuples too so the old  
      * approach doesn't work anymore.  
      */  
     if (PySequence_Check(record_object))  
     {  
         /* It's a sequence object. Iterate through all items in the  
          * sequence and write them to the appropriate field.  
          */  
         length = PySequence_Length(record_object);  
         if (length != num_fields)  
         {  
             PyErr_SetString(PyExc_TypeError,  
                             "record must have one item for each field");  
             goto fail;  
         }  
         for (i = 0; i < length; i++)  
         {  
             type = DBFGetFieldInfo(handle, i, name, &width, NULL);  
             value = PySequence_GetItem(record_object, i);  
             if (value)  
             {  
                 if (!write_field(handle, record, i, type, value))  
                     goto fail;  
                 Py_DECREF(value);  
             }  
             else  
             {  
                 goto fail;  
             }  
         }  
     }  
     else  
     {  
         /* It's a dictionary-like object. Iterate over the names of the  
          * known fields and write the corresponding item  
          */  
         for (i = 0; i < num_fields; i++)  
        {
            type = DBFGetFieldInfo(handle, i, name, &width, NULL);

            /* if the dictionary has the key name write that object to
             * the appropriate field, otherwise just clear the python
             * exception and do nothing.
             */
            value = PyMapping_GetItemString(record_object, name);
            if (value)
             {  
                 if (!write_field(handle, record, i, type, value))  
                     goto fail;  
                 Py_DECREF(value);  
             }  
             else  
             {  
                 PyErr_Clear();  
             }  
         }  
     }  
   
     Py_INCREF(Py_None);  
     return Py_None;  
   
  fail:  
     Py_XDECREF(value);  
     return NULL;  
}
 %}  
54    
55    
Added in v.2744:

static PyObject* dbffile_close(DBFFileObject* self)
{
        DBFClose(self->handle);
        self->handle = NULL;
        Py_RETURN_NONE;
}

Removed from v.2453:

/* The commit method implementation
 *
 * The method relies on the DBFUpdateHeader method which is not
 * available in shapelib <= 1.2.10.  setup.py defines
 * HAVE_UPDATE_HEADER's value depending on whether the function is
 * available in the shapelib version the code is compiled with.
 */
%{
static
void
DBFInfo_commit(DBFHandle handle)
{
#if HAVE_UPDATE_HEADER
    DBFUpdateHeader(handle);
#endif
}
%}
63    
64    
 /*  
  * The SWIG Interface definition.  
  */  
   
 /* include some common SWIG type definitions and standard exception  
    handling code */  
 %include typemaps.i  
 %include exception.i  
   
 /* As for ShapeFile in shapelib.i, We define a new C-struct that holds  
  * the DBFHandle. This is mainly done so we can separate the close()  
  * method from the destructor but it also helps with exception handling.  
  *  
  * After the DBFFile has been opened or created the handle is not NULL.  
  * The close() method closes the file and sets handle to NULL as an  
  * indicator that the file has been closed.  
  */  
65    
Added in v.2744:

static PyObject* dbffile_field_count(DBFFileObject* self)
{
        return PyInt_FromLong((long)DBFGetFieldCount(self->handle));
}

Removed from v.2453:

%{
    typedef struct {
        DBFHandle handle;
    } DBFFile;
%}
70    
71    
 /* The first argument to the DBFFile methods is a DBFFile pointer.  
  * We have to check whether handle is not NULL in most methods but not  
  * all. In the destructor and the close method, it's OK for handle to be  
  * NULL. We achieve this by checking whether the preprocessor macro  
  * NOCHECK_$name is defined. SWIG replaces $name with the name of the  
  * function for which the code is inserted. In the %{,%}-block below we  
  * define the macros for the destructor and the close() method.  
  */  
72    
Added in v.2744:

static PyObject* dbffile_record_count(DBFFileObject* self)
{
        return PyInt_FromLong((long)DBFGetRecordCount(self->handle));
}

Removed from v.2453:

%typemap(python,check) DBFFile *{
%#ifndef NOCHECK_$name
    if (!$target || !$target->handle)
        SWIG_exception(SWIG_TypeError, "dbffile already closed");
%#endif
}
77    
 %{  
 #define NOCHECK_delete_DBFFile  
 #define NOCHECK_DBFFile_close  
 %}  
   
78    
 /* An exception handle for the constructor and the module level open()  
  * and create() functions.  
  *  
  * Annoyingly, we *have* to put braces around the SWIG_exception()  
  * calls, at least in the python case, because of the way the macro is  
  * written. Of course, always putting braces around the branches of an  
  * if-statement is often considered good practice.  
  */  
 %typemap(python,except) DBFFile * {  
     $function;  
     if (!$source)  
     {  
         SWIG_exception(SWIG_MemoryError, "no memory");  
     }  
     else if (!$source->handle)  
     {  
         SWIG_exception(SWIG_IOError, "$name failed");  
     }  
 }  
79    
Added in v.2744:

static PyObject* dbffile_field_info(DBFFileObject* self, PyObject* args)
{
        char field_name[12];
        int field, width = 0, decimals = 0, field_type;

        if (!PyArg_ParseTuple(args, "i:field_info", &field)) return NULL;

        field_name[0] = '\0';
        field_type = DBFGetFieldInfo(self->handle, field, field_name, &width, &decimals);

        return Py_BuildValue("isii", field_type, field_name, width, decimals);
}

Removed from v.2453:

/* Exception handler for the add_field method */
%typemap(python,except) int DBFFile_add_field {
    $function;
    if ($source < 0)
    {
        SWIG_exception(SWIG_RuntimeError, "add_field failed");
    }
}
92    
 /* define and use some typemaps for the field_info() method whose  
  * C-implementation has three output parameters that are returned  
  * through pointers passed into the function. SWIG already has  
  * definitions for common types such as int* and we can use those for  
  * the last two parameters:  
  */  
93    
 %apply int * OUTPUT { int * output_width }  
 %apply int * OUTPUT { int * output_decimals }  
94    
Added in v.2744:

static PyObject* dbffile_add_field(DBFFileObject* self, PyObject* args)
{
        char* name;
        int type, width, decimals;
        int field;

        if (!PyArg_ParseTuple(args, "siii:add_field", &name, &type, &width, &decimals)) return NULL;

        field = DBFAddField(self->handle, name, (DBFFieldType)type, width, decimals);

        if (field < 0)
        {
                PyErr_SetString(PyExc_ValueError, "Failed to add field due to inappropriate field definition");
                return NULL;
        }
        return PyInt_FromLong((long)field);
}

Removed from v.2453:

/* the fieldname has to be defined manually: */
%typemap(python,ignore) char *fieldname_out(char temp[12]) {
    $target = temp;
}
112    
 %typemap(python,argout) char *fieldname_out() {  
     PyObject * string = PyString_FromString($source);  
     $target = t_output_helper($target,string);  
 }  
113    
114    
115    /* Read one attribute from the dbf handle and return it as a new python object
116    *
117    * If an error occurs, set the appropriate Python exception and return
118    * NULL.
119    *
120    * Assume that the values of the record and field arguments are valid.
121    * The name argument will be passed to DBFGetFieldInfo as is and should
122    * thus be either NULL or a pointer to an array of at least 12 chars
123    */
124    static PyObject* do_read_attribute(DBFHandle handle, int record, int field, char * name)
125    {
126            int type, width;
127            const char* temp;
128            type = DBFGetFieldInfo(handle, field, name, &width, NULL);
129            
130            /* For strings NULL and the empty string are indistinguishable
131            * in DBF files. We prefer empty strings instead for backwards
132            * compatibility reasons because older wrapper versions returned
133            * empty strings as empty strings.
134            */
135            if (type != FTString && DBFIsAttributeNULL(handle, record, field))
136            {
137                    Py_RETURN_NONE;
138            }
139            else
140            {
141                    switch (type)
142                    {
143                    case FTString:
144                            temp = DBFReadStringAttribute(handle, record, field);
145                            if (!temp)
146                            {
147                                    PyErr_Format(PyExc_IOError,
148                                                    "Can't read value for row %d column %d",
149                                                    record, field);
150                                    return NULL;
151                            }
152                            return PyString_FromString(temp);
153    
154                    case FTInteger:
155                            return PyInt_FromLong((long)DBFReadIntegerAttribute(handle, record, field));
156    
157                    case FTDouble:
158                            return PyFloat_FromDouble(DBFReadDoubleAttribute(handle, record, field));
159    
160                    default:
161                            PyErr_Format(PyExc_TypeError, "Invalid field data type %d", type);
162                            return NULL;
163                    }
164            }
165    }    
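From the caller's side, the NULL handling in do_read_attribute means that a NULL value in a non-string field comes back as None, while a NULL string field is indistinguishable from an empty string and comes back as "". A minimal Python sketch, assuming a hypothetical table.dbf whose first record has NULL values in a string field 0 and a numeric field 1:

    import dbflib

    dbf = dbflib.open("table.dbf")        # hypothetical file
    print dbf.read_attribute(0, 0)        # NULL string field  -> "" (empty string)
    print dbf.read_attribute(0, 1)        # NULL numeric field -> None
    dbf.close()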
166    
167    
/* the read_attribute method. Return the value of the given record and
* field as a python object of the appropriate type.
*/
static PyObject* dbffile_read_attribute(DBFFileObject* self, PyObject* args)
{
        int record, field;

        if (!PyArg_ParseTuple(args, "ii:read_field", &record, &field)) return NULL;

        if (record < 0 || record >= DBFGetRecordCount(self->handle))
        {
                PyErr_Format(PyExc_ValueError,
                                "record index %d out of bounds (record count: %d)",
                                record, DBFGetRecordCount(self->handle));
                return NULL;
        }

        if (field < 0 || field >= DBFGetFieldCount(self->handle))
        {
                PyErr_Format(PyExc_ValueError,
                                "field index %d out of bounds (field count: %d)",
                                field, DBFGetFieldCount(self->handle));
                return NULL;
        }

        return do_read_attribute(self->handle, record, field, NULL);
}
196    
197    
198    
199    /* the read_record method. Return the record record as a dictionary
200    * whose keys are the names of the fields and whose values have the
201    * appropriate Python type.
202    */
203    static PyObject* dbffile_read_record(DBFFileObject* self, PyObject* args)
204    {
205            int record;
206            int num_fields;
207            int i;
208            char name[12];
209            PyObject *dict;
210            PyObject *value = NULL;
211    
212            if (!PyArg_ParseTuple(args, "i:read_record", &record)) return NULL;
213    
214            if (record < 0 || record >= DBFGetRecordCount(self->handle))
215            {
216                    PyErr_Format(PyExc_ValueError,
217                            "record index %d out of bounds (record count: %d)",
218                            record, DBFGetRecordCount(self->handle));
219                    return NULL;
        }

        dict = PyDict_New();
        if (!dict) return NULL;
224            
225            num_fields = DBFGetFieldCount(self->handle);
226            for (i = 0; i < num_fields; i++)
227            {
228                    value = do_read_attribute(self->handle, record, i, name);
229                    if (!value || PyDict_SetItemString(dict, name, value) < 0) goto fail;
230                    Py_DECREF(value);
231                    value = NULL;
        }

        return dict;

fail:
        Py_XDECREF(value);
238            Py_DECREF(dict);
239            return NULL;
240    }
241    
242    
243    
244    /* write a single field of a record. */
245    static int do_write_field(DBFHandle handle, int record, int field, int type, PyObject* value)
246    {
247            char * string_value;
248            int int_value;
249            double double_value;
250    
251            if (value == Py_None)
252            {
253                    if (!DBFWriteNULLAttribute(handle, record, field))
254                    {
255                            PyErr_Format(PyExc_IOError,
256                                    "can't write NULL field %d of record %d",
257                                    field, record);
258                            return 0;
259                    }
        }
        else
        {
                switch (type)
264                    {
265                    case FTString:
266                            string_value = PyString_AsString(value);
267                            if (!string_value) return 0;
268                            if (!DBFWriteStringAttribute(handle, record, field, string_value))
269                            {
270                                    PyErr_Format(PyExc_IOError,
271                                                    "can't write field %d of record %d",
272                                                    field, record);
273                                    return 0;
274                            }
275                            break;
276    
277                    case FTInteger:
278                            int_value = PyInt_AsLong(value);
279                            if (int_value == -1 && PyErr_Occurred()) return 0;
280                            if (!DBFWriteIntegerAttribute(handle, record, field, int_value))
281                            {
282                                    PyErr_Format(PyExc_IOError,
283                                                    "can't write field %d of record %d",
284                                                    field, record);
285                                    return 0;
286                            }
287                            break;
288    
289                    case FTDouble:
290                            double_value = PyFloat_AsDouble(value);
291                            if (double_value == -1 && PyErr_Occurred()) return 0;
292                            if (!DBFWriteDoubleAttribute(handle, record, field, double_value))
293                            {
294                                    PyErr_Format(PyExc_IOError,
295                                                    "can't write field %d of record %d",
296                                                    field, record);
297                                    return 0;
298                            }
299                            break;
300    
301                    default:
302                            PyErr_Format(PyExc_TypeError, "Invalid field data type %d", type);
303                            return 0;
304                    }
        }

        return 1;
}


static PyObject* dbffile_write_field(DBFFileObject* self, PyObject* args)
{
        int record, field;
315            PyObject* value;
316            int type;
317    
318            if (!PyArg_ParseTuple(args, "iiO:write_field", &record, &field, &value)) return NULL;
319            
320            if (field < 0 || field >= DBFGetFieldCount(self->handle))
321            {
322                    PyErr_Format(PyExc_ValueError,
323                                    "field index %d out of bounds (field count: %d)",
324                                    field, DBFGetFieldCount(self->handle));
325                    return NULL;
        }

        type = DBFGetFieldInfo(self->handle, field, NULL, NULL, NULL);
        if (!do_write_field(self->handle, record, field, type, value)) return NULL;
        Py_RETURN_NONE;
331    }
332    
333    
334    
335    static PyObject* dbffile_write_record(DBFFileObject* self, PyObject* args)
336    {
337            int record;
338            PyObject* record_object;
339            int i, num_fields;
340            
341            int type;
342            char name[12];
343            PyObject* value = NULL;
344            
345            if (!PyArg_ParseTuple(args, "iO:write_record", &record, &record_object)) return NULL;
346            
347            num_fields = DBFGetFieldCount(self->handle);
348            
349            /* mimic ShapeFile functionality where id = -1 means appending */
350            if (record == -1)
351            {
                record = DBFGetRecordCount(self->handle);  /* append after the last existing record */
        }

        if (PySequence_Check(record_object))
        {
357                    /* It's a sequence object. Iterate through all items in the
358                    * sequence and write them to the appropriate field.
359                    */
360                    if (PySequence_Length(record_object) != num_fields)
361                    {
362                            PyErr_SetString(PyExc_TypeError, "record must have one item for each field");
363                            return NULL;
364                    }
365                    for (i = 0; i < num_fields; ++i)
366                    {
367                            type = DBFGetFieldInfo(self->handle, i, NULL, NULL, NULL);
368                            value = PySequence_GetItem(record_object, i);
369                            if (!value) return NULL;
370                            if (!do_write_field(self->handle, record, i, type, value))
371                            {
372                                    Py_DECREF(value);
373                                    return NULL;
374                            }
375                            Py_DECREF(value);
376                    }
        }
        else
        {
                /* It's a dictionary-like object. Iterate over the names of the
                * known fields and write the corresponding item
382                    */
383                    for (i = 0; i < num_fields; ++i)
384                    {
385                            name[0] = '\0';
386                            type = DBFGetFieldInfo(self->handle, i, name, NULL, NULL);
387                            value = PyDict_GetItemString(record_object, name);
388                            if (value && !do_write_field(self->handle, record, i, type, value)) return NULL;
389                    }
390            }
391            
392            return PyInt_FromLong((long)record);
393    }
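As the code above shows, write_record accepts either a sequence with one item per field or a dict keyed by field name (only the named fields are written), and in this revision a record index of -1 appends. A hedged Python sketch, assuming a hypothetical table.dbf with a string field NAME and an integer field VALUE:

    import dbflib

    dbf = dbflib.open("table.dbf", "r+b")            # "r+b" assumed as a writable DBFOpen mode
    dbf.write_record(0, ["first", 1])                # sequence: one item per field
    dbf.write_record(1, {"NAME": "second"})          # dict: only the fields named here are written
    new_index = dbf.write_record(-1, ["third", 3])   # -1 appends; returns the record index used
    dbf.close()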
394    
395    
396    
397    static PyObject* dbffile_repr(DBFFileObject* self)
398    {
399            /* TODO: it would be nice to do something like "dbflib.DBFFile(filename, mode)" instead */
400            return PyString_FromFormat("<dbflib.DBFFile object at %p>", self->handle);
401    }
402    
403    
404    
405    /* The commit method implementation
406    *
407    * The method relies on the DBFUpdateHeader method which is not
408    * available in shapelib <= 1.2.10.  setup.py defines
409    * HAVE_UPDATE_HEADER's value depending on whether the function is
410    * available in the shapelib version the code is compiled with.
411    */
412    #if HAVE_UPDATE_HEADER
413    static PyObject* dbffile_commit(DBFFileObject* self)
414    {
415            DBFUpdateHeader(self->handle);
416            Py_RETURN_NONE;
417    }
418    #endif
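Because commit is only compiled in when HAVE_UPDATE_HEADER is set, portable callers can key off the module-level _have_commit constant exported in initdbflib below. A small sketch (the file name is made up):

    import dbflib

    dbf = dbflib.open("table.dbf", "r+b")   # hypothetical file, writable mode
    # ... write some records ...
    if dbflib._have_commit:
        dbf.commit()
    dbf.close()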
419    
420    
421    
422    static struct PyMethodDef dbffile_methods[] =
423    {
424            {"close", (PyCFunction)dbffile_close, METH_NOARGS,
425                    "close()\n"
426                    "close DBFFile"},
427            {"field_count", (PyCFunction)dbffile_field_count, METH_NOARGS,
428                    "field_count()\n"
429                    "returns number of fields currently defined"},
430            {"record_count", (PyCFunction)dbffile_record_count, METH_NOARGS,
431                    "record_count()\n"
432                    "returns number of records that currently exist"},
433            {"field_info", (PyCFunction)dbffile_field_info, METH_VARARGS,
434                    "field_info(field_index)\n"
435                    "returns info of a field as a tuple (type, name, width, decimals) with:\n"
436                    "-type: the type of the field corresponding to the integer value of one of the constants FTString, FTInteger, ...\n"
437                    "-name: the name of the field as a string\n"
438                    "-width: the width of the field as a number of characters\n"
439                    "-decimals: the number of decimal digits" },
440            {"add_field", (PyCFunction)dbffile_add_field, METH_VARARGS,
441                    "add_field(name, type, width, decimals)\n"
442                    "adds a new field and returns field index if successful\n"
443                    "-type: the type of the field corresponding to the integer value of one of the constants FTString, FTInteger, ...\n"
444                    "-name: the name of the field as a string\n"
445                    "-width: the width of the field as a number of characters\n"
446                    "-decimals: the number of decimal digits" },
447            {"read_attribute", (PyCFunction)dbffile_read_attribute, METH_VARARGS,
448                    "read_attribute(record_index, field_index)\n"
449                    "return the value of one field of a record"},
450            {"read_record", (PyCFunction)dbffile_read_record, METH_VARARGS,
451                    "read_record(record_index)\n"
452                    "return an entire record as a dict of field names and values"},
453            {"write_field", (PyCFunction)dbffile_write_field, METH_VARARGS,
454                    "write_field(record_index, field_index, new_value)\n"
455                    "write a single field of a record"},
456            {"write_record", (PyCFunction)dbffile_write_record, METH_VARARGS,
457                    "write_record(record_index, record)\n"
458                    "write an entire record as a dict or a sequence\n"
459                    "record can either be a dictionary in which case the keys are used as field names, "
460                    "or a sequence that must have an item for every field (length = field_count())"},
461    #if HAVE_UPDATE_HEADER
462            {"commit", (PyCFunction)dbffile_commit, METH_NOARGS,
463                    "commit()"},
464    #endif
465            {NULL}
466    };
467    
468    
469    
470    static struct PyGetSetDef dbffile_getsetters[] =
471    {
472            {NULL}
473    };
474    
static PyTypeObject DBFFileType = PYSHAPELIB_DEFINE_TYPE(DBFFileObject, dbffile, "shapelib.DBFFile", 0);


Removed from v.2453:

/*
 * The SWIG-version of the DBFFile struct
 */
typedef struct
{
    %addmethods {
        DBFFile(const char *file, const char * mode = "rb") {
            DBFFile * self = malloc(sizeof(DBFFile));
            if (self)
                self->handle = DBFOpen(file, mode);
            return self;
        }

        ~DBFFile() {
            if (self->handle)
                DBFClose(self->handle);
            free(self);
        }

        void close() {
            if (self->handle)
                DBFClose(self->handle);
            self->handle = NULL;
        }

        int field_count() {
            return DBFGetFieldCount(self->handle);
        }

        int record_count() {
            return DBFGetRecordCount(self->handle);
        }

        int field_info(int iField, char * fieldname_out,
                       int * output_width, int * output_decimals) {
            return DBFGetFieldInfo(self->handle, iField, fieldname_out,
                                   output_width, output_decimals);
        }

        PyObject * read_record(int record) {
            return DBFInfo_read_record(self->handle, record);
        }

        PyObject * read_attribute(int record, int field) {
            return DBFInfo_read_attribute(self->handle, record, field);
        }

        int add_field(const char * pszFieldName, DBFFieldType eType,
                      int nWidth, int nDecimals) {
            return DBFAddField(self->handle, pszFieldName, eType, nWidth,
                               nDecimals);
        }

        PyObject *write_record(int record, PyObject *dict_or_sequence) {
            return DBFInfo_write_record(self->handle, record,
                                        dict_or_sequence);
        }

        void commit() {
            DBFInfo_commit(self->handle);
        }
        /* Delete the commit method from the class if it doesn't have a
         * real implementation.
         */
        %pragma(python) addtomethod="__class__:if not dbflibc._have_commit: del commit"

        /* The __del__ method generated by the old SWIG version we're using
         * tries to access self.thisown which may not be set at all when
         * there was an exception during construction.  Therefore we
         * override it with our own version.
         * FIXME: It would be better to upgrade to a newer SWIG version
         * or to get rid of SWIG entirely.
         */
        %pragma(python) addtoclass = "
    def __del__(self,dbflibc=dbflibc):
        if getattr(self, 'thisown', 0):
            dbflibc.delete_DBFFile(self)
    "
    }
} DBFFile;
478    
479    
 /*  
  * Two module level functions, open() and create() that correspond to  
  * DBFOpen and DBFCreate respectively. open() is equivalent to the  
  * DBFFile constructor.  
  */  
480    
481    /* --- dbflib -------------------------------------------------------------------------------------------------------- */
482    
Added in v.2744:

static PyObject* dbflib_open(PyObject* module, PyObject* args)
{
        return PyObject_CallObject((PyObject*)&DBFFileType, args);
}

Removed from v.2453:

%{
    DBFFile * open_DBFFile(const char * file, const char * mode)
    {
        DBFFile * self = malloc(sizeof(DBFFile));
         if (self)  
             self->handle = DBFOpen(file, mode);  
         return self;  
     }  
 %}  
487    
 %name(open) %new DBFFile * open_DBFFile(const char * file,  
                                         const char * mode = "rb");  
488    
 %{  
     DBFFile * create_DBFFile(const char * file)  
     {  
         DBFFile * self = malloc(sizeof(DBFFile));  
         if (self)  
             self->handle = DBFCreate(file);  
         return self;  
     }  
 %}  
 %name(create) %new DBFFile * create_DBFFile(const char * file);  
489    
490    static PyObject* dbflib_create(PyObject* module, PyObject* args)
491    {
492            char* file;
493            DBFFileObject* result;
494            
495            if (!PyArg_ParseTuple(args, "et:create", Py_FileSystemDefaultEncoding, &file)) return NULL;
496            
497            result = PyObject_New(DBFFileObject, &DBFFileType);
498            if (!result)
499            {
500                    return PyErr_NoMemory();
501            }
502            
503            result->handle = DBFCreate(file);
504            if (!result->handle)
505            {
506                    PyObject_Del((PyObject*)result);
507                    PyErr_SetString(PyExc_RuntimeError, "Failed to create DBFFile");
508                    return NULL;
509            }
510            
511            return (PyObject*) result;
512    }
513    
514    
Removed from v.2453:

/* constant definitions copied from shapefil.h */
typedef enum {
  FTString,
  FTInteger,
  FTDouble,
  FTInvalid
} DBFFieldType;

Added in v.2744:

static struct PyMethodDef dbflib_methods[] =
{
        {"open", (PyCFunction)dbflib_open, METH_VARARGS,
                "open(filename [, mode])\n"
                "open a DBFFile" },
        {"create", (PyCFunction)dbflib_create, METH_VARARGS,
522                    "create(filename)\n"
523                    "create a DBFFile" },
524            {NULL}
525    };
526    
527    
 /* Put the value of the HAVE_UPDATE_HEADER preprocessor macro into the  
  * wrapper so that the __class__ pragma above knows when to remove the  
  * commit method  
  */  
 const int _have_commit = HAVE_UPDATE_HEADER;  
528    
529    PyMODINIT_FUNC initdbflib(void)
530    {
531            PyObject* module = Py_InitModule("dbflib", dbflib_methods);
532            if (!module) return;
533            
534            PYSHAPELIB_ADD_TYPE(DBFFileType, "DBFFile");
535            
536            PYSHAPELIB_ADD_CONSTANT(FTString);
537            PYSHAPELIB_ADD_CONSTANT(FTInteger);
538            PYSHAPELIB_ADD_CONSTANT(FTDouble);
539            PYSHAPELIB_ADD_CONSTANT(FTInvalid);
540            PyModule_AddIntConstant(module, "_have_commit", HAVE_UPDATE_HEADER);
541    }
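Taken together, the module-level open()/create() functions and the DBFFile methods registered above support a round trip like the following sketch. The file name and field layout are made up for illustration, and the FT* names are assumed to be exported by PYSHAPELIB_ADD_CONSTANT under those module-level names:

    import dbflib

    # create a new table with one string and one double field
    dbf = dbflib.create("example.dbf")
    dbf.add_field("NAME", dbflib.FTString, 20, 0)
    dbf.add_field("AREA", dbflib.FTDouble, 12, 3)
    dbf.write_record(0, {"NAME": "parcel-1", "AREA": 42.5})
    dbf.close()

    # reopen and read it back
    dbf = dbflib.open("example.dbf")
    print dbf.field_count(), dbf.record_count()   # 2 1
    print dbf.field_info(0)                       # (type, name, width, decimals)
    print dbf.read_record(0)                      # {'NAME': 'parcel-1', 'AREA': 42.5}
    dbf.close()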
