/[thuban]/branches/WIP-pyshapelib-bramz/libraries/pyshapelib/dbflibmodule.c

Diff of /branches/WIP-pyshapelib-bramz/libraries/pyshapelib/dbflibmodule.c


branches/WIP-pyshapelib-bramz/libraries/pyshapelib/dbflib.i revision 2734 by bramz, Thu Mar 1 12:42:59 2007 UTC
branches/WIP-pyshapelib-bramz/libraries/pyshapelib/dbflibmodule.c revision 2742 by bramz, Wed Mar 14 16:26:14 2007 UTC
/* SWIG (www.swig.org) interface file for the dbf interface of shapelib
 *
 * At the moment (Dec 2000) this file is only useful to generate Python
 * bindings. Invoke swig as follows:
 *
 *      swig -python -shadow dbflib.i
 *
 * to generate dbflib_wrap.c and dbflib.py. dbflib_wrap.c defines a
 * bunch of Python functions that wrap the appropriate dbflib functions
 * and dbflib.py contains an object oriented wrapper around
 * dbflib_wrap.c.
 *
 * This module defines one object type: DBFFile.
 */

/* this is the dbflib module */
%module dbflib

/* first a %{,%} block. These blocks are copied verbatim to the
 * dbflib_wrap.c file and are not parsed by SWIG. This is the place to
 * import header files and define helper functions that are needed by the
 * automatically generated wrappers.
 */
%{
#include "shapefil.h"

#include "pyshapelib_common.h"

/* --- DBFFile ------------------------------------------------------------------------------------------------------- */

typedef struct {
        PyObject_HEAD
        DBFHandle handle;
} DBFFileObject;
 /* Read one attribute from the dbf handle and return it as a new python object  
  *  
  * If an error occurs, set the appropriate Python exception and return  
  * NULL.  
  *  
  * Assume that the values of the record and field arguments are valid.  
  * The name argument will be passed to DBFGetFieldInfo as is and should  
  * thus be either NULL or a pointer to an array of at least 12 chars  
  */  
 static PyObject *  
 do_read_attribute(DBFInfo * handle, int record, int field, char * name)  
 {  
     int type, width;  
     PyObject *value;  
   
     type = DBFGetFieldInfo(handle, field, name, &width, NULL);  
     /* For strings NULL and the empty string are indistinguishable  
      * in DBF files. We prefer empty strings instead for backwards  
      * compatibility reasons because older wrapper versions returned  
     * empty strings as empty strings.
      */  
     if (type != FTString && DBFIsAttributeNULL(handle, record, field))  
     {  
         value = Py_None;  
         Py_INCREF(value);  
     }  
     else  
     {  
         switch (type)  
         {  
         case FTString:  
         {  
             const char * temp = DBFReadStringAttribute(handle, record, field);  
             if (temp)  
             {  
                 value = PyString_FromString(temp);  
             }  
             else  
             {  
                 PyErr_Format(PyExc_IOError,  
                              "Can't read value for row %d column %d",  
                              record, field);  
                 value = NULL;  
             }  
             break;  
         }  
         case FTInteger:  
             value = PyInt_FromLong(DBFReadIntegerAttribute(handle, record,  
                                                            field));  
             break;  
         case FTDouble:  
             value = PyFloat_FromDouble(DBFReadDoubleAttribute(handle, record,  
                                                               field));  
             break;  
         default:  
             PyErr_Format(PyExc_TypeError, "Invalid field data type %d",  
                          type);  
             value = NULL;  
         }  
     }  
     if (!value)  
         return NULL;  
10    
     return value;  
 }      
11    
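The None-versus-empty-string rule in do_read_attribute above is visible from Python: a NULL value in a non-string field reads back as None, while a NULL string field reads back as ''. A minimal Python sketch, assuming the module is importable as dbflib; the path and field names are only illustrative:

import dbflib

dbf = dbflib.create("/tmp/null_demo.dbf")           # illustrative path
dbf.add_field("NAME", dbflib.FTString, 20, 0)       # string field
dbf.add_field("COUNT", dbflib.FTInteger, 8, 0)      # integer field
dbf.write_record(0, {"NAME": None, "COUNT": None})  # both stored as NULL
dbf.close()

dbf = dbflib.open("/tmp/null_demo.dbf")
print(dbf.read_record(0))   # expected: {'NAME': '', 'COUNT': None}
dbf.close()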
/* allocator
 */
static PyObject* dbffile_new(PyTypeObject* type, PyObject* args, PyObject* kwds)
{
        DBFFileObject* self;
        self = (DBFFileObject*) type->tp_alloc(type, 0);
        self->handle = NULL;
        return (PyObject*) self;
}

/* the read_attribute method. Return the value of the given record and
 * field as a python object of the appropriate type.
 *
 * In case of error, set a python exception and return NULL. Since that
 * value will be returned to the python interpreter as is, the
 * interpreter should recognize the exception.
 */
static PyObject *
 DBFInfo_read_attribute(DBFInfo * handle, int record, int field)  
 {  
     if (record < 0 || record >= DBFGetRecordCount(handle))  
     {  
         PyErr_Format(PyExc_ValueError,  
                      "record index %d out of bounds (record count: %d)",  
                      record, DBFGetRecordCount(handle));  
         return NULL;  
     }  
21    
     if (field < 0 || field >= DBFGetFieldCount(handle))  
     {  
         PyErr_Format(PyExc_ValueError,  
                      "field index %d out of bounds (field count: %d)",  
                      field, DBFGetFieldCount(handle));  
         return NULL;  
     }  
22    
     return do_read_attribute(handle, record, field, NULL);  
 }  
       
23    
/* deallocator
 */
static void dbffile_dealloc(DBFFileObject* self)
{
        DBFClose(self->handle);
        self->handle = NULL;
        self->ob_type->tp_free((PyObject*)self);
}

/* the read_record method. Return the record as a dictionary whose
 * keys are the names of the fields and whose values are of the
 * appropriate Python type.
 *
 * In case of error, set a python exception and return NULL. Since that
 * value will be returned to the python interpreter as is, the
 * interpreter should recognize the exception.
 */
   
 static PyObject *  
 DBFInfo_read_record(DBFInfo * handle, int record)  
 {  
     int num_fields;  
     int i;  
     int type, width;  
     char name[12];  
     PyObject *dict;  
     PyObject *value;  
   
     if (record < 0 || record >= DBFGetRecordCount(handle))  
     {  
         PyErr_Format(PyExc_ValueError,  
                      "record index %d out of bounds (record count: %d)",  
                      record, DBFGetRecordCount(handle));  
         return NULL;  
     }  
32    
     dict = PyDict_New();  
     if (!dict)  
         return NULL;  
           
     num_fields = DBFGetFieldCount(handle);  
     for (i = 0; i < num_fields; i++)  
     {  
         value = do_read_attribute(handle, record, i, name);  
         if (!value)  
             goto fail;  
   
         PyDict_SetItemString(dict, name, value);  
         Py_DECREF(value);  
     }  
   
     return dict;  
   
  fail:  
     Py_XDECREF(dict);  
     return NULL;  
 }  
   
/* the write_record method. Write the record, given either as a
  * dictionary or a sequence (i.e. a list or a tuple).  
  *  
  * If it's a dictionary the keys must be the names of the fields and  
  * their value must have a suitable type. Only the fields actually  
  * contained in the dictionary are written. Fields for which there's no  
  * item in the dict are not modified.  
  *  
  * If it's a sequence, all fields must be present in the right order.  
  *  
  * In case of error, set a python exception and return NULL. Since that  
  * value will be returned to the python interpreter as is, the  
  * interpreter should recognize the exception.  
  *  
  * The method is implemented with two c-functions, write_field to write  
  * a single field and DBFInfo_write_record as the front-end.  
  */  
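In Python terms, the two record forms described above look like this (a sketch; the path and field names are illustrative):

import dbflib

dbf = dbflib.create("/tmp/write_demo.dbf")       # illustrative path
dbf.add_field("NAME", dbflib.FTString, 20, 0)
dbf.add_field("AREA", dbflib.FTDouble, 12, 3)

# sequence form: exactly one value per field, in field order
dbf.write_record(0, ["Berlin", 891.8])

# dict form: only the named fields are written, the rest are left alone
dbf.write_record(1, {"NAME": "Bonn"})

dbf.close()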
33    
34    
/* constructor
 */
static int dbffile_init(DBFFileObject* self, PyObject* args, PyObject* kwds)
{
        char* file;
        char* mode = "rb";
        if (kwds != NULL && PyDict_Size(kwds) > 0)
        {
                PyErr_Format(PyExc_TypeError, "dbflib.DBFFile.__init__ takes no keyword arguments");
                return -1;
        }
        if (!PyArg_ParseTuple(args, "s|s", &file, &mode)) return -1;

        self->handle = DBFOpen(file, mode);
        return self->handle ? 0 : -1;
}

/* write a single field of a record. */
static int
write_field(DBFHandle handle, int record, int field, int type,
            PyObject * value)
{
    char * string_value;
    int int_value;
     double double_value;  
   
     if (value == Py_None)  
     {  
         if (!DBFWriteNULLAttribute(handle, record, field))  
         {  
             PyErr_Format(PyExc_IOError,  
                          "can't write NULL field %d of record %d",  
                          field, record);  
             return 0;  
         }  
     }  
     else  
     {  
         switch (type)  
         {  
         case FTString:  
             string_value = PyString_AsString(value);  
             if (!string_value)  
                 return 0;  
             if (!DBFWriteStringAttribute(handle, record, field, string_value))  
             {  
                 PyErr_Format(PyExc_IOError,  
                              "can't write field %d of record %d",  
                              field, record);  
                 return 0;  
             }  
             break;  
   
         case FTInteger:  
             int_value = PyInt_AsLong(value);  
             if (int_value == -1 && PyErr_Occurred())  
                 return 0;  
             if (!DBFWriteIntegerAttribute(handle, record, field, int_value))  
             {  
                 PyErr_Format(PyExc_IOError,  
                              "can't write field %d of record %d",  
                              field, record);  
                 return 0;  
             }  
             break;  
   
         case FTDouble:  
             double_value = PyFloat_AsDouble(value);  
             if (double_value == -1 && PyErr_Occurred())  
                 return 0;  
             if (!DBFWriteDoubleAttribute(handle, record, field, double_value))  
             {  
                 PyErr_Format(PyExc_IOError,  
                              "can't write field %d of record %d",  
                              field, record);  
                 return 0;  
             }  
             break;  
   
         default:  
             PyErr_Format(PyExc_TypeError, "Invalid field data type %d", type);  
             return 0;  
         }  
     }  
   
     return 1;  
 }  
   
 static  
 PyObject *  
 DBFInfo_write_record(DBFHandle handle, int record, PyObject *record_object)  
 {  
     int num_fields;  
     int i, length;  
     int type, width;  
     char name[12];  
     PyObject * value = NULL;  
   
     num_fields = DBFGetFieldCount(handle);  
   
     /* We used to use PyMapping_Check to test whether record_object is a  
      * dictionary like object instead of PySequence_Check to test  
      * whether it's a sequence. Unfortunately in Python 2.3  
      * PyMapping_Check returns true for lists and tuples too so the old  
      * approach doesn't work anymore.  
      */  
     if (PySequence_Check(record_object))  
     {  
         /* It's a sequence object. Iterate through all items in the  
          * sequence and write them to the appropriate field.  
          */  
         length = PySequence_Length(record_object);  
         if (length != num_fields)  
         {  
             PyErr_SetString(PyExc_TypeError,  
                             "record must have one item for each field");  
             goto fail;  
         }  
         for (i = 0; i < length; i++)  
         {  
             type = DBFGetFieldInfo(handle, i, name, &width, NULL);  
             value = PySequence_GetItem(record_object, i);  
             if (value)  
             {  
                 if (!write_field(handle, record, i, type, value))  
                     goto fail;  
                 Py_DECREF(value);  
             }  
             else  
             {  
                 goto fail;  
             }  
         }  
     }  
     else  
     {  
         /* It's a dictionary-like object. Iterate over the names of the  
          * known fields and write the corresponding item  
          */  
         for (i = 0; i < num_fields; i++)  
        {
            type = DBFGetFieldInfo(handle, i, name, &width, NULL);

            /* if the dictionary has the key name write that object to
             * the appropriate field, otherwise just clear the python
             * exception and do nothing.
             */
            value = PyMapping_GetItemString(record_object, name);
             if (value)  
             {  
                 if (!write_field(handle, record, i, type, value))  
                     goto fail;  
                 Py_DECREF(value);  
             }  
             else  
             {  
                 PyErr_Clear();  
             }  
         }  
     }  
   
     Py_INCREF(Py_None);  
     return Py_None;  
   
  fail:  
     Py_XDECREF(value);  
     return NULL;  
}
 %}  
51    
52    
static PyObject* dbffile_close(DBFFileObject* self)
{
        DBFClose(self->handle);
        self->handle = NULL;
        Py_RETURN_NONE;
}

/* The commit method implementation
 *
 * The method relies on the DBFUpdateHeader method which is not
 * available in shapelib <= 1.2.10.  setup.py defines
 * HAVE_UPDATE_HEADER's value depending on whether the function is
 * available in the shapelib version the code is compiled with.
 */
%{
static
void
DBFInfo_commit(DBFHandle handle)
{
#if HAVE_UPDATE_HEADER
    DBFUpdateHeader(handle);
#endif
}
 %}  
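Since commit() is only generated when the wrapper is built against a shapelib that has DBFUpdateHeader, Python callers that want to flush the header should probe for it first. A defensive Python sketch (reusing the illustrative file from the earlier sketch; path and mode string are illustrative):

import dbflib

dbf = dbflib.open("/tmp/write_demo.dbf", "rb+")     # writable mode for DBFOpen
dbf.write_record(0, {"NAME": "Berlin (updated)"})
if hasattr(dbf, "commit"):                          # absent when HAVE_UPDATE_HEADER is 0
    dbf.commit()
dbf.close()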
60    
61    
 /*  
  * The SWIG Interface definition.  
  */  
   
 /* include some common SWIG type definitions and standard exception  
    handling code */  
 %include typemaps.i  
 %include exception.i  
   
/* As for ShapeFile in shapelib.i, we define a new C-struct that holds
  * the DBFHandle. This is mainly done so we can separate the close()  
  * method from the destructor but it also helps with exception handling.  
  *  
  * After the DBFFile has been opened or created the handle is not NULL.  
  * The close() method closes the file and sets handle to NULL as an  
  * indicator that the file has been closed.  
  */  
62    
static PyObject* dbffile_field_count(DBFFileObject* self)
{
        return PyInt_FromLong((long)DBFGetFieldCount(self->handle));
}

%{
    typedef struct {
        DBFHandle handle;
    } DBFFile;
 %}  
67    
68    
 /* The first argument to the DBFFile methods is a DBFFile pointer.  
  * We have to check whether handle is not NULL in most methods but not  
  * all. In the destructor and the close method, it's OK for handle to be  
  * NULL. We achieve this by checking whether the preprocessor macro  
  * NOCHECK_$name is defined. SWIG replaces $name with the name of the  
  * function for which the code is inserted. In the %{,%}-block below we  
  * define the macros for the destructor and the close() method.  
  */  
69    
static PyObject* dbffile_record_count(DBFFileObject* self)
{
        return PyInt_FromLong((long)DBFGetRecordCount(self->handle));
}

%typemap(python,check) DBFFile *{
%#ifndef NOCHECK_$name
    if (!$target || !$target->handle)
        SWIG_exception(SWIG_TypeError, "dbffile already closed");
%#endif
}
74    
 %{  
 #define NOCHECK_delete_DBFFile  
 #define NOCHECK_DBFFile_close  
 %}  
   
75    
 /* An exception handle for the constructor and the module level open()  
  * and create() functions.  
  *  
  * Annoyingly, we *have* to put braces around the SWIG_exception()  
  * calls, at least in the python case, because of the way the macro is  
  * written. Of course, always putting braces around the branches of an  
  * if-statement is often considered good practice.  
  */  
 %typemap(python,except) DBFFile * {  
     $function;  
     if (!$source)  
     {  
         SWIG_exception(SWIG_MemoryError, "no memory");  
     }  
     else if (!$source->handle)  
     {  
         SWIG_exception(SWIG_IOError, "$name failed");  
     }  
 }  
76    
/* Exception handler for the add_field method */
%typemap(python,except) int DBFFile_add_field {
    $function;
    if ($source < 0)
    {
        SWIG_exception(SWIG_RuntimeError, "add_field failed");
    }
}

static PyObject* dbffile_field_info(DBFFileObject* self, PyObject* args)
{
        char field_name[12];
        int field, width = 0, decimals = 0, field_type;

        if (!PyArg_ParseTuple(args, "i", &field)) return NULL;

        field_name[0] = '\0';
        field_type = DBFGetFieldInfo(self->handle, field, field_name, &width, &decimals);

        return Py_BuildValue("isii", field_type, field_name, width, decimals);
}
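Seen from Python, add_field() and field_info() mirror each other: what goes into one comes back out of the other. A sketch; the path and field names are illustrative:

import dbflib

dbf = dbflib.create("/tmp/fields_demo.dbf")
dbf.add_field("ID", dbflib.FTInteger, 8, 0)
dbf.add_field("PRICE", dbflib.FTDouble, 10, 2)

for i in range(dbf.field_count()):
    ftype, name, width, decimals = dbf.field_info(i)
    print("%d: %s type=%d width=%d decimals=%d" % (i, name, ftype, width, decimals))

dbf.close()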
89    
 /* define and use some typemaps for the field_info() method whose  
  * C-implementation has three output parameters that are returned  
  * through pointers passed into the function. SWIG already has  
  * definitions for common types such as int* and we can use those for  
  * the last two parameters:  
  */  
90    
 %apply int * OUTPUT { int * output_width }  
 %apply int * OUTPUT { int * output_decimals }  
91    
static PyObject* dbffile_add_field(DBFFileObject* self, PyObject* args)
{
        char* name;
        int type, width, decimals;
        int field;

        if (!PyArg_ParseTuple(args, "siii", &name, &type, &width, &decimals)) return NULL;

        field = DBFAddField(self->handle, name, (DBFFieldType)type, width, decimals);

        if (field < 0)
        {
                PyErr_SetString(PyExc_ValueError, "Failed to add field due to inappropriate field definition");
                return NULL;
        }
        return PyInt_FromLong((long)field);
}

/* the fieldname has to be defined manually: */
%typemap(python,ignore) char *fieldname_out(char temp[12]) {
    $target = temp;
}
109    
 %typemap(python,argout) char *fieldname_out() {  
     PyObject * string = PyString_FromString($source);  
     $target = t_output_helper($target,string);  
 }  
110    
111    
112    /* Read one attribute from the dbf handle and return it as a new python object
113    *
114    * If an error occurs, set the appropriate Python exception and return
115    * NULL.
116    *
117    * Assume that the values of the record and field arguments are valid.
118    * The name argument will be passed to DBFGetFieldInfo as is and should
119    * thus be either NULL or a pointer to an array of at least 12 chars
120    */
121    static PyObject* do_read_attribute(DBFHandle handle, int record, int field, char * name)
122    {
123            int type, width;
124            const char* temp;
125            type = DBFGetFieldInfo(handle, field, name, &width, NULL);
126            
127            /* For strings NULL and the empty string are indistinguishable
128            * in DBF files. We prefer empty strings instead for backwards
129            * compatibility reasons because older wrapper versions returned
130            * empty strings as empty strings.
131            */
132            if (type != FTString && DBFIsAttributeNULL(handle, record, field))
133            {
134                    Py_RETURN_NONE;
135            }
136            else
137            {
138                    switch (type)
139                    {
140                    case FTString:
141                            temp = DBFReadStringAttribute(handle, record, field);
142                            if (!temp)
143                            {
144                                    PyErr_Format(PyExc_IOError,
145                                                    "Can't read value for row %d column %d",
146                                                    record, field);
147                                    return NULL;
148                            }
149                            return PyString_FromString(temp);
150    
151                    case FTInteger:
152                            return PyInt_FromLong((long)DBFReadIntegerAttribute(handle, record, field));
153    
154                    case FTDouble:
155                            return PyFloat_FromDouble(DBFReadDoubleAttribute(handle, record, field));
156    
157                    default:
158                            PyErr_Format(PyExc_TypeError, "Invalid field data type %d", type);
159                            return NULL;
160                    }
161            }
162    }    
163    
164    
 /*  
  * The SWIG-version of the DBFFile struct  
  */  
165    
typedef struct
{
    %addmethods {
        DBFFile(const char *file, const char * mode = "rb") {
            DBFFile * self = malloc(sizeof(DBFFile));
            if (self)
                self->handle = DBFOpen(file, mode);
            return self;
        }

        ~DBFFile() {
            if (self->handle)
                DBFClose(self->handle);
            free(self);
        }

        void close() {
            if (self->handle)
                DBFClose(self->handle);
            self->handle = NULL;
        }

        int field_count() {
            return DBFGetFieldCount(self->handle);
        }

        int record_count() {
            return DBFGetRecordCount(self->handle);
        }

        int field_info(int iField, char * fieldname_out,
                       int * output_width, int * output_decimals) {
            return DBFGetFieldInfo(self->handle, iField, fieldname_out,
                                   output_width, output_decimals);
        }

        PyObject * read_record(int record) {
            return DBFInfo_read_record(self->handle, record);
        }

        PyObject * read_attribute(int record, int field) {
            return DBFInfo_read_attribute(self->handle, record, field);
        }

        int add_field(const char * pszFieldName, DBFFieldType eType,
                      int nWidth, int nDecimals) {
            return DBFAddField(self->handle, pszFieldName, eType, nWidth,
                               nDecimals);
        }

        PyObject *write_record(int record, PyObject *dict_or_sequence) {
            return DBFInfo_write_record(self->handle, record,
                                        dict_or_sequence);
        }

        void commit() {
            DBFInfo_commit(self->handle);
        }
        /* Delete the commit method from the class if it doesn't have a
         * real implementation.
         */
        %pragma(python) addtomethod="__class__:if not dbflibc._have_commit: del commit"

/* the read_attribute method. Return the value of the given record and
 * field as a python object of the appropriate type.
 */
static PyObject* dbffile_read_attribute(DBFFileObject* self, PyObject* args)
{
        int record, field;

        if (!PyArg_ParseTuple(args, "ii", &record, &field)) return NULL;

        if (record < 0 || record >= DBFGetRecordCount(self->handle))
        {
                PyErr_Format(PyExc_ValueError,
                        "record index %d out of bounds (record count: %d)",
                        record, DBFGetRecordCount(self->handle));
                return NULL;
        }

        if (field < 0 || field >= DBFGetFieldCount(self->handle))
        {
                PyErr_Format(PyExc_ValueError,
                        "field index %d out of bounds (field count: %d)",
                        field, DBFGetFieldCount(self->handle));
                return NULL;
        }

        return do_read_attribute(self->handle, record, field, NULL);
}
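Both indices are range-checked, so a bad index surfaces as a ValueError instead of a crash. Python sketch (reusing the illustrative file from the earlier sketch):

import dbflib

dbf = dbflib.open("/tmp/write_demo.dbf")
print(dbf.read_attribute(0, 0))                  # first field of the first record

try:
    dbf.read_attribute(dbf.record_count(), 0)    # one past the last record
except ValueError:
    print("record index out of bounds, as expected")
dbf.close()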
193    
194    
195    
196    /* the read_record method. Return the record as a dictionary whose
197    * keys are the names of the fields and whose values are of the
198    * appropriate Python type.
199    */
200    static PyObject* dbffile_read_record(DBFFileObject* self, PyObject* args)
201    {
202            int record;
203            int num_fields;
204            int i;
205            char name[12];
206            PyObject *dict;
207            PyObject *value = NULL;
208    
209            if (!PyArg_ParseTuple(args, "i", &record)) return NULL;
210    
211            if (record < 0 || record >= DBFGetRecordCount(self->handle))
212            {
213                    PyErr_Format(PyExc_ValueError,
214                            "record index %d out of bounds (record count: %d)",
215                            record, DBFGetRecordCount(self->handle));
216                    return NULL;
        }
218    
        dict = PyDict_New();
        if (!dict) return NULL;

        num_fields = DBFGetFieldCount(self->handle);
        for (i = 0; i < num_fields; i++)
        {
                value = do_read_attribute(self->handle, record, i, name);
                if (!value || PyDict_SetItemString(dict, name, value) < 0) goto fail;
                Py_DECREF(value);
                value = NULL;
        }
230    
        return dict;

fail:
        Py_XDECREF(value);
        Py_DECREF(dict);
        return NULL;
}
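Combined with record_count(), read_record() gives a simple way to walk the whole file, one dict per record. Python sketch (reusing the illustrative file from the earlier sketch):

import dbflib

dbf = dbflib.open("/tmp/write_demo.dbf")
for i in range(dbf.record_count()):
    print(dbf.read_record(i))    # e.g. {'NAME': 'Berlin', 'AREA': 891.8}
dbf.close()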
238    
239    
240    
241    /* write a single field of a record. */
242    static int do_write_field(DBFHandle handle, int record, int field, int type, PyObject* value)
243    {
244            char * string_value;
245            int int_value;
246            double double_value;
247    
248            if (value == Py_None)
249            {
250                    if (!DBFWriteNULLAttribute(handle, record, field))
251                    {
252                            PyErr_Format(PyExc_IOError,
253                                    "can't write NULL field %d of record %d",
254                                    field, record);
255                            return 0;
256                    }
        }
        else
        {
                switch (type)
261                    {
262                    case FTString:
263                            string_value = PyString_AsString(value);
264                            if (!string_value) return 0;
265                            if (!DBFWriteStringAttribute(handle, record, field, string_value))
266                            {
267                                    PyErr_Format(PyExc_IOError,
268                                                    "can't write field %d of record %d",
269                                                    field, record);
270                                    return 0;
271                            }
272                            break;
273    
274                    case FTInteger:
275                            int_value = PyInt_AsLong(value);
276                            if (int_value == -1 && PyErr_Occurred()) return 0;
277                            if (!DBFWriteIntegerAttribute(handle, record, field, int_value))
278                            {
279                                    PyErr_Format(PyExc_IOError,
280                                                    "can't write field %d of record %d",
281                                                    field, record);
282                                    return 0;
283                            }
284                            break;
285    
286                    case FTDouble:
287                            double_value = PyFloat_AsDouble(value);
288                            if (double_value == -1 && PyErr_Occurred()) return 0;
289                            if (!DBFWriteDoubleAttribute(handle, record, field, double_value))
290                            {
291                                    PyErr_Format(PyExc_IOError,
292                                                    "can't write field %d of record %d",
293                                                    field, record);
294                                    return 0;
295                            }
296                            break;
297    
298                    default:
299                            PyErr_Format(PyExc_TypeError, "Invalid field data type %d", type);
300                            return 0;
301                    }
                }
        }

        return 1;
}


static PyObject* dbffile_write_field(DBFFileObject* self, PyObject* args)
{
        int record, field;
312            PyObject* value;
313            int type;
314    
315            if (!PyArg_ParseTuple(args, "iiO", &record, &field, &value)) return NULL;
316            
317            if (field < 0 || field >= DBFGetFieldCount(self->handle))
318            {
319                    PyErr_Format(PyExc_ValueError,
320                                    "field index %d out of bounds (field count: %d)",
321                                    field, DBFGetFieldCount(self->handle));
322                    return NULL;
        }
324    
        type = DBFGetFieldInfo(self->handle, field, NULL, NULL, NULL);
        if (!do_write_field(self->handle, record, field, type, value)) return NULL;
        Py_RETURN_NONE;
328    }
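write_field() touches a single column of one record; the Python value has to match the field type (str, int, float, or None for NULL). Note that this method only exists in this C version of the wrapper, not in the old SWIG-generated class. Python sketch; the path, mode and values are illustrative:

import dbflib

dbf = dbflib.open("/tmp/write_demo.dbf", "rb+")
dbf.write_field(0, 1, 892.0)    # record 0, field index 1 (AREA), new double value
dbf.write_field(1, 1, None)     # store NULL in record 1, field 1
dbf.close()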
329    
330    
331    
332    static PyObject* dbffile_write_record(DBFFileObject* self, PyObject* args)
333    {
334            int record;
335            PyObject* record_object;
336            int i, num_fields;
337            
338            int type;
339            char name[12];
340            PyObject* value = NULL;
341            
342            if (!PyArg_ParseTuple(args, "iO", &record, &record_object)) return NULL;
343            
344            num_fields = DBFGetFieldCount(self->handle);
345            
346            /* mimic ShapeFile functionality where id = -1 means appending */
347            if (record == -1)
348            {
349                    record = DBFGetRecordCount(self->handle);
        }
351    
        if (PySequence_Check(record_object))
        {
354                    /* It's a sequence object. Iterate through all items in the
355                    * sequence and write them to the appropriate field.
356                    */
357                    if (PySequence_Length(record_object) != num_fields)
358                    {
359                            PyErr_SetString(PyExc_TypeError, "record must have one item for each field");
360                            return NULL;
361                    }
362                    for (i = 0; i < num_fields; ++i)
363                    {
364                            type = DBFGetFieldInfo(self->handle, i, NULL, NULL, NULL);
365                            value = PySequence_GetItem(record_object, i);
366                            if (!value) return NULL;
367                            if (!do_write_field(self->handle, record, i, type, value))
368                            {
369                                    Py_DECREF(value);
370                                    return NULL;
371                            }
372                            Py_DECREF(value);
373                    }
        }
        else
        {
                /* It's a dictionary-like object. Iterate over the names of the
                * known fields and write the corresponding item
                */
380                    for (i = 0; i < num_fields; ++i)
381                    {
382                            name[0] = '\0';
383                            type = DBFGetFieldInfo(self->handle, i, name, NULL, NULL);
384                            value = PyDict_GetItemString(record_object, name);
385                            if (value && !do_write_field(self->handle, record, i, type, value)) return NULL;
386                    }
387            }
388            
389            return PyInt_FromLong((long)record);
390    }
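As the comment in write_record above says, passing -1 as the record index appends, and the returned integer is the index that was actually written. Python sketch (reusing the illustrative file and field names from the earlier sketch):

import dbflib

dbf = dbflib.open("/tmp/write_demo.dbf", "rb+")
new_index = dbf.write_record(-1, {"NAME": "Hamburg", "AREA": 755.2})
print(new_index)                # index of the appended record
dbf.close()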
391    
392    
393    
394    static PyObject* dbffile_repr(DBFFileObject* self)
395    {
396            /* TODO: it would be nice to do something like "dbflib.DBFFile(filename, mode)" instead */
397            return PyString_FromFormat("<dbflib.DBFFile object at %p>", self->handle);
398    }
399    
400    
401    
402    /* The commit method implementation
403    *
404    * The method relies on the DBFUpdateHeader method which is not
405    * available in shapelib <= 1.2.10.  setup.py defines
406    * HAVE_UPDATE_HEADER's value depending on whether the function is
407    * available in the shapelib version the code is compiled with.
408    */
409    #if HAVE_UPDATE_HEADER
410    static PyObject* dbffile_commit(DBFFileObject* self)
411    {
412            DBFUpdateHeader(self->handle);
413            Py_RETURN_NONE;
414    }
415    #endif
416    
417    
418    
419    static struct PyMethodDef dbffile_methods[] =
420    {
421            {"close", (PyCFunction)dbffile_close, METH_NOARGS, "close DBFFile"},
422            {"field_count", (PyCFunction)dbffile_field_count, METH_NOARGS, "return number of fields currently defined"},
423            {"record_count", (PyCFunction)dbffile_record_count, METH_NOARGS, "return number of records that currently exist"},
424            {"field_info", (PyCFunction)dbffile_field_info, METH_VARARGS,
425                    "returns info of a field as a tuple (type, name, width, decimals) with:\n"
426                    "-type: the type of the field corresponding to the integer value of one of the constants FTString, FTInteger, ...\n"
427                    "-name: the name of the field as a string\n"
428                    "-width: the width of the field as a number of characters\n"
429                    "-decimals: the number of decimal digits" },
430            {"add_field", (PyCFunction)dbffile_add_field, METH_VARARGS,
431                "adds a new field and returns the field index if successful\n"
432                "-name: the name of the field as a string\n"
433                "-type: the type of the field corresponding to the integer value of one of the constants FTString, FTInteger, ...\n"
434                "-width: the width of the field as a number of characters\n"
435                "-decimals: the number of decimal digits" },
436            {"read_attribute", (PyCFunction)dbffile_read_attribute, METH_VARARGS, "return the value of one field of a record"},
437            {"read_record", (PyCFunction)dbffile_read_record, METH_VARARGS, "return an entire record as a dict of field names and values"},
438            {"write_field", (PyCFunction)dbffile_write_field, METH_VARARGS, "write a single field of a record"},
439            {"write_record", (PyCFunction)dbffile_write_record, METH_VARARGS, "write an entire record as a dict or a sequence"},
440    #if HAVE_UPDATE_HEADER
441            {"commit", (PyCFunction)dbffile_commit, METH_NOARGS, NULL},
442    #endif
443            {NULL}
444    };
445    
446    
447    
448    static struct PyGetSetDef dbffile_getsetters[] =
449    {
450            {NULL}
451    };
452    
        /* The __del__ method generated by the old SWIG version we're using
         * tries to access self.thisown which may not be set at all when
          * there was an exception during construction.  Therefore we  
          * override it with our own version.  
          * FIXME: It would be better to upgrade to a newer SWIG version  
          * or to get rid of SWIG entirely.  
          */  
         %pragma(python) addtoclass = "  
     def __del__(self,dbflibc=dbflibc):  
         if getattr(self, 'thisown', 0):  
             dbflibc.delete_DBFFile(self)  
     "  
453    
454    
static PyTypeObject DBFFileType = PYSHAPELIB_DEFINE_TYPE(DBFFileObject, dbffile, "shapelib.DBFFile", 0);

    }
} DBFFile;
456    
457    
 /*  
  * Two module level functions, open() and create() that correspond to  
  * DBFOpen and DBFCreate respectively. open() is equivalent to the  
  * DBFFile constructor.  
  */  
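Concretely, the three ways of getting hold of a DBFFile that the comment above describes (Python sketch; the paths are illustrative, and open() and the constructor default to mode "rb"):

import dbflib

a = dbflib.open("/tmp/write_demo.dbf")            # module-level open()
b = dbflib.DBFFile("/tmp/write_demo.dbf", "rb")   # constructor, equivalent to open()
c = dbflib.create("/tmp/created_here.dbf")        # DBFCreate: new, empty file

for dbf in (a, b, c):
    dbf.close()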
458    
459    /* --- dbflib -------------------------------------------------------------------------------------------------------- */
460    
static PyObject* dbflib_open(PyObject* module, PyObject* args)
{
        return PyObject_CallObject((PyObject*)&DBFFileType, args);
}

%{
    DBFFile * open_DBFFile(const char * file, const char * mode)
    {
        DBFFile * self = malloc(sizeof(DBFFile));
        if (self)
            self->handle = DBFOpen(file, mode);
        return self;
    }
%}

%name(open) %new DBFFile * open_DBFFile(const char * file,
                                        const char * mode = "rb");
466    
 %{  
     DBFFile * create_DBFFile(const char * file)  
     {  
         DBFFile * self = malloc(sizeof(DBFFile));  
         if (self)  
             self->handle = DBFCreate(file);  
         return self;  
     }  
 %}  
 %name(create) %new DBFFile * create_DBFFile(const char * file);  
467    
468    static PyObject* dbflib_create(PyObject* module, PyObject* args)
469    {
470            char* file;
471            DBFFileObject* result;
472            
473            if (!PyArg_ParseTuple(args, "s", &file)) return NULL;
474            
475            result = PyObject_New(DBFFileObject, &DBFFileType);
476            if (!result)
477            {
478                    return PyErr_NoMemory();
479            }
480            
481            result->handle = DBFCreate(file);
482            if (!result->handle)
483            {
484                    PyObject_Del((PyObject*)result);
485                    PyErr_SetString(PyExc_RuntimeError, "Failed to create DBFFile");
486                    return NULL;
487            }
488            
489            return (PyObject*) result;
490    }
491    
492    
static struct PyMethodDef dbflib_methods[] =
{
        {"open", (PyCFunction)dbflib_open, METH_VARARGS, "open a DBFFile" },
        {"create", (PyCFunction)dbflib_create, METH_VARARGS, "create a DBFFile" },
        {NULL}
};

/* constant definitions copied from shapefil.h */
typedef enum {
  FTString,
  FTInteger,
  FTDouble,
  FTInvalid
} DBFFieldType;
500    
501    
 /* Put the value of the HAVE_UPDATE_HEADER preprocessor macro into the  
  * wrapper so that the __class__ pragma above knows when to remove the  
  * commit method  
  */  
 const int _have_commit = HAVE_UPDATE_HEADER;  
502    
503    PyMODINIT_FUNC initdbflib(void)
504    {
505            PyObject* module = Py_InitModule("dbflib", dbflib_methods);
506            if (!module) return;
507            
508            PYSHAPELIB_ADD_TYPE(DBFFileType, "DBFFile");
509            
510            PYSHAPELIB_ADD_CONSTANT(FTString);
511            PYSHAPELIB_ADD_CONSTANT(FTInteger);
512            PYSHAPELIB_ADD_CONSTANT(FTDouble);
513            PYSHAPELIB_ADD_CONSTANT(FTInvalid);
514            PyModule_AddIntConstant(module, "_have_commit", HAVE_UPDATE_HEADER);
515    }
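Putting the module together, a round trip might look like this (a Python sketch; the path, field names and values are illustrative, and commit() is probed because it depends on HAVE_UPDATE_HEADER):

import dbflib

dbf = dbflib.create("/tmp/roundtrip.dbf")
dbf.add_field("NAME", dbflib.FTString, 20, 0)
dbf.add_field("POP", dbflib.FTInteger, 10, 0)
dbf.write_record(0, ["Karlsruhe", 289000])
dbf.write_record(1, {"NAME": "Freiburg", "POP": 214000})
if hasattr(dbf, "commit"):          # see _have_commit / HAVE_UPDATE_HEADER
    dbf.commit()
dbf.close()

dbf = dbflib.open("/tmp/roundtrip.dbf")
for i in range(dbf.record_count()):
    print(dbf.read_record(i))
dbf.close()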
