/* Python 2 extension module "dbflib": a thin wrapper around shapelib's
 * DBF (xBase attribute table) API. Shared helpers/macros come from the
 * common pyshapelib header. */
#include "pyshapelib_common.h"
/* --- DBFFile ------------------------------------------------------------------------------------------------------- */

/* Python object wrapping a single open DBF file. */
typedef struct {
	PyObject_HEAD
	DBFHandle handle;   /* shapelib handle; NULL once closed */
} DBFFileObject;
12 |
/* allocator |
13 |
*/ |
14 |
static PyObject* dbffile_new(PyTypeObject* type, PyObject* args, PyObject* kwds) |
15 |
{ |
16 |
DBFFileObject* self; |
17 |
self = (DBFFileObject*) type->tp_alloc(type, 0); |
18 |
self->handle = NULL; |
19 |
return (PyObject*) self; |
20 |
} |
21 |
|
22 |
|
23 |
|
24 |
/* deallocator |
25 |
*/ |
26 |
static void dbffile_dealloc(DBFFileObject* self) |
27 |
{ |
28 |
DBFClose(self->handle); |
29 |
self->handle = NULL; |
30 |
self->ob_type->tp_free((PyObject*)self); |
31 |
} |
32 |
|
33 |
|
34 |
|
35 |
/* constructor |
36 |
*/ |
37 |
static int dbffile_init(DBFFileObject* self, PyObject* args, PyObject* kwds) |
38 |
{ |
39 |
char* file; |
40 |
char* mode = "rb"; |
41 |
if (kwds != NULL && PyDict_Size(kwds) > 0) |
42 |
{ |
43 |
PyErr_Format(PyExc_TypeError, "dbflib.DBFFile.__init__ takes no keyword arguments"); |
44 |
return -1; |
45 |
} |
46 |
if (!PyArg_ParseTuple(args, "s|s", &file, &mode)) return -1; |
47 |
|
48 |
self->handle = DBFOpen(file, mode); |
49 |
return self->handle ? 0 : -1; |
50 |
} |
51 |
|
52 |
|
53 |
|
54 |
static PyObject* dbffile_close(DBFFileObject* self) |
55 |
{ |
56 |
DBFClose(self->handle); |
57 |
self->handle = NULL; |
58 |
Py_RETURN_NONE; |
59 |
} |
/* field_count() -- number of fields (columns) defined in the DBF file. */
static PyObject* dbffile_field_count(DBFFileObject* self)
{
	return PyInt_FromLong((long)DBFGetFieldCount(self->handle));
}
/* record_count() -- number of records (rows) currently in the DBF file. */
static PyObject* dbffile_record_count(DBFFileObject* self)
{
	return PyInt_FromLong((long)DBFGetRecordCount(self->handle));
}
74 |
|
75 |
|
76 |
|
77 |
static PyObject* dbffile_field_info(DBFFileObject* self, PyObject* args) |
78 |
{ |
79 |
char field_name[12]; |
80 |
int field, width = 0, decimals = 0, field_type; |
81 |
|
82 |
if (!PyArg_ParseTuple(args, "i", &field)) return NULL; |
83 |
|
84 |
field_name[0] = '\0'; |
85 |
field_type = DBFGetFieldInfo(self->handle, field, field_name, &width, &decimals); |
86 |
|
87 |
return Py_BuildValue("isii", field_type, field_name, width, decimals); |
88 |
} |
89 |
|
90 |
|
91 |
|
92 |
static PyObject* dbffile_add_field(DBFFileObject* self, PyObject* args) |
93 |
{ |
94 |
char* name; |
95 |
int type, width, decimals; |
96 |
int field; |
97 |
|
98 |
if (!PyArg_ParseTuple(args, "siii", &name, &type, &width, &decimals)) return NULL; |
99 |
|
100 |
field = DBFAddField(self->handle, name, (DBFFieldType)type, width, decimals); |
101 |
|
102 |
if (field < 0) |
103 |
{ |
104 |
PyErr_SetString(PyExc_ValueError, "Failed to add field due to inappropriate field definition"); |
105 |
return NULL; |
106 |
} |
107 |
return PyInt_FromLong((long)field); |
108 |
} |
109 |
|
110 |
|
111 |
|
112 |
/* Read one attribute from the dbf handle and return it as a new python object |
113 |
* |
114 |
* If an error occurs, set the appropriate Python exception and return |
115 |
* NULL. |
116 |
* |
117 |
* Assume that the values of the record and field arguments are valid. |
118 |
* The name argument will be passed to DBFGetFieldInfo as is and should |
119 |
* thus be either NULL or a pointer to an array of at least 12 chars |
120 |
*/ |
121 |
static PyObject* do_read_attribute(DBFHandle handle, int record, int field, char * name) |
122 |
{ |
123 |
int type, width; |
124 |
const char* temp; |
125 |
type = DBFGetFieldInfo(handle, field, name, &width, NULL); |
126 |
|
127 |
/* For strings NULL and the empty string are indistinguishable |
128 |
* in DBF files. We prefer empty strings instead for backwards |
129 |
* compatibility reasons because older wrapper versions returned |
130 |
* emtpy strings as empty strings. |
131 |
*/ |
132 |
if (type != FTString && DBFIsAttributeNULL(handle, record, field)) |
133 |
{ |
134 |
Py_RETURN_NONE; |
135 |
} |
136 |
else |
137 |
{ |
138 |
switch (type) |
139 |
{ |
140 |
case FTString: |
141 |
temp = DBFReadStringAttribute(handle, record, field); |
142 |
if (!temp) |
143 |
{ |
144 |
PyErr_Format(PyExc_IOError, |
145 |
"Can't read value for row %d column %d", |
146 |
record, field); |
147 |
return NULL; |
148 |
} |
149 |
return PyString_FromString(temp); |
150 |
|
151 |
case FTInteger: |
152 |
return PyInt_FromLong((long)DBFReadIntegerAttribute(handle, record, field)); |
153 |
|
154 |
case FTDouble: |
155 |
return PyFloat_FromDouble(DBFReadDoubleAttribute(handle, record, field)); |
156 |
|
157 |
default: |
158 |
PyErr_Format(PyExc_TypeError, "Invalid field data type %d", type); |
159 |
return NULL; |
160 |
} |
161 |
} |
162 |
} |
163 |
|
164 |
|
165 |
|
166 |
/* the read_attribute method. Return the value of the given record and |
167 |
* field as a python object of the appropriate type. |
168 |
*/ |
169 |
static PyObject* dbffile_read_attribute(DBFFileObject* self, PyObject* args) |
170 |
{ |
171 |
int record, field; |
172 |
|
173 |
if (!PyArg_ParseTuple(args, "ii", &record, &field)) return NULL; |
174 |
|
175 |
if (record < 0 || record >= DBFGetRecordCount(self->handle)) |
176 |
{ |
177 |
PyErr_Format(PyExc_ValueError, |
178 |
"record index %d out of bounds (record count: %d)", |
179 |
record, DBFGetRecordCount(self->handle)); |
180 |
return NULL; |
181 |
} |
182 |
|
183 |
if (field < 0 || field >= DBFGetFieldCount(self->handle)) |
184 |
{ |
185 |
PyErr_Format(PyExc_ValueError, |
186 |
"field index %d out of bounds (field count: %d)", |
187 |
field, DBFGetFieldCount(self->handle)); |
188 |
return NULL; |
189 |
} |
190 |
|
191 |
return do_read_attribute(self->handle, record, field, NULL); |
192 |
} |
193 |
|
194 |
|
195 |
|
196 |
/* the read_record method. Return the record record as a dictionary with |
197 |
* whose keys are the names of the fields, and their values as the |
198 |
* appropriate Python type. |
199 |
*/ |
200 |
static PyObject* dbffile_read_record(DBFFileObject* self, PyObject* args) |
201 |
{ |
202 |
int record; |
203 |
int num_fields; |
204 |
int i; |
205 |
char name[12]; |
206 |
PyObject *dict; |
207 |
PyObject *value = NULL; |
208 |
|
209 |
if (!PyArg_ParseTuple(args, "i", &record)) return NULL; |
210 |
|
211 |
if (record < 0 || record >= DBFGetRecordCount(self->handle)) |
212 |
{ |
213 |
PyErr_Format(PyExc_ValueError, |
214 |
"record index %d out of bounds (record count: %d)", |
215 |
record, DBFGetRecordCount(self->handle)); |
216 |
return NULL; |
217 |
} |
218 |
|
219 |
dict = PyDict_New(); |
220 |
if (!dict) return NULL; |
221 |
|
222 |
num_fields = DBFGetFieldCount(self->handle); |
223 |
for (i = 0; i < num_fields; i++) |
224 |
{ |
225 |
value = do_read_attribute(self->handle, record, i, name); |
226 |
if (!value || PyDict_SetItemString(dict, name, value) < 0) goto fail; |
227 |
Py_DECREF(value); |
228 |
value = NULL; |
229 |
} |
230 |
|
231 |
return dict; |
232 |
|
233 |
fail: |
234 |
Py_XDECREF(value); |
235 |
Py_DECREF(dict); |
236 |
return NULL; |
237 |
} |
238 |
|
239 |
|
240 |
|
241 |
/* write a single field of a record. */ |
242 |
static int do_write_field(DBFHandle handle, int record, int field, int type, PyObject* value) |
243 |
{ |
244 |
char * string_value; |
245 |
int int_value; |
246 |
double double_value; |
247 |
|
248 |
if (value == Py_None) |
249 |
{ |
250 |
if (!DBFWriteNULLAttribute(handle, record, field)) |
251 |
{ |
252 |
PyErr_Format(PyExc_IOError, |
253 |
"can't write NULL field %d of record %d", |
254 |
field, record); |
255 |
return 0; |
256 |
} |
257 |
} |
258 |
else |
259 |
{ |
260 |
switch (type) |
261 |
{ |
262 |
case FTString: |
263 |
string_value = PyString_AsString(value); |
264 |
if (!string_value) return 0; |
265 |
if (!DBFWriteStringAttribute(handle, record, field, string_value)) |
266 |
{ |
267 |
PyErr_Format(PyExc_IOError, |
268 |
"can't write field %d of record %d", |
269 |
field, record); |
270 |
return 0; |
271 |
} |
272 |
break; |
273 |
|
274 |
case FTInteger: |
275 |
int_value = PyInt_AsLong(value); |
276 |
if (int_value == -1 && PyErr_Occurred()) return 0; |
277 |
if (!DBFWriteIntegerAttribute(handle, record, field, int_value)) |
278 |
{ |
279 |
PyErr_Format(PyExc_IOError, |
280 |
"can't write field %d of record %d", |
281 |
field, record); |
282 |
return 0; |
283 |
} |
284 |
break; |
285 |
|
286 |
case FTDouble: |
287 |
double_value = PyFloat_AsDouble(value); |
288 |
if (double_value == -1 && PyErr_Occurred()) return 0; |
289 |
if (!DBFWriteDoubleAttribute(handle, record, field, double_value)) |
290 |
{ |
291 |
PyErr_Format(PyExc_IOError, |
292 |
"can't write field %d of record %d", |
293 |
field, record); |
294 |
return 0; |
295 |
} |
296 |
break; |
297 |
|
298 |
default: |
299 |
PyErr_Format(PyExc_TypeError, "Invalid field data type %d", type); |
300 |
return 0; |
301 |
} |
302 |
} |
303 |
|
304 |
return 1; |
305 |
} |
306 |
|
307 |
|
308 |
|
309 |
static PyObject* dbffile_write_field(DBFFileObject* self, PyObject* args) |
310 |
{ |
311 |
int record, field; |
312 |
PyObject* value; |
313 |
int type; |
314 |
|
315 |
if (!PyArg_ParseTuple(args, "iiO", &record, &field, &value)) return NULL; |
316 |
|
317 |
if (field < 0 || field >= DBFGetFieldCount(self->handle)) |
318 |
{ |
319 |
PyErr_Format(PyExc_ValueError, |
320 |
"field index %d out of bounds (field count: %d)", |
321 |
field, DBFGetFieldCount(self->handle)); |
322 |
return NULL; |
323 |
} |
324 |
|
325 |
type = DBFGetFieldInfo(self->handle, field, NULL, NULL, NULL); |
326 |
if (!do_write_field(self->handle, record, field, type, value)) return NULL; |
327 |
Py_RETURN_NONE; |
328 |
} |
329 |
|
330 |
|
331 |
|
332 |
static PyObject* dbffile_write_record(DBFFileObject* self, PyObject* args) |
333 |
{ |
334 |
int record; |
335 |
PyObject* record_object; |
336 |
int i, num_fields; |
337 |
|
338 |
int type; |
339 |
char name[12]; |
340 |
PyObject* value = NULL; |
341 |
|
342 |
if (!PyArg_ParseTuple(args, "iO", &record, &record_object)) return NULL; |
343 |
|
344 |
num_fields = DBFGetFieldCount(self->handle); |
345 |
|
346 |
/* mimic ShapeFile functionality where id = -1 means appending */ |
347 |
if (record == -1) |
348 |
{ |
349 |
record = num_fields; |
350 |
} |
351 |
|
352 |
if (PySequence_Check(record_object)) |
353 |
{ |
354 |
/* It's a sequence object. Iterate through all items in the |
355 |
* sequence and write them to the appropriate field. |
356 |
*/ |
357 |
if (PySequence_Length(record_object) != num_fields) |
358 |
{ |
359 |
PyErr_SetString(PyExc_TypeError, "record must have one item for each field"); |
360 |
return NULL; |
361 |
} |
362 |
for (i = 0; i < num_fields; ++i) |
363 |
{ |
364 |
type = DBFGetFieldInfo(self->handle, i, NULL, NULL, NULL); |
365 |
value = PySequence_GetItem(record_object, i); |
366 |
if (!value) return NULL; |
367 |
if (!do_write_field(self->handle, record, i, type, value)) |
368 |
{ |
369 |
Py_DECREF(value); |
370 |
return NULL; |
371 |
} |
372 |
Py_DECREF(value); |
373 |
} |
374 |
} |
375 |
else |
376 |
{ |
377 |
/* It's a dictionary-like object. Iterate over the names of the |
378 |
* known fields and write the corresponding item |
379 |
*/ |
380 |
for (i = 0; i < num_fields; ++i) |
381 |
{ |
382 |
name[0] = '\0'; |
383 |
type = DBFGetFieldInfo(self->handle, i, name, NULL, NULL); |
384 |
value = PyDict_GetItemString(record_object, name); |
385 |
if (value && !do_write_field(self->handle, record, i, type, value)) return NULL; |
386 |
} |
387 |
} |
388 |
|
389 |
return PyInt_FromLong((long)record); |
390 |
} |
391 |
|
392 |
|
393 |
|
394 |
static PyObject* dbffile_repr(DBFFileObject* self) |
395 |
{ |
396 |
/* TODO: it would be nice to do something like "dbflib.DBFFile(filename, mode)" instead */ |
397 |
return PyString_FromFormat("<dbflib.DBFFile object at %p>", self->handle); |
398 |
} |
/* The commit method implementation
 *
 * The method relies on the DBFUpdateHeader method which is not
 * available in shapelib <= 1.2.10. setup.py defines
 * HAVE_UPDATE_HEADER's value depending on whether the function is
 * available in the shapelib version the code is compiled with.
 */
#if HAVE_UPDATE_HEADER
/* commit() -- write the in-memory DBF header back to disk. */
static PyObject* dbffile_commit(DBFFileObject* self)
{
	DBFUpdateHeader(self->handle);
	Py_RETURN_NONE;
}
#endif
416 |
|
417 |
|
418 |
|
419 |
static struct PyMethodDef dbffile_methods[] = |
420 |
{ |
421 |
{"close", (PyCFunction)dbffile_close, METH_NOARGS, "close DBFFile"}, |
422 |
{"field_count", (PyCFunction)dbffile_field_count, METH_NOARGS, "return number of fields currently defined"}, |
423 |
{"record_count", (PyCFunction)dbffile_record_count, METH_NOARGS, "return number of records that currently exist"}, |
424 |
{"field_info", (PyCFunction)dbffile_field_info, METH_VARARGS, |
425 |
"returns info of a field as a tuple (type, name, width, decimals) with:\n" |
426 |
"-type: the type of the field corresponding to the integer value of one of the constants FTString, FTInteger, ...\n" |
427 |
"-name: the name of the field as a string\n" |
428 |
"-width: the width of the field as a number of characters\n" |
429 |
"-decimals: the number of decimal digits" }, |
430 |
{"add_field", (PyCFunction)dbffile_add_field, METH_VARARGS, |
431 |
"adds a new field and returns field index if successful\n" |
432 |
"-type: the type of the field corresponding to the integer value of one of the constants FTString, FTInteger, ...\n" |
433 |
"-name: the name of the field as a string\n" |
434 |
"-width: the width of the field as a number of characters\n" |
435 |
"-decimals: the number of decimal digits" }, |
436 |
{"read_attribute", (PyCFunction)dbffile_read_attribute, METH_VARARGS, "return the value of one field of a record"}, |
437 |
{"read_record", (PyCFunction)dbffile_read_record, METH_VARARGS, "return an entire record as a dict of field names and values"}, |
438 |
{"write_field", (PyCFunction)dbffile_write_field, METH_VARARGS, "write a single field of a record"}, |
439 |
{"write_record", (PyCFunction)dbffile_write_record, METH_VARARGS, "write an entire record as a dict or a sequence"}, |
440 |
#if HAVE_UPDATE_HEADER |
441 |
{"commit", (PyCFunction)dbffile_read_record, METH_NOARGS, NULL}, |
442 |
#endif |
443 |
{NULL} |
444 |
}; |
/* No attributes exposed via getset; the table must still exist and be
 * NULL-terminated for the type definition macro below. */
static struct PyGetSetDef dbffile_getsetters[] =
{
	{NULL}
};
452 |
|
453 |
|
454 |
|
455 |
static PyTypeObject DBFFileType = PYSHAPELIB_DEFINE_TYPE(DBFFileObject, dbffile, "shapelib.DBFFile", 0); |
456 |
|
457 |
|
458 |
|
459 |
/* --- dbflib -------------------------------------------------------------------------------------------------------- */ |
460 |
|
461 |
static PyObject* dbflib_open(PyObject* module, PyObject* args) |
462 |
{ |
463 |
return PyObject_CallObject((PyObject*)&DBFFileType, args); |
464 |
} |
465 |
|
466 |
|
467 |
|
468 |
static PyObject* dbflib_create(PyObject* module, PyObject* args) |
469 |
{ |
470 |
char* file; |
471 |
DBFFileObject* result; |
472 |
|
473 |
if (!PyArg_ParseTuple(args, "s", &file)) return NULL; |
474 |
|
475 |
result = PyObject_New(DBFFileObject, &DBFFileType); |
476 |
if (!result) |
477 |
{ |
478 |
return PyErr_NoMemory(); |
479 |
} |
480 |
|
481 |
result->handle = DBFCreate(file); |
482 |
if (!result->handle) |
483 |
{ |
484 |
PyObject_Del((PyObject*)result); |
485 |
PyErr_SetString(PyExc_RuntimeError, "Failed to create DBFFile"); |
486 |
return NULL; |
487 |
} |
488 |
|
489 |
return (PyObject*) result; |
490 |
} |
/* Module-level functions exported by dbflib. */
static struct PyMethodDef dbflib_methods[] =
{
	{"open", (PyCFunction)dbflib_open, METH_VARARGS, "open a DBFFile" },
	{"create", (PyCFunction)dbflib_create, METH_VARARGS, "create a DBFFile" },
	{NULL}
};
/* Module initialization entry point (Python 2 extension convention:
 * "init" + module name). Registers the DBFFile type, the FT* field
 * type constants, and a flag telling Python code whether commit()
 * is available in the shapelib this was built against. */
PyMODINIT_FUNC initdbflib(void)
{
	PyObject* module = Py_InitModule("dbflib", dbflib_methods);
	if (!module) return;

	PYSHAPELIB_ADD_TYPE(DBFFileType, "DBFFile");

	/* DBF field type constants (DBFFieldType values from shapelib) */
	PYSHAPELIB_ADD_CONSTANT(FTString);
	PYSHAPELIB_ADD_CONSTANT(FTInteger);
	PYSHAPELIB_ADD_CONSTANT(FTDouble);
	PYSHAPELIB_ADD_CONSTANT(FTInvalid);
	/* lets callers test for commit() without a try/except AttributeError */
	PyModule_AddIntConstant(module, "_have_commit", HAVE_UPDATE_HEADER);
}