1 |
bramz |
2742 |
#include "pyshapelib_common.h" |
2 |
jan |
1611 |
|
3 |
bramz |
2742 |
/* --- DBFFile ------------------------------------------------------------------------------------------------------- */ |
4 |
jan |
1611 |
|
5 |
bramz |
2742 |
/* Python object wrapping a shapelib DBF file handle. */
typedef struct {
	PyObject_HEAD
	DBFHandle handle;   /* NULL when no file is open (closed or never opened) */
} DBFFileObject;
9 |
jan |
1611 |
|
10 |
|
|
|
11 |
bh |
1917 |
|
12 |
bramz |
2742 |
/* allocator |
13 |
|
|
*/ |
14 |
|
|
static PyObject* dbffile_new(PyTypeObject* type, PyObject* args, PyObject* kwds) |
15 |
bh |
1917 |
{ |
16 |
bramz |
2742 |
DBFFileObject* self; |
17 |
|
|
self = (DBFFileObject*) type->tp_alloc(type, 0); |
18 |
|
|
self->handle = NULL; |
19 |
|
|
return (PyObject*) self; |
20 |
|
|
} |
21 |
bh |
1917 |
|
22 |
bramz |
2742 |
|
23 |
|
|
|
24 |
|
|
/* deallocator |
25 |
|
|
*/ |
26 |
|
|
static void dbffile_dealloc(DBFFileObject* self) |
27 |
|
|
{ |
28 |
|
|
DBFClose(self->handle); |
29 |
|
|
self->handle = NULL; |
30 |
|
|
self->ob_type->tp_free((PyObject*)self); |
31 |
|
|
} |
32 |
|
|
|
33 |
|
|
|
34 |
|
|
|
35 |
|
|
/* constructor |
36 |
|
|
*/ |
37 |
|
|
static int dbffile_init(DBFFileObject* self, PyObject* args, PyObject* kwds) |
38 |
|
|
{ |
39 |
|
|
char* file; |
40 |
|
|
char* mode = "rb"; |
41 |
bramz |
2745 |
static char *kwlist[] = {"name", "mode", NULL}; |
42 |
|
|
if (!PyArg_ParseTupleAndKeywords(args, kwds, "et|s:__init__", kwlist, |
43 |
|
|
Py_FileSystemDefaultEncoding, &file, &mode)) return -1; |
44 |
bramz |
2742 |
|
45 |
bramz |
2749 |
self->handle = DBFOpen(file, mode); |
46 |
|
|
if (!self->handle) |
47 |
|
|
{ |
48 |
|
|
PyErr_SetFromErrnoWithFilename(PyExc_IOError, file); |
49 |
|
|
} |
50 |
|
|
|
51 |
bramz |
2744 |
PyMem_Free(file); |
52 |
bramz |
2742 |
return self->handle ? 0 : -1; |
53 |
|
|
} |
54 |
bh |
1917 |
|
55 |
|
|
|
56 |
|
|
|
57 |
bramz |
2742 |
/* close() -> None
 *
 * Closes the underlying DBF file.  Safe to call more than once.
 */
static PyObject* dbffile_close(DBFFileObject* self)
{
	/* BUGFIX: guard against double close — DBFClose cannot take NULL */
	if (self->handle) DBFClose(self->handle);
	self->handle = NULL;
	Py_RETURN_NONE;
}
63 |
bh |
1917 |
|
64 |
|
|
|
65 |
bramz |
2742 |
|
66 |
|
|
/* field_count() -> number of fields currently defined in the DBF file */
static PyObject* dbffile_field_count(DBFFileObject* self)
{
	long count = DBFGetFieldCount(self->handle);
	return PyInt_FromLong(count);
}
70 |
|
|
|
71 |
jan |
1611 |
|
72 |
bramz |
2742 |
|
73 |
|
|
/* record_count() -> number of records currently stored in the DBF file */
static PyObject* dbffile_record_count(DBFFileObject* self)
{
	long count = DBFGetRecordCount(self->handle);
	return PyInt_FromLong(count);
}
77 |
jan |
1611 |
|
78 |
|
|
|
79 |
bramz |
2742 |
|
80 |
|
|
/* field_info(field_index) -> (type, name, width, decimals)
 *
 * Returns the definition of one field.  Raises ValueError for an
 * out-of-range field index.
 */
static PyObject* dbffile_field_info(DBFFileObject* self, PyObject* args)
{
	char field_name[12];  /* DBF field names: at most 11 chars + NUL */
	int field, width = 0, decimals = 0, field_type;

	if (!PyArg_ParseTuple(args, "i:field_info", &field)) return NULL;

	/* BUGFIX: validate the index here, consistent with read_attribute
	 * and write_field, instead of passing a bad index to shapelib */
	if (field < 0 || field >= DBFGetFieldCount(self->handle))
	{
		PyErr_Format(PyExc_ValueError,
			"field index %d out of bounds (field count: %d)",
			field, DBFGetFieldCount(self->handle));
		return NULL;
	}

	field_name[0] = '\0';
	field_type = DBFGetFieldInfo(self->handle, field, field_name, &width, &decimals);

	return Py_BuildValue("isii", field_type, field_name, width, decimals);
}
92 |
bh |
1917 |
|
93 |
jan |
1611 |
|
94 |
bh |
1917 |
|
95 |
bramz |
2742 |
/* add_field(name, type, width, decimals) -> index of the new field
 *
 * Raises ValueError if shapelib rejects the field definition.
 */
static PyObject* dbffile_add_field(DBFFileObject* self, PyObject* args)
{
	char* field_name;
	int field_type, field_width, field_decimals;
	int index;

	if (!PyArg_ParseTuple(args, "siii:add_field",
		&field_name, &field_type, &field_width, &field_decimals))
	{
		return NULL;
	}

	index = DBFAddField(self->handle, field_name,
		(DBFFieldType)field_type, field_width, field_decimals);
	if (index >= 0)
	{
		return PyInt_FromLong((long)index);
	}

	PyErr_SetString(PyExc_ValueError, "Failed to add field due to inappropriate field definition");
	return NULL;
}
112 |
|
|
|
113 |
|
|
|
114 |
|
|
|
115 |
bramz |
2742 |
/* Read one attribute from the dbf handle and return it as a new python object |
116 |
|
|
* |
117 |
|
|
* If an error occurs, set the appropriate Python exception and return |
118 |
|
|
* NULL. |
119 |
|
|
* |
120 |
|
|
* Assume that the values of the record and field arguments are valid. |
121 |
|
|
* The name argument will be passed to DBFGetFieldInfo as is and should |
122 |
|
|
* thus be either NULL or a pointer to an array of at least 12 chars |
123 |
|
|
*/ |
124 |
|
|
static PyObject* do_read_attribute(DBFHandle handle, int record, int field, char * name) |
125 |
jan |
1611 |
{ |
126 |
bramz |
2742 |
int type, width; |
127 |
|
|
const char* temp; |
128 |
|
|
type = DBFGetFieldInfo(handle, field, name, &width, NULL); |
129 |
|
|
|
130 |
|
|
/* For strings NULL and the empty string are indistinguishable |
131 |
|
|
* in DBF files. We prefer empty strings instead for backwards |
132 |
|
|
* compatibility reasons because older wrapper versions returned |
133 |
|
|
* emtpy strings as empty strings. |
134 |
|
|
*/ |
135 |
|
|
if (type != FTString && DBFIsAttributeNULL(handle, record, field)) |
136 |
jan |
1611 |
{ |
137 |
bramz |
2742 |
Py_RETURN_NONE; |
138 |
jan |
1611 |
} |
139 |
bramz |
2742 |
else |
140 |
jan |
1611 |
{ |
141 |
bramz |
2742 |
switch (type) |
142 |
|
|
{ |
143 |
|
|
case FTString: |
144 |
|
|
temp = DBFReadStringAttribute(handle, record, field); |
145 |
bramz |
2745 |
if (temp) return PyString_FromString(temp); |
146 |
jan |
1611 |
|
147 |
bramz |
2742 |
case FTInteger: |
148 |
|
|
return PyInt_FromLong((long)DBFReadIntegerAttribute(handle, record, field)); |
149 |
jan |
1611 |
|
150 |
bramz |
2742 |
case FTDouble: |
151 |
|
|
return PyFloat_FromDouble(DBFReadDoubleAttribute(handle, record, field)); |
152 |
bramz |
2745 |
|
153 |
|
|
case FTLogical: |
154 |
|
|
temp = DBFReadLogicalAttribute(handle, record, field); |
155 |
|
|
if (temp) |
156 |
|
|
{ |
157 |
|
|
switch (temp[0]) |
158 |
|
|
{ |
159 |
|
|
case 'F': |
160 |
|
|
case 'N': |
161 |
|
|
Py_RETURN_FALSE; |
162 |
|
|
case 'T': |
163 |
|
|
case 'Y': |
164 |
|
|
Py_RETURN_TRUE; |
165 |
|
|
} |
166 |
|
|
} |
167 |
|
|
break; |
168 |
jan |
1611 |
|
169 |
bramz |
2742 |
default: |
170 |
|
|
PyErr_Format(PyExc_TypeError, "Invalid field data type %d", type); |
171 |
|
|
return NULL; |
172 |
|
|
} |
173 |
jan |
1611 |
} |
174 |
bramz |
2745 |
|
175 |
|
|
PyErr_Format(PyExc_IOError, "Can't read value for row %d column %d", record, field); |
176 |
|
|
return NULL; |
177 |
bramz |
2742 |
} |
178 |
jan |
1611 |
|
179 |
|
|
|
180 |
bramz |
2742 |
|
181 |
|
|
/* the read_attribute method. Return the value of the given record and |
182 |
|
|
* field as a python object of the appropriate type. |
183 |
|
|
*/ |
184 |
|
|
static PyObject* dbffile_read_attribute(DBFFileObject* self, PyObject* args) |
185 |
jan |
1611 |
{ |
186 |
bramz |
2742 |
int record, field; |
187 |
jan |
1611 |
|
188 |
bramz |
2744 |
if (!PyArg_ParseTuple(args, "ii:read_field", &record, &field)) return NULL; |
189 |
bramz |
2742 |
|
190 |
|
|
if (record < 0 || record >= DBFGetRecordCount(self->handle)) |
191 |
|
|
{ |
192 |
|
|
PyErr_Format(PyExc_ValueError, |
193 |
|
|
"record index %d out of bounds (record count: %d)", |
194 |
|
|
record, DBFGetRecordCount(self->handle)); |
195 |
|
|
return NULL; |
196 |
|
|
} |
197 |
jan |
1611 |
|
198 |
bramz |
2742 |
if (field < 0 || field >= DBFGetFieldCount(self->handle)) |
199 |
jan |
1611 |
{ |
200 |
bramz |
2742 |
PyErr_Format(PyExc_ValueError, |
201 |
|
|
"field index %d out of bounds (field count: %d)", |
202 |
|
|
field, DBFGetFieldCount(self->handle)); |
203 |
|
|
return NULL; |
204 |
jan |
1611 |
} |
205 |
bramz |
2742 |
|
206 |
|
|
return do_read_attribute(self->handle, record, field, NULL); |
207 |
|
|
} |
208 |
|
|
|
209 |
|
|
|
210 |
|
|
|
211 |
|
|
/* the read_record method. Return the record record as a dictionary with
 * whose keys are the names of the fields, and their values as the
 * appropriate Python type.
 */
static PyObject* dbffile_read_record(DBFFileObject* self, PyObject* args)
{
	int record;
	int num_fields;
	int i;
	char name[12];       /* filled by do_read_attribute via DBFGetFieldInfo */
	PyObject *dict;
	PyObject *value = NULL;

	if (!PyArg_ParseTuple(args, "i:read_record", &record)) return NULL;

	if (record < 0 || record >= DBFGetRecordCount(self->handle))
	{
		PyErr_Format(PyExc_ValueError,
			"record index %d out of bounds (record count: %d)",
			record, DBFGetRecordCount(self->handle));
		return NULL;
	}

	dict = PyDict_New();
	if (!dict) return NULL;

	num_fields = DBFGetFieldCount(self->handle);
	for (i = 0; i < num_fields; i++)
	{
		/* do_read_attribute writes the field's name into `name` */
		value = do_read_attribute(self->handle, record, i, name);
		/* PyDict_SetItemString adds its own reference to value */
		if (!value || PyDict_SetItemString(dict, name, value) < 0) goto fail;
		Py_DECREF(value);
		value = NULL;
	}

	return dict;

fail:
	/* value may be NULL (read failed) or still owned (SetItem failed) */
	Py_XDECREF(value);
	Py_DECREF(dict);
	return NULL;
}
253 |
|
|
|
254 |
|
|
|
255 |
bramz |
2742 |
|
256 |
|
|
/* write a single field of a record.
 *
 * Converts `value` according to the declared field `type` and writes it
 * at (record, field).  Python None is stored as a DBF NULL attribute.
 * Returns 1 on success; returns 0 with a Python exception set on
 * conversion failure, unknown type, or shapelib write failure.
 */
static int do_write_field(DBFHandle handle, int record, int field, int type, PyObject* value)
{
	char * string_value;
	int int_value;
	double double_value;
	int logical_value;

	if (value == Py_None)
	{
		/* None maps to DBF NULL regardless of the field type */
		if (DBFWriteNULLAttribute(handle, record, field)) return 1;
	}
	else
	{
		switch (type)
		{
		case FTString:
			string_value = PyString_AsString(value);
			if (!string_value) return 0;  /* TypeError already set */
			if (DBFWriteStringAttribute(handle, record, field, string_value)) return 1;
			break;

		case FTInteger:
			int_value = PyInt_AsLong(value);
			/* -1 is a legal value; only an error if an exception is pending */
			if (int_value == -1 && PyErr_Occurred()) return 0;
			if (DBFWriteIntegerAttribute(handle, record, field, int_value)) return 1;
			break;

		case FTDouble:
			double_value = PyFloat_AsDouble(value);
			if (double_value == -1 && PyErr_Occurred()) return 0;
			if (DBFWriteDoubleAttribute(handle, record, field, double_value)) return 1;
			break;

		case FTLogical:
			logical_value = PyObject_IsTrue(value);
			if (logical_value == -1) return 0;  /* truth test raised */
			/* DBF logical fields store single characters */
			if (DBFWriteLogicalAttribute(handle, record, field, logical_value ? 'T' : 'F')) return 1;
			break;

		default:
			PyErr_Format(PyExc_TypeError, "Invalid field data type %d", type);
			return 0;
		}
	}

	/* reached only when a DBFWrite* call reported failure */
	PyErr_Format(PyExc_IOError, "can't write field %d of record %d", field, record);
	return 0;
}
305 |
jan |
1611 |
|
306 |
|
|
|
307 |
|
|
|
308 |
bramz |
2742 |
/* write_field(record_index, field_index, new_value) -> None
 *
 * Writes one value into a single field of a record.  Raises ValueError
 * for an out-of-range field index.
 */
static PyObject* dbffile_write_field(DBFFileObject* self, PyObject* args)
{
	int record, field;
	PyObject* value;
	int field_type;
	int field_total;

	if (!PyArg_ParseTuple(args, "iiO:write_field", &record, &field, &value)) return NULL;

	field_total = DBFGetFieldCount(self->handle);
	if (field < 0 || field >= field_total)
	{
		PyErr_Format(PyExc_ValueError,
			"field index %d out of bounds (field count: %d)",
			field, field_total);
		return NULL;
	}

	field_type = DBFGetFieldInfo(self->handle, field, NULL, NULL, NULL);
	if (!do_write_field(self->handle, record, field, field_type, value)) return NULL;
	Py_RETURN_NONE;
}
328 |
|
|
|
329 |
|
|
|
330 |
|
|
|
331 |
bramz |
2742 |
/* write_record(record_index, record) -> record_index
 *
 * `record` is either a sequence with one item per field (in field
 * order) or a mapping from field names to values.  A record_index of
 * -1 appends a new record.  Returns the index actually written.
 */
static PyObject* dbffile_write_record(DBFFileObject* self, PyObject* args)
{
	int record;
	PyObject* record_object;
	int i, num_fields;

	int type;
	char name[12];
	PyObject* value = NULL;

	if (!PyArg_ParseTuple(args, "iO:write_record", &record, &record_object)) return NULL;

	num_fields = DBFGetFieldCount(self->handle);

	/* mimic ShapeFile functionality where id = -1 means appending */
	if (record == -1)
	{
		/* BUGFIX: appending means writing one past the last record;
		 * the old code wrongly used the *field* count here. */
		record = DBFGetRecordCount(self->handle);
	}

	if (PySequence_Check(record_object))
	{
		/* It's a sequence object. Iterate through all items in the
		 * sequence and write them to the appropriate field.
		 */
		if (PySequence_Length(record_object) != num_fields)
		{
			PyErr_SetString(PyExc_TypeError, "record must have one item for each field");
			return NULL;
		}
		for (i = 0; i < num_fields; ++i)
		{
			type = DBFGetFieldInfo(self->handle, i, NULL, NULL, NULL);
			value = PySequence_GetItem(record_object, i);  /* new reference */
			if (!value) return NULL;
			if (!do_write_field(self->handle, record, i, type, value))
			{
				Py_DECREF(value);
				return NULL;
			}
			Py_DECREF(value);
		}
	}
	else
	{
		/* It's a dictionary-like object. Iterate over the names of the
		 * known fields and write the corresponding item
		 */
		for (i = 0; i < num_fields; ++i)
		{
			name[0] = '\0';
			type = DBFGetFieldInfo(self->handle, i, name, NULL, NULL);
			/* borrowed reference; fields absent from the dict are skipped */
			value = PyDict_GetItemString(record_object, name);
			if (value && !do_write_field(self->handle, record, i, type, value)) return NULL;
		}
	}

	return PyInt_FromLong((long)record);
}
390 |
jan |
1611 |
|
391 |
|
|
|
392 |
|
|
|
393 |
bramz |
2742 |
/* repr(DBFFile object) */
static PyObject* dbffile_repr(DBFFileObject* self)
{
	/* TODO: it would be nice to do something like "dbflib.DBFFile(filename, mode)" instead */
	/* BUGFIX: report the address of the Python object itself (as the
	 * default repr does) instead of the internal DBF handle, which is
	 * NULL once the file is closed */
	return PyString_FromFormat("<dbflib.DBFFile object at %p>", (void*)self);
}
398 |
|
|
|
399 |
|
|
|
400 |
|
|
|
401 |
bramz |
2742 |
/* The commit method implementation
 *
 * The method relies on the DBFUpdateHeader method which is not
 * available in shapelib <= 1.2.10. setup.py defines
 * HAVE_UPDATE_HEADER's value depending on whether the function is
 * available in the shapelib version the code is compiled with.
 */
#if HAVE_UPDATE_HEADER
/* commit() -> None: flushes the current DBF header to disk */
static PyObject* dbffile_commit(DBFFileObject* self)
{
	DBFUpdateHeader(self->handle);
	Py_RETURN_NONE;
}
#endif
415 |
jan |
1611 |
|
416 |
|
|
|
417 |
|
|
|
418 |
bramz |
2742 |
static struct PyMethodDef dbffile_methods[] = |
419 |
|
|
{ |
420 |
bramz |
2744 |
{"close", (PyCFunction)dbffile_close, METH_NOARGS, |
421 |
bramz |
2745 |
"close() -> None\n\n" |
422 |
|
|
"closes DBFFile"}, |
423 |
bramz |
2744 |
{"field_count", (PyCFunction)dbffile_field_count, METH_NOARGS, |
424 |
bramz |
2745 |
"field_count() -> integer\n\n" |
425 |
bramz |
2744 |
"returns number of fields currently defined"}, |
426 |
|
|
{"record_count", (PyCFunction)dbffile_record_count, METH_NOARGS, |
427 |
bramz |
2745 |
"record_count() -> integer\n\n" |
428 |
bramz |
2744 |
"returns number of records that currently exist"}, |
429 |
|
|
{"field_info", (PyCFunction)dbffile_field_info, METH_VARARGS, |
430 |
bramz |
2745 |
"field_info(field_index) -> (type, name, width, decimals)\n\n" |
431 |
|
|
"returns info of a field as a tuple with:\n" |
432 |
|
|
"- type: the type of the field corresponding to the integer value of one " |
433 |
|
|
" of the constants FTString, FTInteger, ...\n" |
434 |
|
|
"- name: the name of the field as a string\n" |
435 |
|
|
"- width: the width of the field as a number of characters\n" |
436 |
|
|
"- decimals: the number of decimal digits" }, |
437 |
bramz |
2742 |
{"add_field", (PyCFunction)dbffile_add_field, METH_VARARGS, |
438 |
bramz |
2745 |
"add_field(type, name, width, decimals) -> field_index\n\n" |
439 |
bramz |
2742 |
"adds a new field and returns field index if successful\n" |
440 |
bramz |
2745 |
"- type: the type of the field corresponding to the integer value of one " |
441 |
|
|
" of the constants FTString, FTInteger, ...\n" |
442 |
|
|
"- name: the name of the field as a string\n" |
443 |
|
|
"- width: the width of the field as a number of characters\n" |
444 |
|
|
"- decimals: the number of decimal digits" }, |
445 |
bramz |
2744 |
{"read_attribute", (PyCFunction)dbffile_read_attribute, METH_VARARGS, |
446 |
bramz |
2745 |
"read_attribute(record_index, field_index) -> value\n\n" |
447 |
|
|
"returns the value of one field of a record"}, |
448 |
bramz |
2744 |
{"read_record", (PyCFunction)dbffile_read_record, METH_VARARGS, |
449 |
bramz |
2745 |
"read_record(record_index) -> dict\n\n" |
450 |
|
|
"returns an entire record as a dictionary of field names and values"}, |
451 |
bramz |
2744 |
{"write_field", (PyCFunction)dbffile_write_field, METH_VARARGS, |
452 |
|
|
"write_field(record_index, field_index, new_value)\n" |
453 |
bramz |
2745 |
"writes a single field of a record"}, |
454 |
bramz |
2744 |
{"write_record", (PyCFunction)dbffile_write_record, METH_VARARGS, |
455 |
bramz |
2745 |
"write_record(record_index, record) -> record_index\n\n" |
456 |
|
|
"Writes an entire record as a dict or a sequence, and return index of record\n" |
457 |
|
|
"Record can either be a dictionary in which case the keys are used as field names, " |
458 |
bramz |
2744 |
"or a sequence that must have an item for every field (length = field_count())"}, |
459 |
bramz |
2742 |
#if HAVE_UPDATE_HEADER |
460 |
bramz |
2750 |
{"commit", (PyCFunction)dbffile_commit, METH_NOARGS, |
461 |
bramz |
2745 |
"commit() -> None"}, |
462 |
bramz |
2742 |
#endif |
463 |
|
|
{NULL} |
464 |
|
|
}; |
465 |
jan |
1611 |
|
466 |
|
|
|
467 |
bh |
1917 |
|
468 |
bramz |
2742 |
/* attribute getters/setters of the DBFFile type — none are exposed,
 * so the table contains only the NULL sentinel */
static struct PyGetSetDef dbffile_getsetters[] =
{
	{NULL}
};
472 |
jan |
1611 |
|
473 |
|
|
|
474 |
|
|
|
475 |
bramz |
2742 |
/* NOTE(review): the published type name is "shapelib.DBFFile" although this
 * type belongs to the dbflib module (repr above says "dbflib.DBFFile") —
 * looks like a copy/paste slip; confirm before changing the visible name. */
static PyTypeObject DBFFileType = PYSHAPELIB_DEFINE_TYPE(DBFFileObject, dbffile, "shapelib.DBFFile", 0);
476 |
bh |
2453 |
|
477 |
|
|
|
478 |
jan |
1611 |
|
479 |
bramz |
2742 |
/* --- dbflib -------------------------------------------------------------------------------------------------------- */ |
480 |
jan |
1611 |
|
481 |
bramz |
2742 |
/* open(name[, mode]) -> DBFFile
 *
 * Thin wrapper that forwards its arguments to the DBFFile constructor. */
static PyObject* dbflib_open(PyObject* module, PyObject* args)
{
	return PyObject_CallObject((PyObject*)&DBFFileType, args);
}
485 |
jan |
1611 |
|
486 |
|
|
|
487 |
|
|
|
488 |
bramz |
2742 |
/* create(name) -> DBFFile
 *
 * Creates a new DBF file on disk and returns a DBFFile wrapping it.
 * Raises RuntimeError when shapelib fails to create the file.
 */
static PyObject* dbflib_create(PyObject* module, PyObject* args)
{
	char* file;
	DBFFileObject* result;

	if (!PyArg_ParseTuple(args, "et:create", Py_FileSystemDefaultEncoding, &file)) return NULL;

	result = PyObject_New(DBFFileObject, &DBFFileType);
	if (!result)
	{
		PyMem_Free(file);  /* BUGFIX: the "et" buffer was leaked */
		return PyErr_NoMemory();
	}

	result->handle = DBFCreate(file);
	/* BUGFIX: release the "et"-allocated filename on every path
	 * (dbffile_init already did this; create never did) */
	PyMem_Free(file);
	if (!result->handle)
	{
		PyObject_Del((PyObject*)result);
		PyErr_SetString(PyExc_RuntimeError, "Failed to create DBFFile");
		return NULL;
	}

	return (PyObject*) result;
}
511 |
jan |
1611 |
|
512 |
|
|
|
513 |
|
|
|
514 |
bramz |
2742 |
/* module-level functions of the dbflib module */
static struct PyMethodDef dbflib_methods[] =
{
	{"open", (PyCFunction)dbflib_open, METH_VARARGS,
		"open(name [, mode]) -> DBFFile\n\n"
		"opens a DBFFile" },
	{"create", (PyCFunction)dbflib_create, METH_VARARGS,
		"create(name) -> DBFFile\n\n"
		"create a DBFFile" },
	{NULL}
};
524 |
jan |
1611 |
|
525 |
bh |
2212 |
|
526 |
|
|
|
527 |
bramz |
2742 |
/* module initialization: creates the "dbflib" module, registers the
 * DBFFile type and exports the FT* field-type constants. */
PyMODINIT_FUNC initdbflib(void)
{
	PyObject* module = Py_InitModule("dbflib", dbflib_methods);
	if (!module) return;

	PYSHAPELIB_ADD_TYPE(DBFFileType, "DBFFile");

	PYSHAPELIB_ADD_CONSTANT(FTString);
	PYSHAPELIB_ADD_CONSTANT(FTInteger);
	PYSHAPELIB_ADD_CONSTANT(FTDouble);
	PYSHAPELIB_ADD_CONSTANT(FTLogical);
	PYSHAPELIB_ADD_CONSTANT(FTInvalid);
	/* lets Python callers detect whether commit()/DBFUpdateHeader exists */
	PyModule_AddIntConstant(module, "_have_commit", HAVE_UPDATE_HEADER);
}