@@ new file lines 36-89: dbffile_init @@
 */
 static int dbffile_init(DBFFileObject* self, PyObject* args, PyObject* kwds)
 {
-    char* file;
+    char* file = NULL;
     char* mode = "rb";
-    if (kwds != NULL && PyDict_Size(kwds) > 0)
-    {
-        PyErr_Format(PyExc_TypeError, "dbflib.DBFFile.__init__ takes no keyword arguments");
-        return -1;
-    }
-    if (!PyArg_ParseTuple(args, "s|s", &file, &mode)) return -1;
-    self->handle = DBFOpen(file, mode);
-    return self->handle ? 0 : -1;
+    static char *kwlist[] = {"name", "mode", NULL};
+
+    DBFClose(self->handle);
+    self->handle = NULL;
+
+#if defined(SHPAPI_HAS_WIDE) && defined(Py_WIN_WIDE_FILENAMES)
+    if (GetVersion() < 0x80000000) {    /* On NT, so wide API available */
+        PyObject *wfile;
+        if (PyArg_ParseTupleAndKeywords(args, kwds, "U|s:DBFFile", kwlist, &wfile, &mode))
+        {
+            PyObject *wmode = PyUnicode_DecodeASCII(mode, strlen(mode), NULL);
+            if (!wmode) return -1;
+            self->handle = DBFOpenW(PyUnicode_AS_UNICODE(wfile), PyUnicode_AS_UNICODE(wmode));
+            Py_DECREF(wmode);
+            if (!self->handle)
+            {
+                PyErr_SetFromErrnoWithFilenameObject(PyExc_IOError, wfile);
+                return -1;
+            }
+        }
+        else
+        {
+            /* Drop the argument parsing error as narrow
+               strings are also valid. */
+            PyErr_Clear();
+        }
+    }
+#endif
+
+    if (!self->handle)
+    {
+        if (!PyArg_ParseTupleAndKeywords(args, kwds, "et|s:DBFFile", kwlist,
+                Py_FileSystemDefaultEncoding, &file, &mode)) return -1;
+        self->handle = DBFOpen(file, mode);
+
+        if (!self->handle)
+        {
+            PyErr_SetFromErrnoWithFilename(PyExc_IOError, file);
+            PyMem_Free(file);
+            return -1;
+        }
+
+        PyMem_Free(file);
+    }
+
+    return 0;
 }
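Note: both branches of the rewritten initializer end in shapelib's own DBF calls and merely store the returned handle on the wrapper object. For reference, a minimal standalone sketch of that underlying open/inspect/close sequence, assuming shapelib's shapefil.h header and a hypothetical example.dbf (neither is part of this change):

    #include <stdio.h>
    #include "shapefil.h"   /* shapelib DBF API: DBFOpen, DBFGetRecordCount, ... */

    int main(void)
    {
        /* Same call the narrow-string branch of dbffile_init falls back to. */
        DBFHandle handle = DBFOpen("example.dbf", "rb");
        if (!handle)
        {
            perror("example.dbf");   /* the wrapper raises IOError from errno instead */
            return 1;
        }

        printf("%d records, %d fields\n",
               DBFGetRecordCount(handle), DBFGetFieldCount(handle));

        DBFClose(handle);            /* what the wrapper does on close */
        return 0;
    }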
|
|
@@ new file lines 116-122: field_info argument parsing @@
     char field_name[12];
     int field, width = 0, decimals = 0, field_type;
 
-    if (!PyArg_ParseTuple(args, "i", &field)) return NULL;
+    if (!PyArg_ParseTuple(args, "i:field_info", &field)) return NULL;
 
     field_name[0] = '\0';
     field_type = DBFGetFieldInfo(self->handle, field, field_name, &width, &decimals);
@@ new file lines 132-138: add_field argument parsing @@
     int type, width, decimals;
     int field;
 
-    if (!PyArg_ParseTuple(args, "siii", &name, &type, &width, &decimals)) return NULL;
+    if (!PyArg_ParseTuple(args, "siii:add_field", &name, &type, &width, &decimals)) return NULL;
 
     field = DBFAddField(self->handle, name, (DBFFieldType)type, width, decimals);
@@ new file lines 176-213: reading attribute values @@
     {
     case FTString:
         temp = DBFReadStringAttribute(handle, record, field);
-        if (!temp)
-        {
-            PyErr_Format(PyExc_IOError,
-                "Can't read value for row %d column %d",
-                record, field);
-            return NULL;
-        }
-        return PyString_FromString(temp);
+        if (temp) return PyString_FromString(temp);
 
     case FTInteger:
         return PyInt_FromLong((long)DBFReadIntegerAttribute(handle, record, field));
 
     case FTDouble:
         return PyFloat_FromDouble(DBFReadDoubleAttribute(handle, record, field));
 
+    case FTLogical:
+        temp = DBFReadLogicalAttribute(handle, record, field);
+        if (temp)
+        {
+            switch (temp[0])
+            {
+            case 'F':
+            case 'N':
+                Py_RETURN_FALSE;
+            case 'T':
+            case 'Y':
+                Py_RETURN_TRUE;
+            }
+        }
+        break;
+
     default:
         PyErr_Format(PyExc_TypeError, "Invalid field data type %d", type);
         return NULL;
     }
     }
 
+    PyErr_Format(PyExc_IOError, "Can't read value for row %d column %d", record, field);
+    return NULL;
 }
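Note: the new FTLogical branch relies on shapelib handing back the raw flag character and maps 'T'/'Y' and 'F'/'N' onto Python booleans. A hypothetical standalone sketch of the same interpretation done directly against shapelib, assuming an already-open handle and a made-up field index:

    #include "shapefil.h"

    /* Hypothetical helper: 1 for true, 0 for false, -1 when the flag is
     * missing or unrecognised (the wrapper raises IOError in that case). */
    static int read_logical_as_bool(DBFHandle handle, int record, int field)
    {
        const char* flag = DBFReadLogicalAttribute(handle, record, field);
        if (flag && (flag[0] == 'T' || flag[0] == 'Y')) return 1;
        if (flag && (flag[0] == 'F' || flag[0] == 'N')) return 0;
        return -1;
    }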
|
|
@@ new file lines 219-225: record/field argument parsing @@
 {
     int record, field;
 
-    if (!PyArg_ParseTuple(args, "ii", &record, &field)) return NULL;
+    if (!PyArg_ParseTuple(args, "ii:read_field", &record, &field)) return NULL;
 
     if (record < 0 || record >= DBFGetRecordCount(self->handle))
     {
@@ new file lines 255-261: read_record argument parsing @@
     PyObject *dict;
     PyObject *value = NULL;
 
-    if (!PyArg_ParseTuple(args, "i", &record)) return NULL;
+    if (!PyArg_ParseTuple(args, "i:read_record", &record)) return NULL;
 
     if (record < 0 || record >= DBFGetRecordCount(self->handle))
     {
@@ new file lines 293-303: writing attribute values @@
     char * string_value;
     int int_value;
     double double_value;
+    int logical_value;
 
     if (value == Py_None)
     {
-        if (!DBFWriteNULLAttribute(handle, record, field))
-        {
-            PyErr_Format(PyExc_IOError,
-                "can't write NULL field %d of record %d",
-                field, record);
-            return 0;
-        }
+        if (DBFWriteNULLAttribute(handle, record, field)) return 1;
     }
     else
     {
@@ new file lines 306-330 @@
     case FTString:
         string_value = PyString_AsString(value);
         if (!string_value) return 0;
-        if (!DBFWriteStringAttribute(handle, record, field, string_value))
-        {
-            PyErr_Format(PyExc_IOError,
-                "can't write field %d of record %d",
-                field, record);
-            return 0;
-        }
+        if (DBFWriteStringAttribute(handle, record, field, string_value)) return 1;
         break;
 
     case FTInteger:
         int_value = PyInt_AsLong(value);
         if (int_value == -1 && PyErr_Occurred()) return 0;
-        if (!DBFWriteIntegerAttribute(handle, record, field, int_value))
-        {
-            PyErr_Format(PyExc_IOError,
-                "can't write field %d of record %d",
-                field, record);
-            return 0;
-        }
+        if (DBFWriteIntegerAttribute(handle, record, field, int_value)) return 1;
         break;
 
     case FTDouble:
         double_value = PyFloat_AsDouble(value);
         if (double_value == -1 && PyErr_Occurred()) return 0;
-        if (!DBFWriteDoubleAttribute(handle, record, field, double_value))
-        {
-            PyErr_Format(PyExc_IOError,
-                "can't write field %d of record %d",
-                field, record);
-            return 0;
-        }
+        if (DBFWriteDoubleAttribute(handle, record, field, double_value)) return 1;
+        break;
+
+    case FTLogical:
+        logical_value = PyObject_IsTrue(value);
+        if (logical_value == -1) return 0;
+        if (DBFWriteLogicalAttribute(handle, record, field, logical_value ? 'T' : 'F')) return 1;
         break;
 
     default:
@@ new file lines 333-340 @@
     }
     }
 
-    return 1;
+    PyErr_Format(PyExc_IOError, "can't write field %d of record %d", field, record);
+    return 0;
 }
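Note: the write helper now funnels every failure into the single PyErr_Format at the end and converts the Python truth value to the one-character flag shapelib expects for logical fields. A hypothetical sketch of the corresponding direct shapelib calls, assuming an already-open handle and made-up field indices 0-3 holding string, integer, double and logical fields:

    #include "shapefil.h"

    /* Hypothetical: returns 1 on success and 0 on failure, matching the
     * convention of the wrapper's write helper. */
    static int write_sample_row(DBFHandle handle, int record)
    {
        if (!DBFWriteStringAttribute(handle, record, 0, "Ada")) return 0;
        if (!DBFWriteIntegerAttribute(handle, record, 1, 42)) return 0;
        if (!DBFWriteDoubleAttribute(handle, record, 2, 3.14)) return 0;

        /* Logical fields take a single flag character, hence the
         * logical_value ? 'T' : 'F' conversion above. */
        if (!DBFWriteLogicalAttribute(handle, record, 3, 'T')) return 0;

        /* A Python None goes through DBFWriteNULLAttribute instead. */
        return 1;
    }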
|
|
@@ new file lines 345-351: write_field argument parsing @@
     PyObject* value;
     int type;
 
-    if (!PyArg_ParseTuple(args, "iiO", &record, &field, &value)) return NULL;
+    if (!PyArg_ParseTuple(args, "iiO:write_field", &record, &field, &value)) return NULL;
 
     if (field < 0 || field >= DBFGetFieldCount(self->handle))
     {
@@ new file lines 372-378: write_record argument parsing @@
     char name[12];
     PyObject* value = NULL;
 
-    if (!PyArg_ParseTuple(args, "iO", &record, &record_object)) return NULL;
+    if (!PyArg_ParseTuple(args, "iO:write_record", &record, &record_object)) return NULL;
 
     num_fields = DBFGetFieldCount(self->handle);
@@ new file lines 451-498: dbffile_methods @@
 static struct PyMethodDef dbffile_methods[] =
 {
-    {"close", (PyCFunction)dbffile_close, METH_NOARGS, "close DBFFile"},
-    {"field_count", (PyCFunction)dbffile_field_count, METH_NOARGS, "return number of fields currently defined"},
-    {"record_count", (PyCFunction)dbffile_record_count, METH_NOARGS, "return number of records that currently exist"},
-    {"field_info", (PyCFunction)dbffile_field_info, METH_VARARGS,
-     "returns info of a field as a tuple (type, name, width, decimals) with:\n"
-     "-type: the type of the field corresponding to the integer value of one of the constants FTString, FTInteger, ...\n"
-     "-name: the name of the field as a string\n"
-     "-width: the width of the field as a number of characters\n"
-     "-decimals: the number of decimal digits" },
+    {"close", (PyCFunction)dbffile_close, METH_NOARGS,
+     "close() -> None\n\n"
+     "closes DBFFile"},
+    {"field_count", (PyCFunction)dbffile_field_count, METH_NOARGS,
+     "field_count() -> integer\n\n"
+     "returns number of fields currently defined"},
+    {"record_count", (PyCFunction)dbffile_record_count, METH_NOARGS,
+     "record_count() -> integer\n\n"
+     "returns number of records that currently exist"},
+    {"field_info", (PyCFunction)dbffile_field_info, METH_VARARGS,
+     "field_info(field_index) -> (type, name, width, decimals)\n\n"
+     "returns info of a field as a tuple with:\n"
+     "- type: the type of the field corresponding to the integer value of one "
+     " of the constants FTString, FTInteger, ...\n"
+     "- name: the name of the field as a string\n"
+     "- width: the width of the field as a number of characters\n"
+     "- decimals: the number of decimal digits" },
     {"add_field", (PyCFunction)dbffile_add_field, METH_VARARGS,
+     "add_field(type, name, width, decimals) -> field_index\n\n"
      "adds a new field and returns field index if successful\n"
-     "-type: the type of the field corresponding to the integer value of one of the constants FTString, FTInteger, ...\n"
-     "-name: the name of the field as a string\n"
-     "-width: the width of the field as a number of characters\n"
-     "-decimals: the number of decimal digits" },
-    {"read_attribute", (PyCFunction)dbffile_read_attribute, METH_VARARGS, "return the value of one field of a record"},
-    {"read_record", (PyCFunction)dbffile_read_record, METH_VARARGS, "return an entire record as a dict of field names and values"},
-    {"write_field", (PyCFunction)dbffile_write_field, METH_VARARGS, "write a single field of a record"},
-    {"write_record", (PyCFunction)dbffile_write_record, METH_VARARGS, "write an entire record as a dict or a sequence"},
+     "- type: the type of the field corresponding to the integer value of one "
+     " of the constants FTString, FTInteger, ...\n"
+     "- name: the name of the field as a string\n"
+     "- width: the width of the field as a number of characters\n"
+     "- decimals: the number of decimal digits" },
+    {"read_attribute", (PyCFunction)dbffile_read_attribute, METH_VARARGS,
+     "read_attribute(record_index, field_index) -> value\n\n"
+     "returns the value of one field of a record"},
+    {"read_record", (PyCFunction)dbffile_read_record, METH_VARARGS,
+     "read_record(record_index) -> dict\n\n"
+     "returns an entire record as a dictionary of field names and values"},
+    {"write_field", (PyCFunction)dbffile_write_field, METH_VARARGS,
+     "write_field(record_index, field_index, new_value)\n"
+     "writes a single field of a record"},
+    {"write_record", (PyCFunction)dbffile_write_record, METH_VARARGS,
+     "write_record(record_index, record) -> record_index\n\n"
+     "Writes an entire record as a dict or a sequence, and return index of record\n"
+     "Record can either be a dictionary in which case the keys are used as field names, "
+     "or a sequence that must have an item for every field (length = field_count())"},
 #if HAVE_UPDATE_HEADER
-    {"commit", (PyCFunction)dbffile_read_record, METH_NOARGS, NULL},
+    {"commit", (PyCFunction)dbffile_commit, METH_NOARGS,
+     "commit() -> None"},
 #endif
     {NULL}
 };
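Note: the methods documented above are thin wrappers over shapelib calls that already appear elsewhere in this file. A hypothetical end-to-end sketch of that underlying sequence (create, add fields, write, read back), using a made-up people.dbf and field layout:

    #include <stdio.h>
    #include "shapefil.h"

    int main(void)
    {
        DBFHandle handle;
        int name_field, age_field;

        handle = DBFCreate("people.dbf");                 /* what dbflib.create() wraps */
        if (!handle) return 1;

        /* DBFFile.add_field maps onto DBFAddField. */
        name_field = DBFAddField(handle, "NAME", FTString, 20, 0);
        age_field  = DBFAddField(handle, "AGE", FTInteger, 3, 0);
        if (name_field < 0 || age_field < 0) { DBFClose(handle); return 1; }

        /* write_record()/write_field() boil down to DBFWrite*Attribute calls. */
        DBFWriteStringAttribute(handle, 0, name_field, "Ada");
        DBFWriteIntegerAttribute(handle, 0, age_field, 36);

        /* read_attribute()/read_record() use the matching DBFRead*Attribute calls. */
        printf("%s, %d\n",
               DBFReadStringAttribute(handle, 0, name_field),
               DBFReadIntegerAttribute(handle, 0, age_field));

        DBFClose(handle);                                 /* close() */
        return 0;
    }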
@@ new file lines 523-574: dbflib_create @@
 {
     char* file;
     DBFFileObject* result;
+    DBFHandle handle = NULL;
+    int wideargument = 0;
+
+#if defined(SHPAPI_HAS_WIDE) && defined(Py_WIN_WIDE_FILENAMES)
+    if (GetVersion() < 0x80000000) {    /* On NT, so wide API available */
+        PyObject *wfile;
+        if (PyArg_ParseTuple(args, "U:create", &wfile))
+        {
+            wideargument = 1;
+            handle = DBFCreateW(PyUnicode_AS_UNICODE(wfile));
+            if (!handle)
+            {
+                PyErr_SetFromErrnoWithFilenameObject(PyExc_IOError, wfile);
+                return NULL;
+            }
+        }
+        else
+        {
+            /* Drop the argument parsing error as narrow
+               strings are also valid. */
+            PyErr_Clear();
+        }
+    }
+#endif
 
-    if (!PyArg_ParseTuple(args, "s", &file)) return NULL;
+    if (!handle)
+    {
+        if (!PyArg_ParseTuple(args, "et:create", Py_FileSystemDefaultEncoding, &file)) return NULL;
+        handle = DBFCreate(file);
+        if (!handle)
+        {
+            PyErr_SetFromErrnoWithFilename(PyExc_IOError, file);
+            PyMem_Free(file);
+            return NULL;
+        }
+        PyMem_Free(file);
+    }
 
     result = PyObject_New(DBFFileObject, &DBFFileType);
     if (!result)
     {
+        DBFClose(handle);
         return PyErr_NoMemory();
     }
 
-    result->handle = DBFCreate(file);
-    if (!result->handle)
-    {
-        PyObject_Del((PyObject*)result);
-        PyErr_SetString(PyExc_RuntimeError, "Failed to create DBFFile");
-        return NULL;
-    }
-
+    result->handle = handle;
     return (PyObject*) result;
 }
@@ new file lines 576-587: dbflib_methods @@
 static struct PyMethodDef dbflib_methods[] =
 {
-    {"open", (PyCFunction)dbflib_open, METH_VARARGS, "open a DBFFile" },
-    {"create", (PyCFunction)dbflib_create, METH_VARARGS, "create a DBFFile" },
+    {"open", (PyCFunction)dbflib_open, METH_VARARGS,
+     "open(name [, mode]) -> DBFFile\n\n"
+     "opens a DBFFile" },
+    {"create", (PyCFunction)dbflib_create, METH_VARARGS,
+     "create(name) -> DBFFile\n\n"
+     "create a DBFFile" },
     {NULL}
 };
@@ new file lines 597-603: module constants @@
     PYSHAPELIB_ADD_CONSTANT(FTString);
     PYSHAPELIB_ADD_CONSTANT(FTInteger);
     PYSHAPELIB_ADD_CONSTANT(FTDouble);
+    PYSHAPELIB_ADD_CONSTANT(FTLogical);
     PYSHAPELIB_ADD_CONSTANT(FTInvalid);
     PyModule_AddIntConstant(module, "_have_commit", HAVE_UPDATE_HEADER);
 }