@@ DBFFile.__init__ (lines 38-50) @@
 {
     char* file;
     char* mode = "rb";
-    if (kwds != NULL && PyDict_Size(kwds) > 0)
-    {
-        PyErr_Format(PyExc_TypeError, "dbflib.DBFFile.__init__ takes no keyword arguments");
-        return -1;
-    }
-    if (!PyArg_ParseTuple(args, "s|s", &file, &mode)) return -1;
+    static char *kwlist[] = {"name", "mode", NULL};
+    if (!PyArg_ParseTupleAndKeywords(args, kwds, "et|s:__init__", kwlist,
+        Py_FileSystemDefaultEncoding, &file, &mode)) return -1;

     self->handle = DBFOpen(file, mode);
+    PyMem_Free(file);

     return self->handle ? 0 : -1;
 }
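Because the constructor now parses its arguments with PyArg_ParseTupleAndKeywords and the "et" converter, the file name can be given positionally or by keyword, is decoded with the filesystem default encoding before DBFOpen, and is released with PyMem_Free afterwards. A minimal sketch of the Python-side effect, not part of the patch; it assumes the extension is importable as dbflib and exports the DBFFile type, as the old error message suggests, and the file name is made up:

# Sketch only: keyword arguments are accepted after this change.
import dbflib

dbf = dbflib.DBFFile("example.dbf")                    # mode defaults to "rb"
dbf = dbflib.DBFFile(name="example.dbf", mode="rb")    # keywords now work
dbf.close()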
@@ DBFFile.field_info (lines 78-84) @@
     char field_name[12];
     int field, width = 0, decimals = 0, field_type;

-    if (!PyArg_ParseTuple(args, "i", &field)) return NULL;
+    if (!PyArg_ParseTuple(args, "i:field_info", &field)) return NULL;

     field_name[0] = '\0';
     field_type = DBFGetFieldInfo(self->handle, field, field_name, &width, &decimals);
@@ DBFFile.add_field (lines 94-100) @@
     int type, width, decimals;
     int field;

-    if (!PyArg_ParseTuple(args, "siii", &name, &type, &width, &decimals)) return NULL;
+    if (!PyArg_ParseTuple(args, "siii:add_field", &name, &type, &width, &decimals)) return NULL;

     field = DBFAddField(self->handle, name, (DBFFieldType)type, width, decimals);

@@ attribute read helper (lines 138-175) @@
         {
         case FTString:
             temp = DBFReadStringAttribute(handle, record, field);
-            if (!temp)
-            {
-                PyErr_Format(PyExc_IOError,
-                    "Can't read value for row %d column %d",
-                    record, field);
-                return NULL;
-            }
-            return PyString_FromString(temp);
+            if (temp) return PyString_FromString(temp);
+            break;

         case FTInteger:
             return PyInt_FromLong((long)DBFReadIntegerAttribute(handle, record, field));

         case FTDouble:
             return PyFloat_FromDouble(DBFReadDoubleAttribute(handle, record, field));

+        case FTLogical:
+            temp = DBFReadLogicalAttribute(handle, record, field);
+            if (temp)
+            {
+                switch (temp[0])
+                {
+                case 'F':
+                case 'N':
+                    Py_RETURN_FALSE;
+                case 'T':
+                case 'Y':
+                    Py_RETURN_TRUE;
+                }
+            }
+            break;

         default:
             PyErr_Format(PyExc_TypeError, "Invalid field data type %d", type);
             return NULL;
         }
     }

+    PyErr_Format(PyExc_IOError, "Can't read value for row %d column %d", record, field);
+    return NULL;
 }
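With this helper, read_attribute maps dBASE field types to Python values: FTString to a string, FTInteger to an int, FTDouble to a float, and the newly handled FTLogical to True or False depending on the stored 'T'/'Y' or 'F'/'N' flag; anything unreadable ends in the IOError at the bottom. A hedged sketch, not from the patch, with a made-up file name and field layout:

# Sketch only: assumes example.dbf has a string field 0, an integer
# field 1 and a logical field 2.
import dbflib

dbf = dbflib.open("example.dbf")
text = dbf.read_attribute(0, 0)    # str
count = dbf.read_attribute(0, 1)   # int
flag = dbf.read_attribute(0, 2)    # True or False for a logical field
dbf.close()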
@@ lines 181-187 @@
 {
     int record, field;

-    if (!PyArg_ParseTuple(args, "ii", &record, &field)) return NULL;
+    if (!PyArg_ParseTuple(args, "ii:read_field", &record, &field)) return NULL;

     if (record < 0 || record >= DBFGetRecordCount(self->handle))
     {
@@ DBFFile.read_record (lines 217-223) @@
     PyObject *dict;
     PyObject *value = NULL;

-    if (!PyArg_ParseTuple(args, "i", &record)) return NULL;
+    if (!PyArg_ParseTuple(args, "i:read_record", &record)) return NULL;

     if (record < 0 || record >= DBFGetRecordCount(self->handle))
     {
@@ attribute write helper (lines 255-265) @@
     char * string_value;
     int int_value;
     double double_value;
+    int logical_value;

     if (value == Py_None)
     {
-        if (!DBFWriteNULLAttribute(handle, record, field))
-        {
-            PyErr_Format(PyExc_IOError,
-                "can't write NULL field %d of record %d",
-                field, record);
-            return 0;
-        }
+        if (DBFWriteNULLAttribute(handle, record, field)) return 1;
     }
     else
     {
@@ lines 268-292 @@
         case FTString:
             string_value = PyString_AsString(value);
             if (!string_value) return 0;
-            if (!DBFWriteStringAttribute(handle, record, field, string_value))
-            {
-                PyErr_Format(PyExc_IOError,
-                    "can't write field %d of record %d",
-                    field, record);
-                return 0;
-            }
+            if (DBFWriteStringAttribute(handle, record, field, string_value)) return 1;
             break;

         case FTInteger:
             int_value = PyInt_AsLong(value);
             if (int_value == -1 && PyErr_Occurred()) return 0;
-            if (!DBFWriteIntegerAttribute(handle, record, field, int_value))
-            {
-                PyErr_Format(PyExc_IOError,
-                    "can't write field %d of record %d",
-                    field, record);
-                return 0;
-            }
+            if (DBFWriteIntegerAttribute(handle, record, field, int_value)) return 1;
             break;

         case FTDouble:
             double_value = PyFloat_AsDouble(value);
             if (double_value == -1 && PyErr_Occurred()) return 0;
-            if (!DBFWriteDoubleAttribute(handle, record, field, double_value))
-            {
-                PyErr_Format(PyExc_IOError,
-                    "can't write field %d of record %d",
-                    field, record);
-                return 0;
-            }
+            if (DBFWriteDoubleAttribute(handle, record, field, double_value)) return 1;
+            break;
+
+        case FTLogical:
+            logical_value = PyObject_IsTrue(value);
+            if (logical_value == -1) return 0;
+            if (DBFWriteLogicalAttribute(handle, record, field, logical_value ? 'T' : 'F')) return 1;
             break;

         default:
@@ lines 295-302 @@
         }
     }

-    return 1;
+    PyErr_Format(PyExc_IOError, "can't write field %d of record %d", field, record);
+    return 0;
 }
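The restructured write helper returns 1 directly from a successful DBFWrite* call and funnels every failure into the single IOError at the end; passing None writes a NULL attribute, and the new FTLogical branch stores any Python truth value as 'T' or 'F'. A hedged usage sketch, not from the patch, with an illustrative file name and field indices:

# Sketch only: writing typed values through write_field.
import dbflib

dbf = dbflib.open("example.dbf", "r+b")
dbf.write_field(0, 1, 42)      # integer field
dbf.write_field(0, 2, True)    # logical field, stored as 'T'
dbf.write_field(0, 0, None)    # writes a NULL attribute
dbf.close()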
@@ DBFFile.write_field (lines 307-313) @@
     PyObject* value;
     int type;

-    if (!PyArg_ParseTuple(args, "iiO", &record, &field, &value)) return NULL;
+    if (!PyArg_ParseTuple(args, "iiO:write_field", &record, &field, &value)) return NULL;

     if (field < 0 || field >= DBFGetFieldCount(self->handle))
     {
@@ DBFFile.write_record (lines 334-340) @@
     char name[12];
     PyObject* value = NULL;

-    if (!PyArg_ParseTuple(args, "iO", &record, &record_object)) return NULL;
+    if (!PyArg_ParseTuple(args, "iO:write_record", &record, &record_object)) return NULL;

     num_fields = DBFGetFieldCount(self->handle);

@@ dbffile_methods (lines 413-460) @@

 static struct PyMethodDef dbffile_methods[] =
 {
-    {"close", (PyCFunction)dbffile_close, METH_NOARGS, "close DBFFile"},
-    {"field_count", (PyCFunction)dbffile_field_count, METH_NOARGS, "return number of fields currently defined"},
-    {"record_count", (PyCFunction)dbffile_record_count, METH_NOARGS, "return number of records that currently exist"},
-    {"field_info", (PyCFunction)dbffile_field_info, METH_VARARGS,
-        "returns info of a field as a tuple (type, name, width, decimals) with:\n"
-        "-type: the type of the field corresponding to the integer value of one of the constants FTString, FTInteger, ...\n"
-        "-name: the name of the field as a string\n"
-        "-width: the width of the field as a number of characters\n"
-        "-decimals: the number of decimal digits" },
+    {"close", (PyCFunction)dbffile_close, METH_NOARGS,
+        "close() -> None\n\n"
+        "closes DBFFile"},
+    {"field_count", (PyCFunction)dbffile_field_count, METH_NOARGS,
+        "field_count() -> integer\n\n"
+        "returns number of fields currently defined"},
+    {"record_count", (PyCFunction)dbffile_record_count, METH_NOARGS,
+        "record_count() -> integer\n\n"
+        "returns number of records that currently exist"},
+    {"field_info", (PyCFunction)dbffile_field_info, METH_VARARGS,
+        "field_info(field_index) -> (type, name, width, decimals)\n\n"
+        "returns info of a field as a tuple with:\n"
+        "- type: the type of the field corresponding to the integer value of one "
+        "  of the constants FTString, FTInteger, ...\n"
+        "- name: the name of the field as a string\n"
+        "- width: the width of the field as a number of characters\n"
+        "- decimals: the number of decimal digits" },
     {"add_field", (PyCFunction)dbffile_add_field, METH_VARARGS,
+        "add_field(name, type, width, decimals) -> field_index\n\n"
         "adds a new field and returns field index if successful\n"
-        "-type: the type of the field corresponding to the integer value of one of the constants FTString, FTInteger, ...\n"
-        "-name: the name of the field as a string\n"
-        "-width: the width of the field as a number of characters\n"
-        "-decimals: the number of decimal digits" },
-    {"read_attribute", (PyCFunction)dbffile_read_attribute, METH_VARARGS, "return the value of one field of a record"},
-    {"read_record", (PyCFunction)dbffile_read_record, METH_VARARGS, "return an entire record as a dict of field names and values"},
-    {"write_field", (PyCFunction)dbffile_write_field, METH_VARARGS, "write a single field of a record"},
-    {"write_record", (PyCFunction)dbffile_write_record, METH_VARARGS, "write an entire record as a dict or a sequence"},
+        "- name: the name of the field as a string\n"
+        "- type: the type of the field corresponding to the integer value of one "
+        "  of the constants FTString, FTInteger, ...\n"
+        "- width: the width of the field as a number of characters\n"
+        "- decimals: the number of decimal digits" },
+    {"read_attribute", (PyCFunction)dbffile_read_attribute, METH_VARARGS,
+        "read_attribute(record_index, field_index) -> value\n\n"
+        "returns the value of one field of a record"},
+    {"read_record", (PyCFunction)dbffile_read_record, METH_VARARGS,
+        "read_record(record_index) -> dict\n\n"
+        "returns an entire record as a dictionary of field names and values"},
+    {"write_field", (PyCFunction)dbffile_write_field, METH_VARARGS,
+        "write_field(record_index, field_index, new_value)\n"
+        "writes a single field of a record"},
+    {"write_record", (PyCFunction)dbffile_write_record, METH_VARARGS,
+        "write_record(record_index, record) -> record_index\n\n"
+        "Writes an entire record as a dict or a sequence, and returns the record index\n"
+        "Record can either be a dictionary in which case the keys are used as field names, "
+        "or a sequence that must have an item for every field (length = field_count())"},
 #if HAVE_UPDATE_HEADER
-    {"commit", (PyCFunction)dbffile_read_record, METH_NOARGS, NULL},
+    {"commit", (PyCFunction)dbffile_read_record, METH_NOARGS,
+        "commit() -> None"},
 #endif
     {NULL}
 };
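Taken together, the expanded docstrings describe the intended Python-level API. A hedged round-trip sketch based on those docstrings, not part of the patch; the file name, field names and widths are made up, and add_field takes the name first, matching the "siii" format string above:

# Sketch only: create a file, define fields, write and read records.
import dbflib

dbf = dbflib.create("example.dbf")
dbf.add_field("NAME", dbflib.FTString, 20, 0)
dbf.add_field("ACTIVE", dbflib.FTLogical, 1, 0)
dbf.write_record(0, {"NAME": "first", "ACTIVE": True})   # dict keyed by field name
dbf.write_record(1, ["second", False])                    # or a sequence, one item per field
print(dbf.read_record(0))                                 # dict of field names and values
dbf.close()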
@@ dbflib.create (lines 486-492) @@
     char* file;
     DBFFileObject* result;

-    if (!PyArg_ParseTuple(args, "s", &file)) return NULL;
+    if (!PyArg_ParseTuple(args, "et:create", Py_FileSystemDefaultEncoding, &file)) return NULL;

     result = PyObject_New(DBFFileObject, &DBFFileType);
     if (!result)
@@ dbflib_methods (lines 509-520) @@

 static struct PyMethodDef dbflib_methods[] =
 {
-    {"open", (PyCFunction)dbflib_open, METH_VARARGS, "open a DBFFile" },
-    {"create", (PyCFunction)dbflib_create, METH_VARARGS, "create a DBFFile" },
+    {"open", (PyCFunction)dbflib_open, METH_VARARGS,
+        "open(name [, mode]) -> DBFFile\n\n"
+        "opens a DBFFile" },
+    {"create", (PyCFunction)dbflib_create, METH_VARARGS,
+        "create(name) -> DBFFile\n\n"
+        "create a DBFFile" },
     {NULL}
 };

@@ module constants (lines 530-536) @@
     PYSHAPELIB_ADD_CONSTANT(FTString);
     PYSHAPELIB_ADD_CONSTANT(FTInteger);
     PYSHAPELIB_ADD_CONSTANT(FTDouble);
+    PYSHAPELIB_ADD_CONSTANT(FTLogical);
     PYSHAPELIB_ADD_CONSTANT(FTInvalid);
     PyModule_AddIntConstant(module, "_have_commit", HAVE_UPDATE_HEADER);
 }