# Copyright (C) 2001, 2002, 2003, 2004, 2005 by Intevation GmbH
# Authors:
# Jan-Oliver Wagner <[email protected]>
# Bernhard Herzog <[email protected]>
# Jonathan Coles <[email protected]>
# Frank Koormann <[email protected]>
#
# This program is free software under the GPL (>=v2)
# Read the file COPYING coming with GRASS for details.

__version__ = "$Revision$"

import string, os

import xml.sax
import xml.sax.handler
from xml.sax import make_parser, ErrorHandler, SAXNotRecognizedException

from Thuban import _

from Thuban.Model.table import FIELDTYPE_INT, FIELDTYPE_DOUBLE, \
     FIELDTYPE_STRING

from Thuban.Model.color import Color, Transparent

from Thuban.Model.session import Session
from Thuban.Model.map import Map
from Thuban.Model.layer import Layer, RasterLayer
from Thuban.Model.proj import Projection
from Thuban.Model.range import Range
from Thuban.Model.classification import Classification, \
    ClassGroupDefault, ClassGroupSingleton, ClassGroupRange, \
    ClassGroupPattern, ClassGroupMap, \
    ClassGroupProperties
from Thuban.Model.data import DerivedShapeStore, ShapefileStore
from Thuban.Model.table import DBFTable
from Thuban.Model.transientdb import TransientJoinedTable

from Thuban.Model.xmlreader import XMLReader
import resource

import postgisdb


class LoadError(Exception):

    """Exception raised when the thuban file is corrupted

    Not all cases of corrupted thuban files will lead to this exception
    but those that are found by checks in the loading code itself are.
    """


class LoadCancelled(Exception):

    """Exception raised to indicate that loading was interrupted by the user"""


def parse_color(color):
    """Return the color object for the string color.

    Color may be either 'None', in which case the shared Transparent
    object is returned, or of the form '#RRGGBB' in the usual
    hexadecimal notation, in which case a Color instance is returned.
    """
    color = string.strip(color)
    if color == "None":
        result = Transparent
    elif color[0] == '#':
        if len(color) == 7:
            r = string.atoi(color[1:3], 16) / 255.0
            g = string.atoi(color[3:5], 16) / 255.0
            b = string.atoi(color[5:7], 16) / 255.0
            result = Color(r, g, b)
        else:
            raise ValueError(_("Invalid hexadecimal color specification %s")
                             % color)
    else:
        raise ValueError(_("Invalid color specification %s") % color)
    return result
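
# For example, parse_color("None") yields the shared Transparent object,
# parse_color("#ff0000") yields Color(1.0, 0.0, 0.0), and any other string
# raises a ValueError.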


class AttrDesc:

    def __init__(self, name, required = False, default = "",
                 conversion = None):
        if not isinstance(name, tuple):
            fullname = (None, name)
        else:
            fullname = name
            name = name[1]
        self.name = name
        self.fullname = fullname
        self.required = required
        self.default = default
        self.conversion = conversion

        # set by the SessionLoader's check_attrs method
        self.value = None
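
# AttrDesc instances describe the XML attributes expected by the start_*
# handlers below.  For example, AttrDesc("id", True) names a required
# attribute "id" in the default namespace; passing a (namespace, name)
# tuple instead of a plain string qualifies the attribute with an explicit
# namespace URI.  The optional conversion argument is interpreted by
# SessionLoader.check_attrs.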
|

class SessionLoader(XMLReader):

    def __init__(self, db_connection_callback = None,
                 shapefile_callback = None):
        """Initialize the SAX handler."""
        XMLReader.__init__(self)

        self.db_connection_callback = db_connection_callback
        self.shapefile_callback = shapefile_callback

        self.theSession = None
        self.aMap = None
        self.aLayer = None

        # Map ids used in the thuban file to the corresponding objects
        # in the session
        self.idmap = {}

        dispatchers = {
            'session'       : ("start_session", "end_session"),

            'dbconnection'  : ("start_dbconnection", None),

            'dbshapesource' : ("start_dbshapesource", None),
            'fileshapesource': ("start_fileshapesource", None),
            'derivedshapesource': ("start_derivedshapesource", None),
            'filetable'     : ("start_filetable", None),
            'jointable'     : ("start_jointable", None),

            'map'           : ("start_map", "end_map"),
            'projection'    : ("start_projection", "end_projection"),
            'parameter'     : ("start_parameter", None),
            'layer'         : ("start_layer", "end_layer"),
            'rasterlayer'   : ("start_rasterlayer", "end_rasterlayer"),
            'classification': ("start_classification", "end_classification"),
            'clnull'        : ("start_clnull", "end_clnull"),
            'clpoint'       : ("start_clpoint", "end_clpoint"),
            'clrange'       : ("start_clrange", "end_clrange"),
            'clpattern'     : ("start_clpattern", "end_clpattern"),
            'cldata'        : ("start_cldata", "end_cldata"),
            'table'         : ("start_table", "end_table"),
            'labellayer'    : ("start_labellayer", None),
            'label'         : ("start_label", None)}

        # all dispatchers should be used for the 0.8 and 0.9 namespaces too
        for xmlns in ("http://thuban.intevation.org/dtds/thuban-0.8.dtd",
                      "http://thuban.intevation.org/dtds/thuban-0.9-dev.dtd",
                      "http://thuban.intevation.org/dtds/thuban-0.9.dtd",
                      "http://thuban.intevation.org/dtds/thuban-1.0-dev.dtd",
                      "http://thuban.intevation.org/dtds/thuban-1.0rc1.dtd",
                      "http://thuban.intevation.org/dtds/thuban-1.0.0.dtd",
                      "http://thuban.intevation.org/dtds/thuban-1.1-dev.dtd"):
            for key, value in dispatchers.items():
                dispatchers[(xmlns, key)] = value

        XMLReader.AddDispatchers(self, dispatchers)
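
        # Each dispatchers entry maps an element name (or a (namespace URI,
        # element name) pair) to the names of the methods handling its start
        # and end tags; None means no handler.  The XMLReader base class
        # uses this table to route SAX events to the start_*/end_* methods
        # defined below.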
|
|
    def Destroy(self):
        """Clear all instance variables to cut cyclic references.

        The GC would have collected the loader eventually but it can
        happen that it doesn't run at all until Thuban is closed (2.3
        but not 2.2 tries a bit harder and forces a collection when the
        interpreter terminates)
        """
        self.__dict__.clear()

    def start_session(self, name, qname, attrs):
        self.theSession = Session(self.encode(attrs.get((None, 'title'),
                                                        None)))

    def end_session(self, name, qname):
        pass
|
    def check_attrs(self, element, attrs, descr):
        """Check and convert some of the attributes of an element

        Parameters:
           element -- The element name
           attrs -- The attrs mapping as passed to the start_* methods
           descr -- Sequence of attribute descriptions (AttrDesc instances)

        Return a dictionary containing normalized versions of the
        attributes described in descr. The keys of that dictionary are
        the name attributes of the attribute descriptions. The attrs
        dictionary will not be modified.

        If the attribute is required, i.e. the 'required' attribute of
        the description is true, but it is not in attrs, raise a
        LoadError.

        If the attribute has a default value and it is not present in
        attrs, use that default value as the value in the returned dict.

        The value is converted before putting it into the returned dict.
        The following conversions are available:

           'filename' -- The attribute is a filename.

                         If the filename is a relative name, interpret
                         it relative to the directory containing the
                         .thuban file and make it an absolute name

           'shapesource' -- The attribute is the ID of a shapestore
                            defined earlier in the .thuban file. Look it
                            up in self.idmap

           'table' -- The attribute is the ID of a table or shapestore
                      defined earlier in the .thuban file. Look it up in
                      self.idmap. If it's the ID of a shapestore the
                      value will be the table of the shapestore.

           'idref' -- The attribute is the id of an object defined
                      earlier in the .thuban file. Look it up in
                      self.idmap

           'ascii' -- The attribute is converted to a bytestring with
                      ascii encoding.

           a callable -- The attribute value is passed to the callable
                         and the return value is used as the converted
                         value

        If no conversion is specified for an attribute it is converted
        with self.encode.
        """
        normalized = {}

        for d in descr:
            if d.required and not attrs.has_key(d.fullname):
                raise LoadError("Element %s requires an attribute %r"
                                % (element, d.name))
            value = attrs.get(d.fullname, d.default)

            if d.conversion in ("idref", "shapesource"):
                if value in self.idmap:
                    value = self.idmap[value]
                else:
                    raise LoadError("Element %s requires an already defined ID"
                                    " in attribute %r"
                                    % (element, d.name))
            elif d.conversion == "table":
                if value in self.idmap:
                    value = self.idmap[value]
                    if isinstance(value, ShapefileStore):
                        value = value.Table()
                else:
                    raise LoadError("Element %s requires an already defined ID"
                                    " in attribute %r"
                                    % (element, d.name))
            elif d.conversion == "filename":
                value = os.path.abspath(os.path.join(self.GetDirectory(),
                                                     self.encode(value)))
            elif d.conversion == "ascii":
                value = value.encode("ascii")
            elif d.conversion:
                # Assume it's a callable
                value = d.conversion(value)
            else:
                value = self.encode(value)

            normalized[d.name] = value
        return normalized
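
    # A typical call, as used by the start_* handlers below:
    #
    #   attrs = self.check_attrs(name, attrs,
    #                            [AttrDesc("id", True),
    #                             AttrDesc("filename", True,
    #                                      conversion = "filename")])
    #   ID = attrs["id"]
    #   filename = attrs["filename"]
    #
    # A missing required attribute raises LoadError; the "filename"
    # conversion yields an absolute path interpreted relative to the
    # directory of the .thuban file.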
|
|
    def open_shapefile(self, filename):
        """Open shapefile, with alternative path handling.

        If a shapefile cannot be opened and an IOError is raised, check for
        an alternative. This alternative can be specified interactively by
        the user or taken from a list of (potential) locations, depending on
        the callback implementation.

        The alternative is rechecked. If taken from a list the user
        has to confirm the alternative.
        """

        # Flag if the alternative path was specified interactively / from list.
        from_list = 0
        while 1:
            try:
                store = self.theSession.OpenShapefile(filename)
                if from_list:
                    # A valid path has been guessed from a list.
                    # Let the user confirm - or select an alternative.
                    filename, from_list = self.shapefile_callback(
                                            filename, "check")
                    if filename is None:
                        # Selection cancelled
                        raise LoadCancelled
                    elif store.FileName() == filename:
                        # Proposed file has been accepted
                        break
                    else:
                        # the filename has been changed, try the new file
                        pass
                else:
                    break
            except IOError:
                if self.shapefile_callback is not None:
                    filename, from_list = self.shapefile_callback(
                                            filename,
                                            mode = "search",
                                            second_try = from_list)
                    if filename is None:
                        raise LoadCancelled
                else:
                    raise
        return store
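
    # Sketch of a compatible shapefile_callback (illustrative only): it is
    # called as callback(filename, "check") after a guessed path opened
    # successfully, or as callback(filename, mode="search", second_try=...)
    # after an IOError, and must return a (filename, from_list) pair where
    # a filename of None cancels loading.
    #
    #   def give_up_shapefile_callback(filename, mode="search", second_try=0):
    #       return None, 0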
|
|
    def start_dbconnection(self, name, qname, attrs):
        attrs = self.check_attrs(name, attrs,
                                 [AttrDesc("id", True),
                                  AttrDesc("dbtype", True),
                                  AttrDesc("host", False, ""),
                                  AttrDesc("port", False, ""),
                                  AttrDesc("user", False, ""),
                                  AttrDesc("dbname", True)])
        ID = attrs["id"]
        dbtype = attrs["dbtype"]
        if dbtype != "postgis":
            raise LoadError("dbtype %r not supported" % dbtype)

        del attrs["id"]
        del attrs["dbtype"]

        # Try to open the connection and if it fails ask the user for
        # the correct parameters repeatedly.
        # FIXME: it would be better not to insist on getting a
        # connection here. We should handle this more like the raster
        # images where the layers etc still are created but are not
        # drawn in case Thuban can't use the data for various reasons
        while 1:
            try:
                conn = postgisdb.PostGISConnection(**attrs)
                break
            except postgisdb.ConnectionError, val:
                if self.db_connection_callback is not None:
                    attrs = self.db_connection_callback(attrs, str(val))
                    if attrs is None:
                        raise LoadCancelled
                else:
                    raise

        self.idmap[ID] = conn
        self.theSession.AddDBConnection(conn)
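
    # Sketch of a compatible db_connection_callback (illustrative only): it
    # receives the current connection parameters and an error message and
    # returns a corrected parameter dictionary, or None to cancel loading.
    #
    #   def give_up_db_callback(params, message):
    #       return None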
|
|
    def start_dbshapesource(self, name, qname, attrs):
        attrs = self.check_attrs(name, attrs,
                                 [AttrDesc("id", True),
                                  AttrDesc("dbconn", True,
                                           conversion = "idref"),
                                  AttrDesc("tablename", True,
                                           conversion = "ascii"),
                                  # id_column and geometry_column were
                                  # newly introduced with thuban-1.1.dtd
                                  # where they're required.  Since we
                                  # support the older formats too we
                                  # have them optional here.
                                  AttrDesc("id_column", False, "gid",
                                           conversion = "ascii"),
                                  AttrDesc("geometry_column", False,
                                           conversion = "ascii")])
        # The default value of geometry_column to use when instantiating
        # the db shapestore is None which we currently can't easily use
        # in check_attrs
        geometry_column = attrs["geometry_column"]
        if not geometry_column:
            geometry_column = None
        dbopen = self.theSession.OpenDBShapeStore
        self.idmap[attrs["id"]] = dbopen(attrs["dbconn"], attrs["tablename"],
                                         id_column = attrs["id_column"],
                                         geometry_column = geometry_column)

    def start_fileshapesource(self, name, qname, attrs):
        attrs = self.check_attrs(name, attrs,
                                 [AttrDesc("id", True),
                                  AttrDesc("filename", True,
                                           conversion = "filename"),
                                  AttrDesc("filetype", True)])
        ID = attrs["id"]
        filename = attrs["filename"]
        filetype = attrs["filetype"]
        if filetype != "shapefile":
            raise LoadError("shapesource filetype %r not supported" % filetype)
        self.idmap[ID] = self.open_shapefile(filename)

    def start_derivedshapesource(self, name, qname, attrs):
        attrs = self.check_attrs(name, attrs,
                                 [AttrDesc("id", True),
                                  AttrDesc("shapesource", True,
                                           conversion = "shapesource"),
                                  AttrDesc("table", True, conversion="table")])
        store = DerivedShapeStore(attrs["shapesource"], attrs["table"])
        self.theSession.AddShapeStore(store)
        self.idmap[attrs["id"]] = store
|
def start_filetable(self, name, qname, attrs): |
398 |
|
attrs = self.check_attrs(name, attrs, |
399 |
|
[AttrDesc("id", True), |
400 |
|
AttrDesc("title", True), |
401 |
|
AttrDesc("filename", True, |
402 |
|
conversion = "filename"), |
403 |
|
AttrDesc("filetype")]) |
404 |
|
filetype = attrs["filetype"] |
405 |
|
if filetype != "DBF": |
406 |
|
raise LoadError("shapesource filetype %r not supported" % filetype) |
407 |
|
table = DBFTable(attrs["filename"]) |
408 |
|
table.SetTitle(attrs["title"]) |
409 |
|
self.idmap[attrs["id"]] = self.theSession.AddTable(table) |
410 |
|
|
411 |
|
def start_jointable(self, name, qname, attrs): |
412 |
|
attrs = self.check_attrs(name, attrs, |
413 |
|
[AttrDesc("id", True), |
414 |
|
AttrDesc("title", True), |
415 |
|
AttrDesc("left", True, conversion="table"), |
416 |
|
AttrDesc("leftcolumn", True), |
417 |
|
AttrDesc("right", True, conversion="table"), |
418 |
|
AttrDesc("rightcolumn", True), |
419 |
|
|
420 |
|
# jointype is required for file |
421 |
|
# version 0.9 but this attribute |
422 |
|
# wasn't in the 0.8 version because of |
423 |
|
# an oversight so we assume it's |
424 |
|
# optional since we want to handle |
425 |
|
# both file format versions here. |
426 |
|
AttrDesc("jointype", False, |
427 |
|
default="INNER")]) |
428 |
|
|
429 |
|
jointype = attrs["jointype"] |
430 |
|
if jointype == "LEFT OUTER": |
431 |
|
outer_join = True |
432 |
|
elif jointype == "INNER": |
433 |
|
outer_join = False |
434 |
|
else: |
435 |
|
raise LoadError("jointype %r not supported" % jointype ) |
436 |
|
table = TransientJoinedTable(self.theSession.TransientDB(), |
437 |
|
attrs["left"], attrs["leftcolumn"], |
438 |
|
attrs["right"], attrs["rightcolumn"], |
439 |
|
outer_join = outer_join) |
440 |
|
table.SetTitle(attrs["title"]) |
441 |
|
self.idmap[attrs["id"]] = self.theSession.AddTable(table) |
442 |
|
|

    def start_map(self, name, qname, attrs):
        """Start a map."""
        self.aMap = Map(self.encode(attrs.get((None, 'title'), None)))

    def end_map(self, name, qname):
        self.theSession.AddMap(self.aMap)
        self.aMap = None

    def start_projection(self, name, qname, attrs):
        attrs = self.check_attrs(name, attrs,
                                 [AttrDesc("name", conversion=self.encode),
                                  AttrDesc("epsg", default=None,
                                           conversion=self.encode)])
        self.projection_name = attrs["name"]
        self.projection_epsg = attrs["epsg"]
        self.projection_params = [ ]

    def end_projection(self, name, qname):
        if self.aLayer is not None:
            obj = self.aLayer
        elif self.aMap is not None:
            obj = self.aMap
        else:
            assert False, "projection tag out of context"
            pass

        obj.SetProjection(Projection(self.projection_params,
                                     self.projection_name,
                                     epsg = self.projection_epsg))

    def start_parameter(self, name, qname, attrs):
        s = attrs.get((None, 'value'))
        s = str(s) # we can't handle unicode in proj
        self.projection_params.append(s)

    def start_layer(self, name, qname, attrs, layer_class = Layer):
        """Start a layer

        Instantiate a layer of layer_class from the attributes in
        attrs which may be a dictionary as well as the normal SAX attrs
        object and bind it to self.aLayer.
        """
        title = self.encode(attrs.get((None, 'title'), ""))
        filename = attrs.get((None, 'filename'), "")
        filename = os.path.join(self.GetDirectory(), filename)
        filename = self.encode(filename)
        visible = self.encode(attrs.get((None, 'visible'), "true")) != "false"
        fill = parse_color(attrs.get((None, 'fill'), "None"))
        stroke = parse_color(attrs.get((None, 'stroke'), "#000000"))
        stroke_width = int(attrs.get((None, 'stroke_width'), "1"))
        if attrs.has_key((None, "shapestore")):
            store = self.idmap[attrs[(None, "shapestore")]]
        else:
            store = self.open_shapefile(filename)

        self.aLayer = layer_class(title, store,
                                  fill = fill, stroke = stroke,
                                  lineWidth = stroke_width,
                                  visible = visible)

    def end_layer(self, name, qname):
        self.aMap.AddLayer(self.aLayer)
        self.aLayer = None

    def start_rasterlayer(self, name, qname, attrs, layer_class = RasterLayer):
        title = self.encode(attrs.get((None, 'title'), ""))
        filename = attrs.get((None, 'filename'), "")
        filename = os.path.join(self.GetDirectory(), filename)
        filename = self.encode(filename)
        visible = self.encode(attrs.get((None, 'visible'), "true")) != "false"
        opacity = float(attrs.get((None, 'opacity'), "1"))
        masktype = str(attrs.get((None, 'masktype'), "bit"))

        masktypes = {"none": layer_class.MASK_NONE,
                     "bit": layer_class.MASK_BIT,
                     "alpha": layer_class.MASK_ALPHA}

        self.aLayer = layer_class(title, filename,
                                  visible = visible,
                                  opacity = opacity,
                                  masktype = masktypes[masktype])

    def end_rasterlayer(self, name, qname):
        self.aMap.AddLayer(self.aLayer)
        self.aLayer = None

    def start_classification(self, name, qname, attrs):
        # field and field_type are optional because the classification
        # can also be empty, ie. have only a default.
        attrs = self.check_attrs(name, attrs,
                                 [AttrDesc("field", False),
                                  AttrDesc("field_type", False)])

        field = attrs["field"]
        fieldType = attrs["field_type"]

        if field == "": return # no need to set classification column.

        dbFieldType = self.aLayer.GetFieldType(field)

        if fieldType != dbFieldType:
            raise ValueError(_("xml field type differs from database!"))

        # Set up the conversion routine used for the group values,
        # depending on the field type.
        if fieldType == FIELDTYPE_STRING:
            self.conv = str
        elif fieldType == FIELDTYPE_INT:
            self.conv = lambda p: int(float(p))
        elif fieldType == FIELDTYPE_DOUBLE:
            self.conv = float

        self.aLayer.SetClassificationColumn(field)

    def end_classification(self, name, qname):
        pass

    def start_clnull(self, name, qname, attrs):
        self.cl_group = ClassGroupDefault()
        self.cl_group.SetLabel(self.encode(attrs.get((None, 'label'), "")))
        self.cl_prop = ClassGroupProperties()

    def end_clnull(self, name, qname):
        self.cl_group.SetProperties(self.cl_prop)
        self.aLayer.GetClassification().SetDefaultGroup(self.cl_group)
        del self.cl_group, self.cl_prop

    def start_clpoint(self, name, qname, attrs):
        attrib_value = attrs.get((None, 'value'), "0")

        field = self.aLayer.GetClassificationColumn()
        if self.aLayer.GetFieldType(field) == FIELDTYPE_STRING:
            value = self.encode(attrib_value)
        else:
            value = self.conv(attrib_value)

        self.cl_group = ClassGroupSingleton(value)
        self.cl_group.SetLabel(self.encode(attrs.get((None, 'label'), "")))
        self.cl_prop = ClassGroupProperties()

    def end_clpoint(self, name, qname):
        self.cl_group.SetProperties(self.cl_prop)
        self.aLayer.GetClassification().AppendGroup(self.cl_group)
        del self.cl_group, self.cl_prop

    def start_clrange(self, name, qname, attrs):
        attrs = self.check_attrs(name, attrs,
                                 [AttrDesc("range", False, None),
                                  AttrDesc("min", False, None),
                                  AttrDesc("max", False, None)])

        range = attrs['range']
        # for backward compatibility (min/max are not saved)
        min = attrs['min']
        max = attrs['max']

        try:
            if range is not None:
                self.cl_group = ClassGroupRange(Range(range))
            elif min is not None and max is not None:
                self.cl_group = ClassGroupRange((self.conv(min),
                                                 self.conv(max)))
            else:
                self.cl_group = ClassGroupRange(Range(None))

        except ValueError:
            raise ValueError(_("Classification range is not a number!"))

        self.cl_group.SetLabel(attrs.get((None, 'label'), ""))
        self.cl_prop = ClassGroupProperties()

    def end_clrange(self, name, qname):
        self.cl_group.SetProperties(self.cl_prop)
        self.aLayer.GetClassification().AppendGroup(self.cl_group)
        del self.cl_group, self.cl_prop

    def start_clpattern(self, name, qname, attrs):
        pattern = attrs.get((None, 'pattern'), "")

        self.cl_group = ClassGroupPattern(self.encode(pattern))
        self.cl_group.SetLabel(self.encode(attrs.get((None, 'label'), "")))
        self.cl_prop = ClassGroupProperties()

    def end_clpattern(self, name, qname):
        self.cl_group.SetProperties(self.cl_prop)
        self.aLayer.GetClassification().AppendGroup(self.cl_group)
        del self.cl_group, self.cl_prop
def start_cldata(self, name, qname, attrs): |
def start_cldata(self, name, qname, attrs): |
635 |
self.cl_prop.SetLineColor( |
self.cl_prop.SetLineColor( |
636 |
parse_color(attrs.get((None, 'stroke'), "None"))) |
parse_color(attrs.get((None, 'stroke'), "None"))) |
637 |
self.cl_prop.SetLineWidth( |
self.cl_prop.SetLineWidth( |
638 |
int(attrs.get((None, 'stroke_width'), "0"))) |
int(attrs.get((None, 'stroke_width'), "0"))) |
639 |
|
self.cl_prop.SetSize(int(attrs.get((None, 'size'), "5"))) |
640 |
self.cl_prop.SetFill(parse_color(attrs.get((None, 'fill'), "None"))) |
self.cl_prop.SetFill(parse_color(attrs.get((None, 'fill'), "None"))) |
|
start_dispatcher['cldata'] = "start_cldata" |
|
641 |
|
|
642 |
def end_cldata(self, name, qname): |
def end_cldata(self, name, qname): |
643 |
pass |
pass |
|
end_dispatcher['cldata'] = "end_cldata" |
|
|
|
|
|

    def start_table(self, name, qname, attrs):
        #print "table title: %s" % attrs.get('title', None)
        pass

    def end_table(self, name, qname):
        pass

    def start_labellayer(self, name, qname, attrs):
        self.aLayer = self.aMap.LabelLayer()

    def start_label(self, name, qname, attrs):
        attrs = self.check_attrs(name, attrs,
                                 [AttrDesc("x", True, conversion = float),
                                  AttrDesc("y", True, conversion = float),
                                  AttrDesc("text", True),
                                  AttrDesc("halign", True,
                                           conversion = "ascii"),
                                  AttrDesc("valign", True,
                                           conversion = "ascii")])
        x = attrs['x']
        y = attrs['y']
        text = attrs['text']
        halign = attrs['halign']
        valign = attrs['valign']
        if halign not in ("left", "center", "right"):
            raise LoadError("Unsupported halign value %r" % halign)
        if valign not in ("top", "center", "bottom"):
            raise LoadError("Unsupported valign value %r" % valign)
        self.aLayer.AddLabel(x, y, text, halign = halign, valign = valign)

    def characters(self, chars):
        pass


def load_session(filename, db_connection_callback = None,
                 shapefile_callback = None):
    """Load a Thuban session from the file named filename

    The db_connection_callback, if given, should be a callable object
    that can be called like this:
       db_connection_callback(params, message)

    where params is a dictionary containing the known connection
    parameters and message is a string with a message why the connection
    failed. db_connection_callback should return a new dictionary with
    corrected and perhaps additional parameters like a password or None
    to indicate that the user cancelled.
    """
    handler = SessionLoader(db_connection_callback, shapefile_callback)
    handler.read(filename)

    session = handler.theSession
    # Newly loaded sessions aren't modified
    session.UnsetModified()

    handler.Destroy()

    return session
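

# Illustrative sketch only: load the session file named on the command line
# and report its title.  Thuban itself calls load_session() from its
# application code rather than running this module directly.
if __name__ == "__main__":
    import sys
    if len(sys.argv) != 2:
        print "usage: %s <session-file.thuban>" % sys.argv[0]
    else:
        session = load_session(sys.argv[1])
        print "Loaded session:", session.Title()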
|
|