# Copyright (c) 2001-2004 by Intevation GmbH
# Authors:
# Jan-Oliver Wagner <[email protected]> (2004)
# Bernhard Herzog <[email protected]> (2001-2004)
# Jonathan Coles <[email protected]> (2003)
# Frank Koormann <[email protected]> (2003)
#
# This program is free software under the GPL (>=v2)
# Read the file COPYING coming with Thuban for details.

# $Id$

__version__ = "$Revision$"

import os

from Thuban import _
import Thuban.Lib.fileutil

from Thuban.Model.layer import Layer, RasterLayer
from Thuban.Model.classification import \
    ClassGroupDefault, ClassGroupSingleton, ClassGroupRange, ClassGroupMap
from Thuban.Model.transientdb import AutoTransientTable, TransientJoinedTable
from Thuban.Model.table import DBFTable, FIELDTYPE_STRING
from Thuban.Model.data import DerivedShapeStore, ShapefileStore, \
     SHAPETYPE_POINT

from Thuban.Model.xmlwriter import XMLWriter
from postgisdb import PostGISConnection, PostGISShapeStore


def relative_filename(dir, filename):
    """Return a filename relative to dir for the absolute file name absname.
    else:
        return filename


def unify_filename(filename):
    """Return a 'unified' version of filename

    The .thuban files should be as platform independent as possible.
    Since they must contain filenames, the filenames have to be unified. We
    unify on unix-like filenames for now, which means we do nothing on a
    posix system and simply replace backslashes with slashes on windows
    """
    if os.name == "posix":
        return filename
    elif os.name == "nt":
        return "/".join(filename.split("\\"))
    else:
        raise RuntimeError("Unsupported platform for unify_filename: %s"
                           % os.name)
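
# Illustrative example (hypothetical paths), assuming a POSIX system:
#   unify_filename(relative_filename("/home/user/project",
#                                    "/home/user/project/data/roads.shp"))
# would be expected to yield "data/roads.shp"; on Windows ("nt") any
# backslashes in the relative name are replaced by forward slashes.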
|
|
def sort_data_stores(stores):
    """Return a topologically sorted version of the sequence of data containers

    The list is sorted so that data containers that depend on other data
    containers have higher indexes than the containers they depend on.
    """
    if not stores:
        return []
    processed = {}
    result = []
    todo = stores[:]
    while todo:
        # It doesn't really matter which of the items of todo is
        # processed next, but if we take the first one, the order is
        # preserved to some degree which makes writing some of the test
        # cases easier.
        container = todo.pop(0)
        if id(container) in processed:
            continue
        deps = [dep for dep in container.Dependencies()
                    if id(dep) not in processed]
        if deps:
            todo.append(container)
            todo.extend(deps)
        else:
            result.append(container)
            processed[id(container)] = 1
    return result

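# Illustrative example: if a DerivedShapeStore D lists a ShapefileStore S in
# its Dependencies(), sort_data_stores([D, S]) returns [S, D], so S is written
# to the .thuban file before any container that refers to it.
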
|
class SessionSaver(XMLWriter):

    """Class to serialize a session into an XML file.

    Applications built on top of Thuban may derive from this class and
    override or extend the methods to save additional information. This
    additional information should take the form of additional attributes
    or elements whose names are prefixed with a namespace. To define a
    namespace derived classes should extend the write_session method to

    def __init__(self, session):
        XMLWriter.__init__(self)
        self.session = session
        # Map object ids to the ids used in the thuban files
        self.idmap = {}

    def get_id(self, obj):
        """Return the id used in the thuban file for the object obj"""
        return self.idmap.get(id(obj))

    def define_id(self, obj, value = None):
        if value is None:
            value = "D" + str(id(obj))
        self.idmap[id(obj)] = value
        return value

    def has_id(self, obj):
        return self.idmap.has_key(id(obj))

    def prepare_filename(self, filename):
        """Return the string to use when writing filename to the thuban file

        The returned string is a unified version (only slashes as
        directory separators, see unify_filename) of filename expressed
        relative to the directory the .thuban file is written to.
        """
        return unify_filename(relative_filename(self.dir, filename))

    def write(self, file_or_filename):
        """Write the session to a file.

        The argument may be either a file object or a filename. If it's
        a filename, the file will be opened for writing. Files of
        shapefiles will be stored as filenames relative to the directory
        the file is stored in (as given by os.path.dirname(filename)) if
        they have a common parent directory other than the root
        directory.

        If the argument is a file object (which is determined by the
        presence of a write method) all filenames will be absolute
        filenames.
        """
        XMLWriter.write(self, file_or_filename)

        self.write_header("session", "thuban-1.1.dtd")
        self.write_session(self.session)
        self.close()

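    # How ids are assigned: define_id gives each container an id of the form
    # "D" + str(id(obj)) unless an explicit value is passed, as
    # write_data_containers does for AutoTransientTable sources; get_id then
    # returns that string wherever the object is referenced.
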
    def write_session(self, session, attrs = None, namespaces = ()):
        """Write the session and its contents
        attrs["title"] = session.title
        for name, uri in namespaces:
            attrs["xmlns:" + name] = uri
        # default name space
        attrs["xmlns"] = \
               "http://thuban.intevation.org/dtds/thuban-1.1-dev.dtd"
        self.open_element("session", attrs)
        self.write_db_connections(session)
        self.write_data_containers(session)
        for map in session.Maps():
            self.write_map(map)
        self.close_element("session")
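        # Sketch of the resulting element (illustrative, abbreviated):
        #   <session title="..."
        #            xmlns="http://thuban.intevation.org/dtds/thuban-1.1-dev.dtd">
        #     <dbconnection .../> <fileshapesource .../> <map .../> ...
        #   </session>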
|
    def write_db_connections(self, session):
        for conn in session.DBConnections():
            if isinstance(conn, PostGISConnection):
                self.write_element("dbconnection",
                                   {"id": self.define_id(conn),
                                    "dbtype": "postgis",
                                    "host": conn.host,
                                    "port": conn.port,
                                    "user": conn.user,
                                    "dbname": conn.dbname})
            else:
                raise ValueError("Can't handle db connection %r" % conn)
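
        # Illustrative output for one PostGIS connection (attribute order
        # may differ):
        #   <dbconnection id="D..." dbtype="postgis" host="..." port="..."
        #                 user="..." dbname="..."/>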
|
|
    def write_data_containers(self, session):
        containers = sort_data_stores(session.DataContainers())
        for container in containers:
            if isinstance(container, AutoTransientTable):
                # AutoTransientTable instances are invisible in the
                # thuban files. They're only used internally. To make
                # sure that containers depending on AutoTransientTable
                # instances refer to the right real containers we give
                # the AutoTransientTable instances the same id as the
                # source they depend on.
                self.define_id(container,
                               self.get_id(container.Dependencies()[0]))
                continue

            idvalue = self.define_id(container)
            if isinstance(container, ShapefileStore):
                self.define_id(container.Table(), idvalue)
                filename = self.prepare_filename(container.FileName())
                self.write_element("fileshapesource",
                                   {"id": idvalue, "filename": filename,
                                    "filetype": "shapefile"})
            elif isinstance(container, DerivedShapeStore):
                shapesource, table = container.Dependencies()
                self.write_element("derivedshapesource",
                                   {"id": idvalue,
                                    "shapesource": self.get_id(shapesource),
                                    "table": self.get_id(table)})
            elif isinstance(container, PostGISShapeStore):
                conn = container.DBConnection()
                self.write_element("dbshapesource",
                                   {"id": idvalue,
                                    "dbconn": self.get_id(conn),
                                    "tablename": container.TableName(),
                                    "id_column": container.IDColumn().name,
                                    "geometry_column":
                                        container.GeometryColumn().name,
                                    })
            elif isinstance(container, DBFTable):
                filename = self.prepare_filename(container.FileName())
                self.write_element("filetable",
                                   {"id": idvalue,
                                    "title": container.Title(),
                                    "filename": filename,
                                    "filetype": "DBF"})
            elif isinstance(container, TransientJoinedTable):
                left, right = container.Dependencies()
                left_field = container.left_field
                right_field = container.right_field
                self.write_element("jointable",
                                   {"id": idvalue,
                                    "title": container.Title(),
                                    "right": self.get_id(right),
                                    "rightcolumn": right_field,
                                    "left": self.get_id(left),
                                    "leftcolumn": left_field,
                                    "jointype": container.JoinType()})
            else:
                raise ValueError("Can't handle container %r" % container)
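
        # Illustrative output for a shapefile store and a joined table
        # (values are placeholders):
        #   <fileshapesource id="D..." filename="data/roads.shp"
        #                    filetype="shapefile"/>
        #   <jointable id="D..." title="..." left="D..." leftcolumn="..."
        #              right="D..." rightcolumn="..." jointype="..."/>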
|
|
    def write_map(self, map):
        """Write the map and its contents.
        element, call write_layer for each layer contained in the map
        and finally call write_label_layer to write the label layer.
        """
        self.open_element('map title="%s"' % self.encode(map.title))
        self.write_projection(map.projection)
        for layer in map.Layers():
            self.write_layer(layer)

        """Write the projection.
        """
        if projection and len(projection.params) > 0:
            attrs = {"name": projection.GetName()}
            epsg = projection.EPSGCode()
            if epsg is not None:
                attrs["epsg"] = epsg
            self.open_element("projection", attrs)
            for param in projection.params:
                self.write_element('parameter value="%s"' %
                                   self.encode(param))
            self.close_element("projection")
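
        # Illustrative output (epsg appears only when EPSGCode() is not None):
        #   <projection name="..." epsg="...">
        #       <parameter value="..."/>
        #   </projection>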
|
|
    def write_layer(self, layer, attrs = None):
        given, should be a mapping from attribute names to attribute
        values. The values should not be XML-escaped yet.
        """

        if attrs is None:
            attrs = {}

        attrs["title"] = layer.title
        attrs["visible"] = ("false", "true")[int(layer.Visible())]

        if isinstance(layer, Layer):
            attrs["shapestore"] = self.get_id(layer.ShapeStore())

            lc = layer.GetClassification()
            attrs["stroke"] = lc.GetDefaultLineColor().hex()
            attrs["stroke_width"] = str(lc.GetDefaultLineWidth())
            attrs["fill"] = lc.GetDefaultFill().hex()

            self.open_element("layer", attrs)
            self.write_projection(layer.GetProjection())
            self.write_classification(layer)
            self.close_element("layer")
        elif isinstance(layer, RasterLayer):
            attrs["filename"] = self.prepare_filename(layer.filename)
            self.open_element("rasterlayer", attrs)
            self.write_projection(layer.GetProjection())
            self.close_element("rasterlayer")
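
        # Illustrative output for a vector layer (abbreviated):
        #   <layer title="..." visible="true" shapestore="D..."
        #          stroke="..." stroke_width="..." fill="...">
        #       <projection .../> <classification .../>
        #   </layer>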
|
|
    def write_classification(self, layer, attrs = None):
        """Write Classification information."""

        if attrs is None:
            attrs = {}

        lc = layer.GetClassification()

        field = layer.GetClassificationColumn()

        #
        # there isn't a classification of anything so do nothing
        #
        if field is None: return

        attrs["field"] = field
        attrs["field_type"] = str(layer.GetFieldType(field))
        self.open_element("classification", attrs)

        for g in lc:
            if isinstance(g, ClassGroupDefault):
                open_el = 'clnull label="%s"' % self.encode(g.GetLabel())
                close_el = 'clnull'
            elif isinstance(g, ClassGroupSingleton):
                if layer.GetFieldType(field) == FIELDTYPE_STRING:
                    value = self.encode(g.GetValue())
                else:
                    value = str(g.GetValue())
                open_el = 'clpoint label="%s" value="%s"' \
                          % (self.encode(g.GetLabel()), value)
                close_el = 'clpoint'
            elif isinstance(g, ClassGroupRange):
                open_el = 'clrange label="%s" range="%s"' \
                          % (self.encode(g.GetLabel()), str(g.GetRange()))
                close_el = 'clrange'
            else:
                assert False, _("Unsupported group type in classification")
                continue

            data = g.GetProperties()
            dict = {'stroke'      : data.GetLineColor().hex(),
                    'stroke_width': str(data.GetLineWidth()),
                    'fill'        : data.GetFill().hex()}

            # only for point layers write the size attribute
            if layer.ShapeType() == SHAPETYPE_POINT:
                dict['size'] = str(data.GetSize())

            self.open_element(open_el)
            self.write_element("cldata", dict)
            self.close_element(close_el)

        self.close_element("classification")
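
        # Illustrative output for one singleton group (values abbreviated):
        #   <clpoint label="..." value="...">
        #       <cldata stroke="..." stroke_width="..." fill="..."/>
        #   </clpoint>
        # For point layers the cldata element additionally carries a size
        # attribute.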

            for label in labels:
                self.write_element(('label x="%g" y="%g" text="%s"'
                                    ' halign="%s" valign="%s"')
                                   % (label.x, label.y,
                                      self.encode(label.text),
                                      label.halign,
                                      label.valign))
            self.close_element('labellayer')
|
|
    The optional argument saver_class is the class to use to serialize
    the session. By default or if it's None, the saver class will be
    SessionSaver.

    If writing the session is successful, call the session's
    UnsetModified method.
    """
    if saver_class is None:
        saver_class = SessionSaver
    saver = saver_class(session)
    saver.write(file)
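
# Illustrative usage (hypothetical session object and output path, assuming
# this module is importable as Thuban.Model.save):
#   from Thuban.Model.save import save_session
#   save_session(session, "/tmp/example.thuban")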
|
|