# Copyright (c) 2001-2005 by Intevation GmbH
# Authors:
# Jan-Oliver Wagner <[email protected]> (2004)
# Bernhard Herzog <[email protected]> (2001-2004)
# Jonathan Coles <[email protected]> (2003)
# Frank Koormann <[email protected]> (2003)
#
# This program is free software under the GPL (>=v2)
# Read the file COPYING coming with Thuban for details.
13 |
""" |
""" |
14 |
|
|
15 |
__version__ = "$Revision$" |
__version__ = "$Revision$" |
16 |
|
# $Source$ |
17 |
# fix for people using python2.1 |
# $Id$ |
|
from __future__ import nested_scopes |
|
18 |
|
|
19 |
import os |
import os |
|
import string |
|
20 |
|
|
21 |
import Thuban.Lib.fileutil |
import Thuban.Lib.fileutil |
22 |
|
|
23 |
from Thuban.Model.color import Color |
from Thuban.Model.layer import Layer, RasterLayer |
24 |
|
|
25 |
from Thuban.Model.classification import * |
from Thuban.Model.classification import \ |
26 |
|
ClassGroupDefault, ClassGroupSingleton, ClassGroupRange, ClassGroupMap |
27 |
|
from Thuban.Model.transientdb import AutoTransientTable, TransientJoinedTable |
28 |
|
from Thuban.Model.table import DBFTable, FIELDTYPE_STRING |
29 |
|
from Thuban.Model.data import DerivedShapeStore, FileShapeStore, \ |
30 |
|
SHAPETYPE_POINT |
31 |
|
|
32 |
# |
from Thuban.Model.xmlwriter import XMLWriter |
33 |
# one level of indention |
from postgisdb import PostGISConnection, PostGISShapeStore |
|
# |
|
|
# One level of indentation for the written XML.
# NOTE(review): only used by the legacy writer code path; the XMLWriter
# base class now manages indentation itself.  Exact original width is
# unclear from the corrupted source -- verify against revision history.
TAB = "    "


def relative_filename(dir, filename):
    """Return a filename relative to dir for the absolute file name filename.

    This is almost the same as the function in Thuban.Lib.fileutil,
    except that dir may be an empty string, in which case filename is
    returned unchanged.
    """
    if dir and os.path.isabs(filename):
        return Thuban.Lib.fileutil.relative_filename(dir, filename)
    else:
        return filename
46 |
|
|
47 |
def escape(data):
    """Escape &, \", ', <, and > in a string of data.

    Returns the escaped string; the ampersand is replaced first so the
    entities introduced for the other characters are not re-escaped.
    """
    # The corrupted source showed the replacement entities already
    # un-escaped; restored to the standard XML entities.  Uses the
    # str method instead of the deprecated string-module function.
    data = data.replace("&", "&amp;")
    data = data.replace("<", "&lt;")
    data = data.replace(">", "&gt;")
    data = data.replace('"', "&quot;")
    data = data.replace("'", "&apos;")
    return data


def unify_filename(filename):
    """Return a 'unified' version of filename

    The .thuban files should be as platform independent as possible.
    Since they must contain filenames the filenames have to unified. We
    unify on unix-like filenames for now, which means we do nothing on a
    posix system and simply replace backslashes with slashes on windows
    """
    if os.name == "posix":
        return filename
    elif os.name == "nt":
        return "/".join(filename.split("\\"))
    else:
        raise RuntimeError("Unsupported platform for unify_filename: %s"
                           % os.name)
64 |
|
def sort_data_stores(stores):
    """Return a topologically sorted version of the sequence of data containers

    The list is sorted so that data containers that depend on other data
    containers have higher indexes than the containers they depend on.
    """
    if not stores:
        return []
    processed = {}
    result = []
    todo = stores[:]
    while todo:
        # It doesn't really matter which of the items of todo is
        # processed next, but if we take the first one, the order is
        # preserved to some degree which makes writing some of the test
        # cases easier.
        container = todo.pop(0)
        if id(container) in processed:
            continue
        deps = [dep for dep in container.Dependencies()
                    if id(dep) not in processed]
        if deps:
            # Not all dependencies have been written yet; re-queue the
            # container after its unprocessed dependencies.
            todo.append(container)
            todo.extend(deps)
        else:
            result.append(container)
            processed[id(container)] = 1
    return result
93 |
|
def bool2str(b):
    """Return the XML boolean literal for the truth value of b.

    Any true value maps to "true", any false value to "false".
    """
    if b:
        return "true"
    return "false"
96 |
|
|
97 |
class SessionSaver(XMLWriter):

    """Class to serialize a session into an XML file.

    Applications built on top of Thuban may derive from this class and
    override or extend the methods to save additional information. This
    additional information should take the form of additional attributes
    or elements whose names are prefixed with a namespace. To define a
    namespace derived classes should extend the write_session method to
    pass the namespace definitions to this implementation via the
    namespaces parameter.
    """

    def __init__(self, session):
        XMLWriter.__init__(self)
        self.session = session
        # Map object ids to the ids used in the thuban files
        self.idmap = {}

    def get_id(self, obj):
        """Return the id used in the thuban file for the object obj"""
        return self.idmap.get(id(obj))

    def define_id(self, obj, value = None):
        """Associate obj with an id for the thuban file and return the id.

        If value is None a new id is derived from the object's Python
        id(); otherwise value itself is used.
        """
        if value is None:
            value = "D" + str(id(obj))
        self.idmap[id(obj)] = value
        return value

    def has_id(self, obj):
        """Return whether an id has already been defined for obj"""
        return id(obj) in self.idmap

    def prepare_filename(self, filename):
        """Return the string to use when writing filename to the thuban file

        The returned string is a unified version (only slashes as
        directory separators, see unify_filename) of filename expressed
        relative to the directory the .thuban file is written to.
        """
        return unify_filename(relative_filename(self.dir, filename))

    def write(self, file_or_filename):
        """Write the session as XML to file_or_filename.

        The argument may be either an open file object or a filename.
        The actual file handling is done by the XMLWriter base class.
        """
        XMLWriter.write(self, file_or_filename)

        self.write_header("session", "thuban-1.1.dtd")
        self.write_session(self.session)
        self.close()

    def write_session(self, session, attrs = None, namespaces = ()):
        """Write the session and its contents

        The optional argument attrs, if given, should be a mapping of
        additional attribute names to (not yet XML-escaped) values for
        the session element.

        The optional argument namespaces should be a sequence of
        (name, uri) pairs; each is written as an xmlns:name attribute.
        """
        if attrs is None:
            attrs = {}
        attrs["title"] = session.title
        for name, uri in namespaces:
            attrs["xmlns:" + name] = uri
        # default name space
        attrs["xmlns"] = \
               "http://thuban.intevation.org/dtds/thuban-1.1-dev.dtd"
        self.open_element("session", attrs)
        self.write_db_connections(session)
        self.write_data_containers(session)
        for map in session.Maps():
            self.write_map(map)
        self.close_element("session")

    def write_db_connections(self, session):
        """Write a dbconnection element for each of the session's connections

        Only PostGISConnection instances are supported; any other
        connection type raises ValueError.
        """
        for conn in session.DBConnections():
            if isinstance(conn, PostGISConnection):
                self.write_element("dbconnection",
                                   {"id": self.define_id(conn),
                                    "dbtype": "postgis",
                                    "host": conn.host,
                                    "port": conn.port,
                                    "user": conn.user,
                                    "dbname": conn.dbname})
            else:
                raise ValueError("Can't handle db connection %r" % conn)

    def write_data_containers(self, session):
        """Write the session's data containers in dependency order."""
        containers = sort_data_stores(session.DataContainers())
        for container in containers:
            if isinstance(container, AutoTransientTable):
                # AutoTransientTable instances are invisible in the
                # thuban files. They're only used internally. To make
                # sure that containers depending on AutoTransientTable
                # instances refer to the right real containers we give
                # the AutoTransientTable instances the same id as the
                # source they depend on.
                self.define_id(container,
                               self.get_id(container.Dependencies()[0]))
                continue

            idvalue = self.define_id(container)
            if isinstance(container, FileShapeStore):
                self.define_id(container.Table(), idvalue)
                filename = self.prepare_filename(container.FileName())
                self.write_element("fileshapesource",
                                   {"id": idvalue, "filename": filename,
                                    "filetype": container.FileType()})
            elif isinstance(container, DerivedShapeStore):
                shapesource, table = container.Dependencies()
                self.write_element("derivedshapesource",
                                   {"id": idvalue,
                                    "shapesource": self.get_id(shapesource),
                                    "table": self.get_id(table)})
            elif isinstance(container, PostGISShapeStore):
                conn = container.DBConnection()
                self.write_element("dbshapesource",
                                   {"id": idvalue,
                                    "dbconn": self.get_id(conn),
                                    "tablename": container.TableName(),
                                    "id_column": container.IDColumn().name,
                                    "geometry_column":
                                        container.GeometryColumn().name,
                                    })
            elif isinstance(container, DBFTable):
                filename = self.prepare_filename(container.FileName())
                self.write_element("filetable",
                                   {"id": idvalue,
                                    "title": container.Title(),
                                    "filename": filename,
                                    "filetype": "DBF"})
            elif isinstance(container, TransientJoinedTable):
                left, right = container.Dependencies()
                left_field = container.left_field
                right_field = container.right_field
                self.write_element("jointable",
                                   {"id": idvalue,
                                    "title": container.Title(),
                                    "right": self.get_id(right),
                                    "rightcolumn": right_field,
                                    "left": self.get_id(left),
                                    "leftcolumn": left_field,
                                    "jointype": container.JoinType()})
            else:
                raise ValueError("Can't handle container %r" % container)

    def write_map(self, map):
        """Write the map and its contents.

        By default, write a map element with the title attribute, call
        write_projection with the map's projection, call write_layer
        for each layer contained in the map and finally call
        write_label_layer to write the label layer.
        """
        self.open_element('map title="%s"' % self.encode(map.title))
        self.write_projection(map.projection)
        for layer in map.Layers():
            self.write_layer(layer)
        self.write_label_layer(map.LabelLayer())
        self.close_element('map')

    def write_projection(self, projection):
        """Write the projection.

        Nothing is written if projection is None or has no parameters.
        """
        if projection and len(projection.params) > 0:
            attrs = {"name": projection.GetName()}
            epsg = projection.EPSGCode()
            if epsg is not None:
                attrs["epsg"] = epsg
            self.open_element("projection", attrs)
            for param in projection.params:
                self.write_element('parameter value="%s"' %
                                   self.encode(param))
            self.close_element("projection")

    def write_layer(self, layer, attrs = None):
        """Write the layer.

        The optional argument attrs is for additional attributes. If
        given, should be a mapping from attribute names to attribute
        values. The values should not be XML-escaped yet.
        """
        if attrs is None:
            attrs = {}

        attrs["title"] = layer.title
        attrs["visible"] = bool2str(layer.Visible())

        if isinstance(layer, Layer):
            attrs["shapestore"] = self.get_id(layer.ShapeStore())

            lc = layer.GetClassification()
            attrs["stroke"] = lc.GetDefaultLineColor().hex()
            attrs["stroke_width"] = str(lc.GetDefaultLineWidth())
            attrs["fill"] = lc.GetDefaultFill().hex()

            self.open_element("layer", attrs)
            self.write_projection(layer.GetProjection())
            self.write_classification(layer)
            self.close_element("layer")
        elif isinstance(layer, RasterLayer):
            attrs["filename"] = self.prepare_filename(layer.filename)
            self.open_element("rasterlayer", attrs)
            self.write_projection(layer.GetProjection())
            self.close_element("rasterlayer")

    def write_classification(self, layer, attrs = None):
        """Write Classification information."""
        if attrs is None:
            attrs = {}

        lc = layer.GetClassification()

        field = layer.GetClassificationColumn()

        #
        # there isn't a classification of anything so do nothing
        #
        if field is None: return

        attrs["field"] = field
        attrs["field_type"] = str(layer.GetFieldType(field))
        self.open_element("classification", attrs)

        for g in lc:
            if isinstance(g, ClassGroupDefault):
                open_el = 'clnull label="%s"' % self.encode(g.GetLabel())
                close_el = 'clnull'
            elif isinstance(g, ClassGroupSingleton):
                # String values need XML-escaping; other field types are
                # simply stringified.
                if layer.GetFieldType(field) == FIELDTYPE_STRING:
                    value = self.encode(g.GetValue())
                else:
                    value = str(g.GetValue())
                open_el = 'clpoint label="%s" value="%s"' \
                          % (self.encode(g.GetLabel()), value)
                close_el = 'clpoint'
            elif isinstance(g, ClassGroupRange):
                open_el = 'clrange label="%s" range="%s"' \
                          % (self.encode(g.GetLabel()), str(g.GetRange()))
                close_el = 'clrange'
            else:
                assert False, _("Unsupported group type in classification")
                continue

            data = g.GetProperties()
            cldata_attrs = {'stroke' : data.GetLineColor().hex(),
                            'stroke_width': str(data.GetLineWidth()),
                            'fill' : data.GetFill().hex()}

            # only for point layers write the size attribute
            if layer.ShapeType() == SHAPETYPE_POINT:
                cldata_attrs['size'] = str(data.GetSize())

            self.open_element(open_el)
            self.write_element("cldata", cldata_attrs)
            self.close_element(close_el)

        self.close_element("classification")

    def write_label_layer(self, layer):
        """Write the label layer.

        Nothing is written if the layer has no labels.
        """
        labels = layer.Labels()
        if labels:
            self.open_element('labellayer')
            for label in labels:
                self.write_element(('label x="%g" y="%g" text="%s"'
                                    ' halign="%s" valign="%s"')
                                   % (label.x, label.y,
                                      self.encode(label.text),
                                      label.halign,
                                      label.valign))
            self.close_element('labellayer')
378 |
|
|
def save_session(session, file, saver_class = None):
    """Save the session session to a file.

    The file argument may either be a filename or an open file object.

    The optional argument saver_class is the class to use to serialize
    the session. By default or if it's None, the saver class will be
    SessionSaver.

    If writing the session is successful call the session's
    UnsetModified method
    """
    if saver_class is None:
        saver_class = SessionSaver
    saver = saver_class(session)
    saver.write(file)
    session.UnsetModified()