1 |
# Copyright (c) 2001, 2002, 2003 by Intevation GmbH |
# Copyright (c) 2001, 2002, 2003, 2004 by Intevation GmbH |
2 |
# Authors: |
# Authors: |
3 |
# Jan-Oliver Wagner <[email protected]> |
# Jan-Oliver Wagner <[email protected]> |
4 |
# Bernhard Herzog <[email protected]> |
# Bernhard Herzog <[email protected]> |
14 |
__version__ = "$Revision$" |
__version__ = "$Revision$" |
15 |
|
|
16 |
import os |
import os |
|
import string |
|
17 |
|
|
18 |
import Thuban.Lib.fileutil |
import Thuban.Lib.fileutil |
19 |
|
|
20 |
|
from Thuban.Model.layer import Layer, RasterLayer |
21 |
|
|
22 |
from Thuban.Model.classification import \ |
from Thuban.Model.classification import \ |
23 |
ClassGroupDefault, ClassGroupSingleton, ClassGroupRange, ClassGroupMap |
ClassGroupDefault, ClassGroupSingleton, ClassGroupRange, ClassGroupMap |
24 |
|
from Thuban.Model.transientdb import AutoTransientTable, TransientJoinedTable |
25 |
|
from Thuban.Model.table import DBFTable, FIELDTYPE_STRING |
26 |
|
from Thuban.Model.data import DerivedShapeStore, ShapefileStore |
27 |
|
|
28 |
# |
from Thuban.Model.xmlwriter import XMLWriter |
29 |
# one level of indention |
from postgisdb import PostGISConnection, PostGISShapeStore |
|
# |
|
|
TAB = " " |
|
30 |
|
|
31 |
def relative_filename(dir, filename): |
def relative_filename(dir, filename): |
32 |
"""Return a filename relative to dir for the absolute file name absname. |
"""Return a filename relative to dir for the absolute file name absname. |
40 |
else: |
else: |
41 |
return filename |
return filename |
42 |
|
|
|
def escape(data):
    """Escape &, ", ', <, and > in a string of data.

    Return a copy of data safe for inclusion in XML attribute values
    and character data.  The ampersand is replaced first so that the
    entities introduced for the other characters are not re-escaped.
    """
    # NOTE(review): the extracted revision showed these replacements as
    # no-ops because the entity references had been decoded; restored to
    # the standard XML entities.
    data = data.replace("&", "&amp;")
    data = data.replace("<", "&lt;")
    data = data.replace(">", "&gt;")
    data = data.replace('"', "&quot;")
    data = data.replace("'", "&apos;")
    return data
|
43 |
|
|
44 |
def unify_filename(filename):
    """Return a 'unified' version of filename

    The .thuban files should be as platform independent as possible.
    Since they must contain filenames the filenames have to be unified.
    We unify on unix-like filenames for now, which means we do nothing
    on a posix system and simply replace backslashes with slashes on
    windows.

    Raise RuntimeError on any other platform, so unsupported cases fail
    loudly instead of silently writing broken filenames.
    """
    if os.name == "posix":
        return filename
    elif os.name == "nt":
        return "/".join(filename.split("\\"))
    else:
        raise RuntimeError("Unsupported platform for unify_filename: %s"
                           % os.name)
59 |
|
|
60 |
def sort_data_stores(stores):
    """Return a topologically sorted version of the sequence of data containers

    The list is sorted so that data containers that depend on other data
    containers have higher indexes than the containers they depend on.
    """
    if not stores:
        return []
    processed = {}
    result = []
    todo = stores[:]
    while todo:
        # It doesn't really matter which of the items of todo is
        # processed next, but if we take the first one, the order is
        # preserved to some degree which makes writing some of the test
        # cases easier.
        container = todo.pop(0)
        if id(container) in processed:
            continue
        deps = [dep for dep in container.Dependencies()
                if id(dep) not in processed]
        if deps:
            # Not all dependencies have been emitted yet: revisit this
            # container after its dependencies have been handled.
            todo.append(container)
            todo.extend(deps)
        else:
            result.append(container)
            processed[id(container)] = 1
    return result
|
88 |
|
|
89 |
|
|
90 |
class SessionSaver(XMLWriter): |
class SessionSaver(XMLWriter): |
103 |
def __init__(self, session): |
def __init__(self, session): |
104 |
XMLWriter.__init__(self) |
XMLWriter.__init__(self) |
105 |
self.session = session |
self.session = session |
106 |
|
# Map object ids to the ids used in the thuban files |
107 |
|
self.idmap = {} |
108 |
|
|
109 |
|
def get_id(self, obj): |
110 |
|
"""Return the id used in the thuban file for the object obj""" |
111 |
|
return self.idmap.get(id(obj)) |
112 |
|
|
113 |
|
def define_id(self, obj, value = None): |
114 |
|
if value is None: |
115 |
|
value = "D" + str(id(obj)) |
116 |
|
self.idmap[id(obj)] = value |
117 |
|
return value |
118 |
|
|
119 |
|
def has_id(self, obj): |
120 |
|
return self.idmap.has_key(id(obj)) |
121 |
|
|
122 |
|
def prepare_filename(self, filename): |
123 |
|
"""Return the string to use when writing filename to the thuban file |
124 |
|
|
125 |
|
The returned string is a unified version (only slashes as |
126 |
|
directory separators, see unify_filename) of filename expressed |
127 |
|
relative to the directory the .thuban file is written to. |
128 |
|
""" |
129 |
|
return unify_filename(relative_filename(self.dir, filename)) |
130 |
|
|
131 |
def write(self, file_or_filename): |
def write(self, file_or_filename): |
132 |
XMLWriter.write(self, file_or_filename) |
XMLWriter.write(self, file_or_filename) |
133 |
|
|
134 |
self.write_header("session", "thuban.dtd") |
self.write_header("session", "thuban-1.1.dtd") |
135 |
self.write_session(self.session) |
self.write_session(self.session) |
136 |
self.close() |
self.close() |
137 |
|
|
156 |
attrs["title"] = session.title |
attrs["title"] = session.title |
157 |
for name, uri in namespaces: |
for name, uri in namespaces: |
158 |
attrs["xmlns:" + name] = uri |
attrs["xmlns:" + name] = uri |
159 |
|
# default name space |
160 |
|
attrs["xmlns"] = \ |
161 |
|
"http://thuban.intevation.org/dtds/thuban-1.1-dev.dtd" |
162 |
self.open_element("session", attrs) |
self.open_element("session", attrs) |
163 |
|
self.write_db_connections(session) |
164 |
|
self.write_data_containers(session) |
165 |
for map in session.Maps(): |
for map in session.Maps(): |
166 |
self.write_map(map) |
self.write_map(map) |
167 |
self.close_element("session") |
self.close_element("session") |
168 |
|
|
169 |
|
def write_db_connections(self, session): |
170 |
|
for conn in session.DBConnections(): |
171 |
|
if isinstance(conn, PostGISConnection): |
172 |
|
self.write_element("dbconnection", |
173 |
|
{"id": self.define_id(conn), |
174 |
|
"dbtype": "postgis", |
175 |
|
"host": conn.host, |
176 |
|
"port": conn.port, |
177 |
|
"user": conn.user, |
178 |
|
"dbname": conn.dbname}) |
179 |
|
else: |
180 |
|
raise ValueError("Can't handle db connection %r" % conn) |
181 |
|
|
182 |
|
def write_data_containers(self, session): |
183 |
|
containers = sort_data_stores(session.DataContainers()) |
184 |
|
for container in containers: |
185 |
|
if isinstance(container, AutoTransientTable): |
186 |
|
# AutoTransientTable instances are invisible in the |
187 |
|
# thuban files. They're only used internally. To make |
188 |
|
# sure that containers depending on AutoTransientTable |
189 |
|
# instances refer to the right real containers we give |
190 |
|
# the AutoTransientTable instances the same id as the |
191 |
|
# source they depend on. |
192 |
|
self.define_id(container, |
193 |
|
self.get_id(container.Dependencies()[0])) |
194 |
|
continue |
195 |
|
|
196 |
|
idvalue = self.define_id(container) |
197 |
|
if isinstance(container, ShapefileStore): |
198 |
|
self.define_id(container.Table(), idvalue) |
199 |
|
filename = self.prepare_filename(container.FileName()) |
200 |
|
self.write_element("fileshapesource", |
201 |
|
{"id": idvalue, "filename": filename, |
202 |
|
"filetype": "shapefile"}) |
203 |
|
elif isinstance(container, DerivedShapeStore): |
204 |
|
shapesource, table = container.Dependencies() |
205 |
|
self.write_element("derivedshapesource", |
206 |
|
{"id": idvalue, |
207 |
|
"shapesource": self.get_id(shapesource), |
208 |
|
"table": self.get_id(table)}) |
209 |
|
elif isinstance(container, PostGISShapeStore): |
210 |
|
conn = container.DBConnection() |
211 |
|
self.write_element("dbshapesource", |
212 |
|
{"id": idvalue, |
213 |
|
"dbconn": self.get_id(conn), |
214 |
|
"tablename": container.TableName(), |
215 |
|
"id_column": container.IDColumn().name, |
216 |
|
"geometry_column": |
217 |
|
container.GeometryColumn().name, |
218 |
|
}) |
219 |
|
elif isinstance(container, DBFTable): |
220 |
|
filename = self.prepare_filename(container.FileName()) |
221 |
|
self.write_element("filetable", |
222 |
|
{"id": idvalue, |
223 |
|
"title": container.Title(), |
224 |
|
"filename": filename, |
225 |
|
"filetype": "DBF"}) |
226 |
|
elif isinstance(container, TransientJoinedTable): |
227 |
|
left, right = container.Dependencies() |
228 |
|
left_field = container.left_field |
229 |
|
right_field = container.right_field |
230 |
|
self.write_element("jointable", |
231 |
|
{"id": idvalue, |
232 |
|
"title": container.Title(), |
233 |
|
"right": self.get_id(right), |
234 |
|
"rightcolumn": right_field, |
235 |
|
"left": self.get_id(left), |
236 |
|
"leftcolumn": left_field, |
237 |
|
"jointype": container.JoinType()}) |
238 |
|
else: |
239 |
|
raise ValueError("Can't handle container %r" % container) |
240 |
|
|
241 |
|
|
242 |
def write_map(self, map): |
def write_map(self, map): |
243 |
"""Write the map and its contents. |
"""Write the map and its contents. |
244 |
|
|
258 |
"""Write the projection. |
"""Write the projection. |
259 |
""" |
""" |
260 |
if projection and len(projection.params) > 0: |
if projection and len(projection.params) > 0: |
261 |
self.open_element("projection", {"name": projection.GetName()}) |
attrs = {"name": projection.GetName()} |
262 |
|
epsg = projection.EPSGCode() |
263 |
|
if epsg is not None: |
264 |
|
attrs["epsg"] = epsg |
265 |
|
self.open_element("projection", attrs) |
266 |
for param in projection.params: |
for param in projection.params: |
267 |
self.write_element('parameter value="%s"' % |
self.write_element('parameter value="%s"' % |
268 |
self.encode(param)) |
self.encode(param)) |
275 |
given, should be a mapping from attribute names to attribute |
given, should be a mapping from attribute names to attribute |
276 |
values. The values should not be XML-escaped yet. |
values. The values should not be XML-escaped yet. |
277 |
""" |
""" |
|
lc = layer.GetClassification() |
|
278 |
|
|
279 |
if attrs is None: |
if attrs is None: |
280 |
attrs = {} |
attrs = {} |
281 |
|
|
282 |
attrs["title"] = layer.title |
attrs["title"] = layer.title |
283 |
attrs["filename"] = relative_filename(self.dir, layer.filename) |
attrs["visible"] = ("false", "true")[int(layer.Visible())] |
|
attrs["stroke"] = lc.GetDefaultLineColor().hex() |
|
|
attrs["stroke_width"] = str(lc.GetDefaultLineWidth()) |
|
|
attrs["fill"] = lc.GetDefaultFill().hex() |
|
|
attrs["visible"] = ("false", "true")[int(layer.Visible())] |
|
|
|
|
|
self.open_element("layer", attrs) |
|
|
|
|
|
proj = layer.GetProjection() |
|
|
if proj is not None: |
|
|
self.write_projection(proj) |
|
284 |
|
|
285 |
self.write_classification(layer) |
if isinstance(layer, Layer): |
286 |
|
attrs["shapestore"] = self.get_id(layer.ShapeStore()) |
287 |
|
|
288 |
self.close_element("layer") |
lc = layer.GetClassification() |
289 |
|
attrs["stroke"] = lc.GetDefaultLineColor().hex() |
290 |
|
attrs["stroke_width"] = str(lc.GetDefaultLineWidth()) |
291 |
|
attrs["fill"] = lc.GetDefaultFill().hex() |
292 |
|
|
293 |
|
self.open_element("layer", attrs) |
294 |
|
self.write_projection(layer.GetProjection()) |
295 |
|
self.write_classification(layer) |
296 |
|
self.close_element("layer") |
297 |
|
elif isinstance(layer, RasterLayer): |
298 |
|
attrs["filename"] = self.prepare_filename(layer.filename) |
299 |
|
self.open_element("rasterlayer", attrs) |
300 |
|
self.write_projection(layer.GetProjection()) |
301 |
|
self.close_element("rasterlayer") |
302 |
|
|
303 |
def write_classification(self, layer, attrs = None): |
def write_classification(self, layer, attrs = None): |
304 |
|
"""Write Classification information.""" |
305 |
|
|
306 |
if attrs is None: |
if attrs is None: |
307 |
attrs = {} |
attrs = {} |
308 |
|
|
309 |
lc = layer.GetClassification() |
lc = layer.GetClassification() |
310 |
|
|
311 |
field = lc.GetField() |
field = layer.GetClassificationColumn() |
312 |
|
|
313 |
# |
# |
314 |
# there isn't a classification of anything |
# there isn't a classification of anything so do nothing |
|
# so don't do anything |
|
315 |
# |
# |
316 |
if field is None: return |
if field is None: return |
317 |
|
|
318 |
attrs["field"] = field |
attrs["field"] = field |
319 |
attrs["field_type"] = str(lc.GetFieldType()) |
attrs["field_type"] = str(layer.GetFieldType(field)) |
320 |
self.open_element("classification", attrs) |
self.open_element("classification", attrs) |
321 |
|
|
322 |
|
for g in lc: |
323 |
types = [[lambda p: 'clnull label="%s"' % self.encode(p.GetLabel()), |
if isinstance(g, ClassGroupDefault): |
324 |
lambda p: 'clnull'], |
open_el = 'clnull label="%s"' % self.encode(g.GetLabel()) |
325 |
[lambda p: 'clpoint label="%s" value="%s"' % |
close_el = 'clnull' |
326 |
(self.encode(p.GetLabel()), str(p.GetValue())), |
elif isinstance(g, ClassGroupSingleton): |
327 |
lambda p: 'clpoint'], |
if layer.GetFieldType(field) == FIELDTYPE_STRING: |
328 |
[lambda p: 'clrange label="%s" range="%s"' % |
value = self.encode(g.GetValue()) |
329 |
(self.encode(p.GetLabel()), |
else: |
330 |
str(p.GetRange())), |
value = str(g.GetValue()) |
331 |
lambda p: 'clrange']] |
open_el = 'clpoint label="%s" value="%s"' \ |
332 |
|
% (self.encode(g.GetLabel()), value) |
333 |
def write_class_group(group): |
close_el = 'clpoint' |
334 |
type = -1 |
elif isinstance(g, ClassGroupRange): |
335 |
if isinstance(group, ClassGroupDefault): type = 0 |
open_el = 'clrange label="%s" range="%s"' \ |
336 |
elif isinstance(group, ClassGroupSingleton): type = 1 |
% (self.encode(g.GetLabel()), str(g.GetRange())) |
337 |
elif isinstance(group, ClassGroupRange): type = 2 |
close_el = 'clrange' |
338 |
elif isinstance(group, ClassGroupMap): type = 3 |
else: |
339 |
assert type >= 0 |
assert False, _("Unsupported group type in classification") |
340 |
|
continue |
341 |
if type <= 2: |
|
342 |
data = group.GetProperties() |
data = g.GetProperties() |
343 |
dict = {'stroke' : data.GetLineColor().hex(), |
dict = {'stroke' : data.GetLineColor().hex(), |
344 |
'stroke_width': str(data.GetLineWidth()), |
'stroke_width': str(data.GetLineWidth()), |
345 |
'fill' : data.GetFill().hex()} |
'fill' : data.GetFill().hex()} |
346 |
|
|
347 |
self.open_element(types[type][0](group)) |
self.open_element(open_el) |
348 |
self.write_element("cldata", dict) |
self.write_element("cldata", dict) |
349 |
self.close_element(types[type][1](group)) |
self.close_element(close_el) |
|
else: pass # XXX: we need to handle maps |
|
|
|
|
|
for i in lc: |
|
|
write_class_group(i) |
|
350 |
|
|
351 |
self.close_element("classification") |
self.close_element("classification") |
352 |
|
|