ensure the tests can be run even with no datasource plugins
This commit is contained in:
parent
988567e040
commit
247755df3f
13 changed files with 621 additions and 584 deletions
|
@ -62,6 +62,7 @@
|
||||||
}
|
}
|
||||||
|
|
||||||
</Parameter>
|
</Parameter>
|
||||||
|
<Parameter name="driver">GeoJson</Parameter>
|
||||||
<Parameter name="layer_by_index">0</Parameter>
|
<Parameter name="layer_by_index">0</Parameter>
|
||||||
<Parameter name="type">ogr</Parameter>
|
<Parameter name="type">ogr</Parameter>
|
||||||
</Datasource>
|
</Datasource>
|
||||||
|
|
|
@ -12,59 +12,66 @@ def setup():
|
||||||
|
|
||||||
def test_field_listing():
|
def test_field_listing():
|
||||||
lyr = mapnik2.Layer('test')
|
lyr = mapnik2.Layer('test')
|
||||||
lyr.datasource = mapnik2.Shapefile(file='../data/shp/poly.shp')
|
if 'shape' in mapnik2.DatasourceCache.instance().plugin_names():
|
||||||
fields = lyr.datasource.fields()
|
lyr.datasource = mapnik2.Shapefile(file='../data/shp/poly.shp')
|
||||||
eq_(fields, ['AREA', 'EAS_ID', 'PRFEDEA'])
|
fields = lyr.datasource.fields()
|
||||||
|
eq_(fields, ['AREA', 'EAS_ID', 'PRFEDEA'])
|
||||||
|
|
||||||
def test_total_feature_count_shp():
|
def test_total_feature_count_shp():
|
||||||
lyr = mapnik2.Layer('test')
|
lyr = mapnik2.Layer('test')
|
||||||
lyr.datasource = mapnik2.Shapefile(file='../data/shp/poly.shp')
|
if 'shape' in mapnik2.DatasourceCache.instance().plugin_names():
|
||||||
features = lyr.datasource.all_features()
|
lyr.datasource = mapnik2.Shapefile(file='../data/shp/poly.shp')
|
||||||
num_feats = len(features)
|
features = lyr.datasource.all_features()
|
||||||
eq_(num_feats, 10)
|
num_feats = len(features)
|
||||||
|
eq_(num_feats, 10)
|
||||||
|
|
||||||
def test_total_feature_count_json():
|
def test_total_feature_count_json():
|
||||||
lyr = mapnik2.Layer('test')
|
lyr = mapnik2.Layer('test')
|
||||||
lyr.datasource = mapnik2.Ogr(file='../data/json/points.json',layer_by_index=0)
|
if 'ogr' in mapnik2.DatasourceCache.instance().plugin_names():
|
||||||
features = lyr.datasource.all_features()
|
lyr.datasource = mapnik2.Ogr(file='../data/json/points.json',layer_by_index=0)
|
||||||
num_feats = len(features)
|
features = lyr.datasource.all_features()
|
||||||
eq_(num_feats, 5)
|
num_feats = len(features)
|
||||||
|
eq_(num_feats, 5)
|
||||||
|
|
||||||
def test_reading_json_from_string():
|
def test_reading_json_from_string():
|
||||||
json = open('../data/json/points.json','r').read()
|
json = open('../data/json/points.json','r').read()
|
||||||
lyr = mapnik2.Layer('test')
|
lyr = mapnik2.Layer('test')
|
||||||
lyr.datasource = mapnik2.Ogr(file=json,layer_by_index=0)
|
if 'ogr' in mapnik2.DatasourceCache.instance().plugin_names():
|
||||||
features = lyr.datasource.all_features()
|
lyr.datasource = mapnik2.Ogr(file=json,layer_by_index=0)
|
||||||
num_feats = len(features)
|
features = lyr.datasource.all_features()
|
||||||
eq_(num_feats, 5)
|
num_feats = len(features)
|
||||||
|
eq_(num_feats, 5)
|
||||||
|
|
||||||
def test_feature_envelope():
|
def test_feature_envelope():
|
||||||
lyr = mapnik2.Layer('test')
|
lyr = mapnik2.Layer('test')
|
||||||
lyr.datasource = mapnik2.Shapefile(file='../data/shp/poly.shp')
|
if 'shape' in mapnik2.DatasourceCache.instance().plugin_names():
|
||||||
features = lyr.datasource.all_features()
|
lyr.datasource = mapnik2.Shapefile(file='../data/shp/poly.shp')
|
||||||
for feat in features:
|
features = lyr.datasource.all_features()
|
||||||
env = feat.envelope()
|
for feat in features:
|
||||||
contains = lyr.envelope().contains(env)
|
env = feat.envelope()
|
||||||
eq_(contains, True)
|
contains = lyr.envelope().contains(env)
|
||||||
intersects = lyr.envelope().contains(env)
|
eq_(contains, True)
|
||||||
eq_(intersects, True)
|
intersects = lyr.envelope().contains(env)
|
||||||
|
eq_(intersects, True)
|
||||||
|
|
||||||
def test_feature_attributes():
|
def test_feature_attributes():
|
||||||
lyr = mapnik2.Layer('test')
|
lyr = mapnik2.Layer('test')
|
||||||
lyr.datasource = mapnik2.Shapefile(file='../data/shp/poly.shp')
|
if 'shape' in mapnik2.DatasourceCache.instance().plugin_names():
|
||||||
features = lyr.datasource.all_features()
|
lyr.datasource = mapnik2.Shapefile(file='../data/shp/poly.shp')
|
||||||
feat = features[0]
|
features = lyr.datasource.all_features()
|
||||||
attrs = {'PRFEDEA': u'35043411', 'EAS_ID': 168, 'AREA': 215229.266}
|
feat = features[0]
|
||||||
eq_(feat.attributes, attrs)
|
attrs = {'PRFEDEA': u'35043411', 'EAS_ID': 168, 'AREA': 215229.266}
|
||||||
eq_(lyr.datasource.fields(),['AREA', 'EAS_ID', 'PRFEDEA'])
|
eq_(feat.attributes, attrs)
|
||||||
eq_(lyr.datasource.field_types(),['float','int','str'])
|
eq_(lyr.datasource.fields(),['AREA', 'EAS_ID', 'PRFEDEA'])
|
||||||
|
eq_(lyr.datasource.field_types(),['float','int','str'])
|
||||||
|
|
||||||
def test_ogr_layer_by_sql():
|
def test_ogr_layer_by_sql():
|
||||||
lyr = mapnik2.Layer('test')
|
lyr = mapnik2.Layer('test')
|
||||||
lyr.datasource = mapnik2.Ogr(file='../data/shp/poly.shp', layer_by_sql='SELECT * FROM poly WHERE EAS_ID = 168')
|
if 'ogr' in mapnik2.DatasourceCache.instance().plugin_names():
|
||||||
features = lyr.datasource.all_features()
|
lyr.datasource = mapnik2.Ogr(file='../data/shp/poly.shp', layer_by_sql='SELECT * FROM poly WHERE EAS_ID = 168')
|
||||||
num_feats = len(features)
|
features = lyr.datasource.all_features()
|
||||||
eq_(num_feats, 1)
|
num_feats = len(features)
|
||||||
|
eq_(num_feats, 1)
|
||||||
|
|
||||||
def test_hit_grid():
|
def test_hit_grid():
|
||||||
import os
|
import os
|
||||||
|
@ -75,22 +82,28 @@ def test_hit_grid():
|
||||||
return ["%d:%s" % (len(list(group)), name) for name, group in groupby(l)]
|
return ["%d:%s" % (len(list(group)), name) for name, group in groupby(l)]
|
||||||
|
|
||||||
m = mapnik2.Map(256,256);
|
m = mapnik2.Map(256,256);
|
||||||
mapnik2.load_map(m,'../data/good_maps/agg_poly_gamma_map.xml');
|
try:
|
||||||
m.zoom_all()
|
mapnik2.load_map(m,'../data/good_maps/agg_poly_gamma_map.xml');
|
||||||
join_field = 'NAME'
|
m.zoom_all()
|
||||||
fg = [] # feature grid
|
join_field = 'NAME'
|
||||||
for y in range(0, 256, 4):
|
fg = [] # feature grid
|
||||||
for x in range(0, 256, 4):
|
for y in range(0, 256, 4):
|
||||||
featureset = m.query_map_point(0,x,y)
|
for x in range(0, 256, 4):
|
||||||
added = False
|
featureset = m.query_map_point(0,x,y)
|
||||||
for feature in featureset.features:
|
added = False
|
||||||
fg.append(feature[join_field])
|
for feature in featureset.features:
|
||||||
added = True
|
fg.append(feature[join_field])
|
||||||
if not added:
|
added = True
|
||||||
fg.append('')
|
if not added:
|
||||||
hit_list = '|'.join(rle_encode(fg))
|
fg.append('')
|
||||||
eq_(hit_list[:16],'730:|2:Greenland')
|
hit_list = '|'.join(rle_encode(fg))
|
||||||
eq_(hit_list[-12:],'1:Chile|812:')
|
eq_(hit_list[:16],'730:|2:Greenland')
|
||||||
|
eq_(hit_list[-12:],'1:Chile|812:')
|
||||||
|
except RuntimeError, e:
|
||||||
|
# only test datasources that we have installed
|
||||||
|
if not 'Could not create datasource' in str(e):
|
||||||
|
raise RuntimeError(str(e))
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
setup()
|
setup()
|
||||||
|
|
|
@ -12,27 +12,29 @@ def setup():
|
||||||
os.chdir(execution_path('.'))
|
os.chdir(execution_path('.'))
|
||||||
|
|
||||||
def compare_shape_between_mapnik_and_ogr(shapefile,query=None):
|
def compare_shape_between_mapnik_and_ogr(shapefile,query=None):
|
||||||
ds1 = mapnik2.Ogr(file=shapefile,layer_by_index=0)
|
plugins = mapnik2.DatasourceCache.instance().plugin_names()
|
||||||
ds2 = mapnik2.Shapefile(file=shapefile)
|
if 'shape' in plugins and 'ogr' in plugins:
|
||||||
if query:
|
ds1 = mapnik2.Ogr(file=shapefile,layer_by_index=0)
|
||||||
fs1 = ds1.features(query)
|
ds2 = mapnik2.Shapefile(file=shapefile)
|
||||||
fs2 = ds2.features(query)
|
if query:
|
||||||
else:
|
fs1 = ds1.features(query)
|
||||||
fs1 = ds1.featureset()
|
fs2 = ds2.features(query)
|
||||||
fs2 = ds2.featureset()
|
else:
|
||||||
count = 0;
|
fs1 = ds1.featureset()
|
||||||
while(True):
|
fs2 = ds2.featureset()
|
||||||
count += 1
|
count = 0;
|
||||||
feat1 = fs1.next()
|
while(True):
|
||||||
feat2 = fs2.next()
|
count += 1
|
||||||
if not feat1:
|
feat1 = fs1.next()
|
||||||
break
|
feat2 = fs2.next()
|
||||||
#import pdb;pdb.set_trace()
|
if not feat1:
|
||||||
eq_(feat1.id(),feat2.id(),
|
break
|
||||||
'%s : ogr feature id %s "%s" does not equal shapefile feature id %s "%s"'
|
#import pdb;pdb.set_trace()
|
||||||
% (count,feat1.id(),str(feat1.attributes), feat2.id(),str(feat2.attributes)) )
|
eq_(feat1.id(),feat2.id(),
|
||||||
|
'%s : ogr feature id %s "%s" does not equal shapefile feature id %s "%s"'
|
||||||
return True
|
% (count,feat1.id(),str(feat1.attributes), feat2.id(),str(feat2.attributes)) )
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@ -45,10 +47,11 @@ def test_shapefile_polygon_featureset_id():
|
||||||
def test_shapefile_polygon_feature_query_id():
|
def test_shapefile_polygon_feature_query_id():
|
||||||
bbox = (15523428.2632, 4110477.6323, -11218494.8310, 7495720.7404)
|
bbox = (15523428.2632, 4110477.6323, -11218494.8310, 7495720.7404)
|
||||||
query = mapnik2.Query(mapnik2.Box2d(*bbox))
|
query = mapnik2.Query(mapnik2.Box2d(*bbox))
|
||||||
ds = mapnik2.Ogr(file='../data/shp/world_merc.shp',layer_by_index=0)
|
if 'ogr' in mapnik2.DatasourceCache.instance().plugin_names():
|
||||||
for fld in ds.fields():
|
ds = mapnik2.Ogr(file='../data/shp/world_merc.shp',layer_by_index=0)
|
||||||
query.add_property_name(fld)
|
for fld in ds.fields():
|
||||||
compare_shape_between_mapnik_and_ogr('../data/shp/world_merc.shp',query)
|
query.add_property_name(fld)
|
||||||
|
compare_shape_between_mapnik_and_ogr('../data/shp/world_merc.shp',query)
|
||||||
|
|
||||||
def test_feature_hit_count():
|
def test_feature_hit_count():
|
||||||
raise Todo("need to optimize multigeom bbox handling in shapeindex")
|
raise Todo("need to optimize multigeom bbox handling in shapeindex")
|
||||||
|
@ -56,13 +59,14 @@ def test_feature_hit_count():
|
||||||
#bbox = (-14284551.8434, 2074195.1992, -7474929.8687, 8140237.7628)
|
#bbox = (-14284551.8434, 2074195.1992, -7474929.8687, 8140237.7628)
|
||||||
bbox = (1113194.91,4512803.085,2226389.82,6739192.905)
|
bbox = (1113194.91,4512803.085,2226389.82,6739192.905)
|
||||||
query = mapnik2.Query(mapnik2.Box2d(*bbox))
|
query = mapnik2.Query(mapnik2.Box2d(*bbox))
|
||||||
ds1 = mapnik2.Ogr(file='../data/shp/world_merc.shp',layer_by_index=0)
|
if 'ogr' in mapnik2.DatasourceCache.instance().plugin_names():
|
||||||
for fld in ds1.fields():
|
ds1 = mapnik2.Ogr(file='../data/shp/world_merc.shp',layer_by_index=0)
|
||||||
query.add_property_name(fld)
|
for fld in ds1.fields():
|
||||||
ds2 = mapnik2.Shapefile(file='../data/shp/world_merc.shp')
|
query.add_property_name(fld)
|
||||||
count1 = len(ds1.features(query).features)
|
ds2 = mapnik2.Shapefile(file='../data/shp/world_merc.shp')
|
||||||
count2 = len(ds2.features(query).features)
|
count1 = len(ds1.features(query).features)
|
||||||
eq_(count1,count2,"Feature count differs between OGR driver (%s features) and Shapefile Driver (%s features) when querying the same bbox" % (count1,count2))
|
count2 = len(ds2.features(query).features)
|
||||||
|
eq_(count1,count2,"Feature count differs between OGR driver (%s features) and Shapefile Driver (%s features) when querying the same bbox" % (count1,count2))
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
setup()
|
setup()
|
||||||
|
|
|
@ -99,7 +99,7 @@ def test_feature_expression_evaluation_attr_with_spaces():
|
||||||
f['name with space'] = u'a'
|
f['name with space'] = u'a'
|
||||||
eq_(f['name with space'],u'a')
|
eq_(f['name with space'],u'a')
|
||||||
expr = mapnik2.Expression("[name with space]='a'")
|
expr = mapnik2.Expression("[name with space]='a'")
|
||||||
eq_(str(expr),"[name with space]='a'")
|
eq_(str(expr),"([name with space]='a')")
|
||||||
eq_(expr.evaluate(f),True)
|
eq_(expr.evaluate(f),True)
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
|
|
@ -16,10 +16,11 @@ def test_renders_with_agg():
|
||||||
sym.color = mapnik2.Expression("'#ff0000'")
|
sym.color = mapnik2.Expression("'#ff0000'")
|
||||||
|
|
||||||
_map = create_map_and_append_symbolyzer(sym)
|
_map = create_map_and_append_symbolyzer(sym)
|
||||||
im = mapnik2.Image(_map.width,_map.height)
|
if _map:
|
||||||
mapnik2.render(_map, im)
|
im = mapnik2.Image(_map.width,_map.height)
|
||||||
save_data('agg_glyph_symbolizer.png', im.tostring('png'))
|
mapnik2.render(_map, im)
|
||||||
assert contains_word('\xff\x00\x00\xff', im.tostring())
|
save_data('agg_glyph_symbolizer.png', im.tostring('png'))
|
||||||
|
assert contains_word('\xff\x00\x00\xff', im.tostring())
|
||||||
|
|
||||||
def test_renders_with_cairo():
|
def test_renders_with_cairo():
|
||||||
if not mapnik2.has_pycairo():
|
if not mapnik2.has_pycairo():
|
||||||
|
@ -31,31 +32,36 @@ def test_renders_with_cairo():
|
||||||
sym.size = mapnik2.Expression("[value]")
|
sym.size = mapnik2.Expression("[value]")
|
||||||
sym.color = mapnik2.Expression("'#ff0000'")
|
sym.color = mapnik2.Expression("'#ff0000'")
|
||||||
_map = create_map_and_append_symbolyzer(sym)
|
_map = create_map_and_append_symbolyzer(sym)
|
||||||
|
if _map:
|
||||||
from cStringIO import StringIO
|
from cStringIO import StringIO
|
||||||
import cairo
|
import cairo
|
||||||
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 256, 256)
|
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 256, 256)
|
||||||
mapnik2.render(_map, surface)
|
mapnik2.render(_map, surface)
|
||||||
im = mapnik2.Image.from_cairo(surface)
|
im = mapnik2.Image.from_cairo(surface)
|
||||||
save_data('cairo_glyph_symbolizer.png', im.tostring('png'))
|
save_data('cairo_glyph_symbolizer.png', im.tostring('png'))
|
||||||
assert contains_word('\xff\x00\x00\xff', im.tostring())
|
assert contains_word('\xff\x00\x00\xff', im.tostring())
|
||||||
|
|
||||||
def test_load_save_load_map():
|
def test_load_save_load_map():
|
||||||
map = mapnik2.Map(256,256)
|
map = mapnik2.Map(256,256)
|
||||||
in_map = "../data/good_maps/glyph_symbolizer.xml"
|
in_map = "../data/good_maps/glyph_symbolizer.xml"
|
||||||
mapnik2.load_map(map, in_map)
|
try:
|
||||||
style = map.find_style('arrows')
|
mapnik2.load_map(map, in_map)
|
||||||
sym = style.rules[0].symbols[0]
|
style = map.find_style('arrows')
|
||||||
assert isinstance(sym, mapnik2.GlyphSymbolizer)
|
sym = style.rules[0].symbols[0]
|
||||||
assert sym.angle_mode == mapnik2.angle_mode.AZIMUTH
|
assert isinstance(sym, mapnik2.GlyphSymbolizer)
|
||||||
|
assert sym.angle_mode == mapnik2.angle_mode.AZIMUTH
|
||||||
out_map = mapnik2.save_map_to_string(map).decode('utf8')
|
|
||||||
map = mapnik2.Map(256,256)
|
out_map = mapnik2.save_map_to_string(map).decode('utf8')
|
||||||
mapnik2.load_map_from_string(map, out_map.encode('utf8'))
|
map = mapnik2.Map(256,256)
|
||||||
assert 'GlyphSymbolizer' in out_map
|
mapnik2.load_map_from_string(map, out_map.encode('utf8'))
|
||||||
# make sure non-ascii characters are well supported since most interesting
|
assert 'GlyphSymbolizer' in out_map
|
||||||
# glyphs for symbology are usually in that range
|
# make sure non-ascii characters are well supported since most interesting
|
||||||
assert u'í' in out_map, out_map
|
# glyphs for symbology are usually in that range
|
||||||
|
assert u'í' in out_map, out_map
|
||||||
|
except RuntimeError, e:
|
||||||
|
# only test datasources that we have installed
|
||||||
|
if not 'Could not create datasource' in str(e):
|
||||||
|
raise RuntimeError(e)
|
||||||
|
|
||||||
#
|
#
|
||||||
# Utilities and setup code
|
# Utilities and setup code
|
||||||
|
@ -69,29 +75,34 @@ def setup():
|
||||||
def create_map_and_append_symbolyzer(sym):
|
def create_map_and_append_symbolyzer(sym):
|
||||||
srs = '+init=epsg:32630'
|
srs = '+init=epsg:32630'
|
||||||
lyr = mapnik2.Layer('arrows')
|
lyr = mapnik2.Layer('arrows')
|
||||||
lyr.datasource = mapnik2.Shapefile(
|
try:
|
||||||
file = '../data/shp/arrows.shp',
|
lyr.datasource = mapnik2.Shapefile(
|
||||||
)
|
file = '../data/shp/arrows.shp',
|
||||||
lyr.srs = srs
|
)
|
||||||
_map = mapnik2.Map(256,256, srs)
|
lyr.srs = srs
|
||||||
style = mapnik2.Style()
|
_map = mapnik2.Map(256,256, srs)
|
||||||
rule = mapnik2.Rule()
|
style = mapnik2.Style()
|
||||||
rule.symbols.append(sym)
|
rule = mapnik2.Rule()
|
||||||
|
rule.symbols.append(sym)
|
||||||
# put a test symbolizer to see what is the azimuth being read
|
|
||||||
ts = mapnik2.TextSymbolizer(mapnik2.Expression('[azimuth]'),
|
# put a test symbolizer to see what is the azimuth being read
|
||||||
"DejaVu Sans Book",
|
ts = mapnik2.TextSymbolizer(mapnik2.Expression('[azimuth]'),
|
||||||
10,
|
"DejaVu Sans Book",
|
||||||
mapnik2.Color("black"))
|
10,
|
||||||
ts.allow_overlap = True
|
mapnik2.Color("black"))
|
||||||
rule.symbols.append(ts)
|
ts.allow_overlap = True
|
||||||
|
rule.symbols.append(ts)
|
||||||
style.rules.append(rule)
|
|
||||||
_map.append_style('foo', style)
|
style.rules.append(rule)
|
||||||
lyr.styles.append('foo')
|
_map.append_style('foo', style)
|
||||||
_map.layers.append(lyr)
|
lyr.styles.append('foo')
|
||||||
_map.zoom_to_box(mapnik2.Box2d(0,0,8,8))
|
_map.layers.append(lyr)
|
||||||
return _map
|
_map.zoom_to_box(mapnik2.Box2d(0,0,8,8))
|
||||||
|
return _map
|
||||||
|
except RuntimeError, e:
|
||||||
|
# only test datasources that we have installed
|
||||||
|
if not 'Could not create datasource' in str(e):
|
||||||
|
raise RuntimeError(e)
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
setup()
|
setup()
|
||||||
|
|
|
@ -32,40 +32,45 @@ def test_adding_datasource_to_layer():
|
||||||
'''
|
'''
|
||||||
m = mapnik2.Map(256, 256)
|
m = mapnik2.Map(256, 256)
|
||||||
|
|
||||||
mapnik2.load_map_from_string(m, map_string)
|
try:
|
||||||
|
mapnik2.load_map_from_string(m, map_string)
|
||||||
|
|
||||||
|
# validate it loaded fine
|
||||||
|
eq_(m.layers[0].styles[0],'world_borders_style')
|
||||||
|
eq_(m.layers[0].styles[1],'point_style')
|
||||||
|
eq_(len(m.layers),1)
|
||||||
|
|
||||||
|
# also assign a variable reference to that layer
|
||||||
|
# below we will test that this variable references
|
||||||
|
# the same object that is attached to the map
|
||||||
|
lyr = m.layers[0]
|
||||||
|
|
||||||
# validate it loaded fine
|
# ensure that there was no datasource for the layer...
|
||||||
eq_(m.layers[0].styles[0],'world_borders_style')
|
eq_(m.layers[0].datasource,None)
|
||||||
eq_(m.layers[0].styles[1],'point_style')
|
eq_(lyr.datasource,None)
|
||||||
eq_(len(m.layers),1)
|
|
||||||
|
# also note that since the srs was black it defaulted to wgs84
|
||||||
# also assign a variable reference to that layer
|
eq_(m.layers[0].srs,'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
|
||||||
# below we will test that this variable references
|
eq_(lyr.srs,'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
|
||||||
# the same object that is attached to the map
|
|
||||||
lyr = m.layers[0]
|
# now add a datasource one...
|
||||||
|
ds = mapnik2.Shapefile(file='../data/shp/world_merc.shp')
|
||||||
# ensure that there was no datasource for the layer...
|
m.layers[0].datasource = ds
|
||||||
eq_(m.layers[0].datasource,None)
|
|
||||||
eq_(lyr.datasource,None)
|
# now ensure it is attached
|
||||||
|
eq_(m.layers[0].datasource.name(),"shape")
|
||||||
# also note that since the srs was black it defaulted to wgs84
|
eq_(lyr.datasource.name(),"shape")
|
||||||
eq_(m.layers[0].srs,'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
|
|
||||||
eq_(lyr.srs,'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
|
# and since we have now added a shapefile in spherical mercator, adjust the projection
|
||||||
|
lyr.srs = '+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs'
|
||||||
# now add a datasource one...
|
|
||||||
ds = mapnik2.Shapefile(file='../data/shp/world_merc.shp')
|
# test that assignment
|
||||||
m.layers[0].datasource = ds
|
eq_(m.layers[0].srs,'+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs')
|
||||||
|
eq_(lyr.srs,'+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs')
|
||||||
# now ensure it is attached
|
except RuntimeError, e:
|
||||||
eq_(m.layers[0].datasource.name(),"shape")
|
# only test datasources that we have installed
|
||||||
eq_(lyr.datasource.name(),"shape")
|
if not 'Could not create datasource' in str(e):
|
||||||
|
raise RuntimeError(e)
|
||||||
# and since we have now added a shapefile in spherical mercator, adjust the projection
|
|
||||||
lyr.srs = '+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs'
|
|
||||||
|
|
||||||
# test that assignment
|
|
||||||
eq_(m.layers[0].srs,'+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs')
|
|
||||||
eq_(lyr.srs,'+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs')
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
setup()
|
setup()
|
||||||
|
|
|
@ -15,13 +15,18 @@ def setup():
|
||||||
def assert_loads_successfully(file):
|
def assert_loads_successfully(file):
|
||||||
m = mapnik2.Map(512, 512)
|
m = mapnik2.Map(512, 512)
|
||||||
|
|
||||||
strict = True
|
try:
|
||||||
mapnik2.load_map(m, file, strict)
|
strict = True
|
||||||
|
mapnik2.load_map(m, file, strict)
|
||||||
# libxml2 is not smart about paths, and clips the last directory off
|
|
||||||
# of a path if it does not end in a trailing slash
|
# libxml2 is not smart about paths, and clips the last directory off
|
||||||
base_path = os.path.dirname(file) + '/'
|
# of a path if it does not end in a trailing slash
|
||||||
mapnik2.load_map_from_string(m,open(file,'rb').read(),strict,base_path)
|
base_path = os.path.dirname(file) + '/'
|
||||||
|
mapnik2.load_map_from_string(m,open(file,'rb').read(),strict,base_path)
|
||||||
|
except RuntimeError, e:
|
||||||
|
# only test datasources that we have installed
|
||||||
|
if not 'Could not create datasource' in str(e):
|
||||||
|
raise RuntimeError(e)
|
||||||
|
|
||||||
|
|
||||||
# We expect these files to raise a RuntimeError
|
# We expect these files to raise a RuntimeError
|
||||||
|
|
|
@ -13,57 +13,58 @@ def setup():
|
||||||
def test_multi_tile_policy():
|
def test_multi_tile_policy():
|
||||||
srs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
|
srs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
|
||||||
lyr = mapnik2.Layer('raster')
|
lyr = mapnik2.Layer('raster')
|
||||||
lyr.datasource = mapnik2.Raster(
|
if 'raster' in mapnik2.DatasourceCache.instance().plugin_names():
|
||||||
file = '../data/raster_tiles/${x}/${y}.tif',
|
lyr.datasource = mapnik2.Raster(
|
||||||
lox = -180,
|
file = '../data/raster_tiles/${x}/${y}.tif',
|
||||||
loy = -90,
|
lox = -180,
|
||||||
hix = 180,
|
loy = -90,
|
||||||
hiy = 90,
|
hix = 180,
|
||||||
multi = 1,
|
hiy = 90,
|
||||||
tile_size = 256,
|
multi = 1,
|
||||||
x_width = 2,
|
tile_size = 256,
|
||||||
y_width = 2
|
x_width = 2,
|
||||||
)
|
y_width = 2
|
||||||
lyr.srs = srs
|
)
|
||||||
_map = mapnik2.Map(256, 256, srs)
|
lyr.srs = srs
|
||||||
style = mapnik2.Style()
|
_map = mapnik2.Map(256, 256, srs)
|
||||||
rule = mapnik2.Rule()
|
style = mapnik2.Style()
|
||||||
sym = mapnik2.RasterSymbolizer()
|
rule = mapnik2.Rule()
|
||||||
rule.symbols.append(sym)
|
sym = mapnik2.RasterSymbolizer()
|
||||||
style.rules.append(rule)
|
rule.symbols.append(sym)
|
||||||
_map.append_style('foo', style)
|
style.rules.append(rule)
|
||||||
lyr.styles.append('foo')
|
_map.append_style('foo', style)
|
||||||
_map.layers.append(lyr)
|
lyr.styles.append('foo')
|
||||||
_map.zoom_to_box(lyr.envelope())
|
_map.layers.append(lyr)
|
||||||
|
_map.zoom_to_box(lyr.envelope())
|
||||||
|
|
||||||
|
im = mapnik2.Image(_map.width, _map.height)
|
||||||
|
mapnik2.render(_map, im)
|
||||||
|
|
||||||
|
save_data('test_multi_tile_policy.png', im.tostring('png'))
|
||||||
|
|
||||||
im = mapnik2.Image(_map.width, _map.height)
|
# test green chunk
|
||||||
mapnik2.render(_map, im)
|
assert im.view(0,64,1,1).tostring() == '\x00\xff\x00\xff'
|
||||||
|
assert im.view(127,64,1,1).tostring() == '\x00\xff\x00\xff'
|
||||||
|
assert im.view(0,127,1,1).tostring() == '\x00\xff\x00\xff'
|
||||||
|
assert im.view(127,127,1,1).tostring() == '\x00\xff\x00\xff'
|
||||||
|
|
||||||
save_data('test_multi_tile_policy.png', im.tostring('png'))
|
# test blue chunk
|
||||||
|
assert im.view(128,64,1,1).tostring() == '\x00\x00\xff\xff'
|
||||||
# test green chunk
|
assert im.view(255,64,1,1).tostring() == '\x00\x00\xff\xff'
|
||||||
assert im.view(0,64,1,1).tostring() == '\x00\xff\x00\xff'
|
assert im.view(128,127,1,1).tostring() == '\x00\x00\xff\xff'
|
||||||
assert im.view(127,64,1,1).tostring() == '\x00\xff\x00\xff'
|
assert im.view(255,127,1,1).tostring() == '\x00\x00\xff\xff'
|
||||||
assert im.view(0,127,1,1).tostring() == '\x00\xff\x00\xff'
|
|
||||||
assert im.view(127,127,1,1).tostring() == '\x00\xff\x00\xff'
|
# test red chunk
|
||||||
|
assert im.view(0,128,1,1).tostring() == '\xff\x00\x00\xff'
|
||||||
# test blue chunk
|
assert im.view(127,128,1,1).tostring() == '\xff\x00\x00\xff'
|
||||||
assert im.view(128,64,1,1).tostring() == '\x00\x00\xff\xff'
|
assert im.view(0,191,1,1).tostring() == '\xff\x00\x00\xff'
|
||||||
assert im.view(255,64,1,1).tostring() == '\x00\x00\xff\xff'
|
assert im.view(127,191,1,1).tostring() == '\xff\x00\x00\xff'
|
||||||
assert im.view(128,127,1,1).tostring() == '\x00\x00\xff\xff'
|
|
||||||
assert im.view(255,127,1,1).tostring() == '\x00\x00\xff\xff'
|
# test magenta chunk
|
||||||
|
assert im.view(128,128,1,1).tostring() == '\xff\x00\xff\xff'
|
||||||
# test red chunk
|
assert im.view(255,128,1,1).tostring() == '\xff\x00\xff\xff'
|
||||||
assert im.view(0,128,1,1).tostring() == '\xff\x00\x00\xff'
|
assert im.view(128,191,1,1).tostring() == '\xff\x00\xff\xff'
|
||||||
assert im.view(127,128,1,1).tostring() == '\xff\x00\x00\xff'
|
assert im.view(255,191,1,1).tostring() == '\xff\x00\xff\xff'
|
||||||
assert im.view(0,191,1,1).tostring() == '\xff\x00\x00\xff'
|
|
||||||
assert im.view(127,191,1,1).tostring() == '\xff\x00\x00\xff'
|
|
||||||
|
|
||||||
# test magenta chunk
|
|
||||||
assert im.view(128,128,1,1).tostring() == '\xff\x00\xff\xff'
|
|
||||||
assert im.view(255,128,1,1).tostring() == '\xff\x00\xff\xff'
|
|
||||||
assert im.view(128,191,1,1).tostring() == '\xff\x00\xff\xff'
|
|
||||||
assert im.view(255,191,1,1).tostring() == '\xff\x00\xff\xff'
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
setup()
|
setup()
|
||||||
|
|
|
@ -247,35 +247,6 @@ def test_linesymbolizer_pickle():
|
||||||
eq_(s.line_cap, s2.line_cap)
|
eq_(s.line_cap, s2.line_cap)
|
||||||
eq_(s.line_join, s2.line_join)
|
eq_(s.line_join, s2.line_join)
|
||||||
|
|
||||||
# Shapefile initialization
|
|
||||||
def test_shapefile_init():
|
|
||||||
s = mapnik2.Shapefile(file='../../demo/data/boundaries')
|
|
||||||
|
|
||||||
e = s.envelope()
|
|
||||||
|
|
||||||
assert_almost_equal(e.minx, -11121.6896651, places=7)
|
|
||||||
assert_almost_equal(e.miny, -724724.216526, places=6)
|
|
||||||
assert_almost_equal(e.maxx, 2463000.67866, places=5)
|
|
||||||
assert_almost_equal(e.maxy, 1649661.267, places=3)
|
|
||||||
|
|
||||||
# Shapefile properties
|
|
||||||
def test_shapefile_properties():
|
|
||||||
s = mapnik2.Shapefile(file='../../demo/data/boundaries', encoding='latin1')
|
|
||||||
f = s.features_at_point(s.envelope().center()).features[0]
|
|
||||||
|
|
||||||
eq_(f['CGNS_FID'], u'6f733341ba2011d892e2080020a0f4c9')
|
|
||||||
eq_(f['COUNTRY'], u'CAN')
|
|
||||||
eq_(f['F_CODE'], u'FA001')
|
|
||||||
eq_(f['NAME_EN'], u'Quebec')
|
|
||||||
# this seems to break if icu data linking is not working
|
|
||||||
eq_(f['NOM_FR'], u'Qu\xe9bec')
|
|
||||||
eq_(f['NOM_FR'], u'Québec')
|
|
||||||
eq_(f['Shape_Area'], 1512185733150.0)
|
|
||||||
eq_(f['Shape_Leng'], 19218883.724300001)
|
|
||||||
|
|
||||||
# Check that the deprecated interface still works,
|
|
||||||
# remove me once the deprecated code is cleaned up
|
|
||||||
eq_(f.properties['Shape_Leng'], 19218883.724300001)
|
|
||||||
|
|
||||||
# TextSymbolizer initialization
|
# TextSymbolizer initialization
|
||||||
def test_textsymbolizer_init():
|
def test_textsymbolizer_init():
|
||||||
|
@ -389,19 +360,24 @@ def test_map_init_from_string():
|
||||||
|
|
||||||
m = mapnik2.Map(600, 300)
|
m = mapnik2.Map(600, 300)
|
||||||
eq_(m.base, '')
|
eq_(m.base, '')
|
||||||
mapnik2.load_map_from_string(m, map_string)
|
|
||||||
eq_(m.base, './')
|
|
||||||
mapnik2.load_map_from_string(m, map_string, False, "") # this "" will have no effect
|
|
||||||
eq_(m.base, './')
|
|
||||||
try:
|
try:
|
||||||
mapnik2.load_map_from_string(m, map_string, False, "/tmp")
|
mapnik2.load_map_from_string(m, map_string)
|
||||||
except RuntimeError:
|
eq_(m.base, './')
|
||||||
pass # runtime error expected because shapefile path should be wrong and datasource will throw
|
mapnik2.load_map_from_string(m, map_string, False, "") # this "" will have no effect
|
||||||
eq_(m.base, '/tmp') # /tmp will be set despite the exception because load_map mostly worked
|
eq_(m.base, './')
|
||||||
m.base = 'foo'
|
try:
|
||||||
mapnik2.load_map_from_string(m, map_string, True, ".")
|
mapnik2.load_map_from_string(m, map_string, False, "/tmp")
|
||||||
eq_(m.base, '.')
|
except RuntimeError:
|
||||||
raise(Todo("Need to write more map property tests in 'object_test.py'..."))
|
pass # runtime error expected because shapefile path should be wrong and datasource will throw
|
||||||
|
eq_(m.base, '/tmp') # /tmp will be set despite the exception because load_map mostly worked
|
||||||
|
m.base = 'foo'
|
||||||
|
mapnik2.load_map_from_string(m, map_string, True, ".")
|
||||||
|
eq_(m.base, '.')
|
||||||
|
raise(Todo("Need to write more map property tests in 'object_test.py'..."))
|
||||||
|
except RuntimeError, e:
|
||||||
|
# only test datasources that we have installed
|
||||||
|
if not 'Could not create datasource' in str(e):
|
||||||
|
raise RuntimeError(e)
|
||||||
|
|
||||||
# Map pickling
|
# Map pickling
|
||||||
def test_map_pickle():
|
def test_map_pickle():
|
||||||
|
|
|
@ -15,10 +15,15 @@ def test_gen_map():
|
||||||
outputfile = 'raster_colorizer_test.png'
|
outputfile = 'raster_colorizer_test.png'
|
||||||
|
|
||||||
m = mapnik2.Map(800, 600)
|
m = mapnik2.Map(800, 600)
|
||||||
mapnik2.load_map(m, mapxmlfile)
|
try:
|
||||||
mapnik2.save_map(m, mapxmloutputfile)
|
mapnik2.load_map(m, mapxmlfile)
|
||||||
m.zoom_all()
|
mapnik2.save_map(m, mapxmloutputfile)
|
||||||
mapnik2.render_to_file(m, outputfile)
|
m.zoom_all()
|
||||||
|
mapnik2.render_to_file(m, outputfile)
|
||||||
|
except RuntimeError,e:
|
||||||
|
# only test datasources that we have installed
|
||||||
|
if not 'Could not create datasource' in str(e):
|
||||||
|
raise RuntimeError(str(e))
|
||||||
|
|
||||||
#test discrete colorizer mode
|
#test discrete colorizer mode
|
||||||
def test_get_color_discrete():
|
def test_get_color_discrete():
|
||||||
|
|
|
@ -14,86 +14,93 @@ def setup():
|
||||||
def test_dataraster_coloring():
|
def test_dataraster_coloring():
|
||||||
srs = '+init=epsg:32630'
|
srs = '+init=epsg:32630'
|
||||||
lyr = mapnik2.Layer('dataraster')
|
lyr = mapnik2.Layer('dataraster')
|
||||||
lyr.datasource = mapnik2.Gdal(
|
if 'gdal' in mapnik2.DatasourceCache.instance().plugin_names():
|
||||||
file = '../data/raster/dataraster.tif',
|
lyr.datasource = mapnik2.Gdal(
|
||||||
band = 1,
|
file = '../data/raster/dataraster.tif',
|
||||||
)
|
band = 1,
|
||||||
lyr.srs = srs
|
)
|
||||||
_map = mapnik2.Map(256,256, srs)
|
lyr.srs = srs
|
||||||
style = mapnik2.Style()
|
_map = mapnik2.Map(256,256, srs)
|
||||||
rule = mapnik2.Rule()
|
style = mapnik2.Style()
|
||||||
sym = mapnik2.RasterSymbolizer()
|
rule = mapnik2.Rule()
|
||||||
# Assigning a colorizer to the RasterSymbolizer tells the later
|
sym = mapnik2.RasterSymbolizer()
|
||||||
# that it should use it to colorize the raw data raster
|
# Assigning a colorizer to the RasterSymbolizer tells the later
|
||||||
sym.colorizer = mapnik2.RasterColorizer(mapnik2.COLORIZER_DISCRETE, mapnik2.Color("transparent"))
|
# that it should use it to colorize the raw data raster
|
||||||
|
sym.colorizer = mapnik2.RasterColorizer(mapnik2.COLORIZER_DISCRETE, mapnik2.Color("transparent"))
|
||||||
|
|
||||||
|
for value, color in [
|
||||||
|
( 0, "#0044cc"),
|
||||||
|
( 10, "#00cc00"),
|
||||||
|
( 20, "#ffff00"),
|
||||||
|
( 30, "#ff7f00"),
|
||||||
|
( 40, "#ff0000"),
|
||||||
|
( 50, "#ff007f"),
|
||||||
|
( 60, "#ff00ff"),
|
||||||
|
( 70, "#cc00cc"),
|
||||||
|
( 80, "#990099"),
|
||||||
|
( 90, "#660066"),
|
||||||
|
( 200, "transparent"),
|
||||||
|
]:
|
||||||
|
sym.colorizer.add_stop(value, mapnik2.Color(color))
|
||||||
|
rule.symbols.append(sym)
|
||||||
|
style.rules.append(rule)
|
||||||
|
_map.append_style('foo', style)
|
||||||
|
lyr.styles.append('foo')
|
||||||
|
_map.layers.append(lyr)
|
||||||
|
_map.zoom_to_box(lyr.envelope())
|
||||||
|
|
||||||
for value, color in [
|
im = mapnik2.Image(_map.width,_map.height)
|
||||||
( 0, "#0044cc"),
|
mapnik2.render(_map, im)
|
||||||
( 10, "#00cc00"),
|
# save a png somewhere so we can see it
|
||||||
( 20, "#ffff00"),
|
save_data('test_dataraster_coloring.png', im.tostring('png'))
|
||||||
( 30, "#ff7f00"),
|
imdata = im.tostring()
|
||||||
( 40, "#ff0000"),
|
# we have some values in the [20,30) interval so check that they're colored
|
||||||
( 50, "#ff007f"),
|
assert contains_word('\xff\xff\x00\xff', imdata)
|
||||||
( 60, "#ff00ff"),
|
|
||||||
( 70, "#cc00cc"),
|
|
||||||
( 80, "#990099"),
|
|
||||||
( 90, "#660066"),
|
|
||||||
( 200, "transparent"),
|
|
||||||
]:
|
|
||||||
sym.colorizer.add_stop(value, mapnik2.Color(color))
|
|
||||||
rule.symbols.append(sym)
|
|
||||||
style.rules.append(rule)
|
|
||||||
_map.append_style('foo', style)
|
|
||||||
lyr.styles.append('foo')
|
|
||||||
_map.layers.append(lyr)
|
|
||||||
_map.zoom_to_box(lyr.envelope())
|
|
||||||
|
|
||||||
im = mapnik2.Image(_map.width,_map.height)
|
|
||||||
mapnik2.render(_map, im)
|
|
||||||
# save a png somewhere so we can see it
|
|
||||||
save_data('test_dataraster_coloring.png', im.tostring('png'))
|
|
||||||
imdata = im.tostring()
|
|
||||||
# we have some values in the [20,30) interval so check that they're colored
|
|
||||||
assert contains_word('\xff\xff\x00\xff', imdata)
|
|
||||||
|
|
||||||
def test_dataraster_query_point():
|
def test_dataraster_query_point():
|
||||||
srs = '+init=epsg:32630'
|
srs = '+init=epsg:32630'
|
||||||
lyr = mapnik2.Layer('dataraster')
|
lyr = mapnik2.Layer('dataraster')
|
||||||
lyr.datasource = mapnik2.Gdal(
|
if 'gdal' in mapnik2.DatasourceCache.instance().plugin_names():
|
||||||
file = '../data/raster/dataraster.tif',
|
lyr.datasource = mapnik2.Gdal(
|
||||||
band = 1,
|
file = '../data/raster/dataraster.tif',
|
||||||
)
|
band = 1,
|
||||||
lyr.srs = srs
|
)
|
||||||
_map = mapnik2.Map(256,256, srs)
|
lyr.srs = srs
|
||||||
_map.layers.append(lyr)
|
_map = mapnik2.Map(256,256, srs)
|
||||||
|
_map.layers.append(lyr)
|
||||||
# point inside raster extent with valid data
|
|
||||||
x, y = 427417, 4477517
|
# point inside raster extent with valid data
|
||||||
features = _map.query_point(0,x,y).features
|
x, y = 427417, 4477517
|
||||||
assert len(features) == 1
|
features = _map.query_point(0,x,y).features
|
||||||
feat = features[0]
|
assert len(features) == 1
|
||||||
center = feat.envelope().center()
|
feat = features[0]
|
||||||
assert center.x==x and center.y==y, center
|
center = feat.envelope().center()
|
||||||
value = feat['value']
|
assert center.x==x and center.y==y, center
|
||||||
assert value == 21.0, value
|
value = feat['value']
|
||||||
|
assert value == 21.0, value
|
||||||
# point outside raster extent
|
|
||||||
features = _map.query_point(0,-427417,4477517).features
|
# point outside raster extent
|
||||||
assert len(features) == 0
|
features = _map.query_point(0,-427417,4477517).features
|
||||||
|
assert len(features) == 0
|
||||||
# point inside raster extent with nodata
|
|
||||||
features = _map.query_point(0,126850,4596050).features
|
# point inside raster extent with nodata
|
||||||
assert len(features) == 0
|
features = _map.query_point(0,126850,4596050).features
|
||||||
|
assert len(features) == 0
|
||||||
|
|
||||||
def test_load_save_map():
|
def test_load_save_map():
|
||||||
map = mapnik2.Map(256,256)
|
map = mapnik2.Map(256,256)
|
||||||
in_map = "../data/good_maps/raster_symbolizer.xml"
|
in_map = "../data/good_maps/raster_symbolizer.xml"
|
||||||
mapnik2.load_map(map, in_map)
|
try:
|
||||||
|
mapnik2.load_map(map, in_map)
|
||||||
out_map = mapnik2.save_map_to_string(map)
|
|
||||||
assert 'RasterSymbolizer' in out_map
|
out_map = mapnik2.save_map_to_string(map)
|
||||||
assert 'RasterColorizer' in out_map
|
assert 'RasterSymbolizer' in out_map
|
||||||
assert 'stop' in out_map
|
assert 'RasterColorizer' in out_map
|
||||||
|
assert 'stop' in out_map
|
||||||
|
except RuntimeError, e:
|
||||||
|
# only test datasources that we have installed
|
||||||
|
if not 'Could not create datasource' in str(e):
|
||||||
|
raise RuntimeError(str(e))
|
||||||
|
|
||||||
def test_raster_with_alpha_blends_correctly_with_background():
|
def test_raster_with_alpha_blends_correctly_with_background():
|
||||||
WIDTH = 500
|
WIDTH = 500
|
||||||
|
@ -117,77 +124,80 @@ def test_raster_with_alpha_blends_correctly_with_background():
|
||||||
|
|
||||||
map_layer = mapnik2.Layer('test_layer')
|
map_layer = mapnik2.Layer('test_layer')
|
||||||
filepath = '../data/raster/white-alpha.png'
|
filepath = '../data/raster/white-alpha.png'
|
||||||
map_layer.datasource = mapnik2.Gdal(file=filepath)
|
if 'gdal' in mapnik2.DatasourceCache.instance().plugin_names():
|
||||||
map_layer.styles.append('raster_style')
|
map_layer.datasource = mapnik2.Gdal(file=filepath)
|
||||||
map.layers.append(map_layer)
|
map_layer.styles.append('raster_style')
|
||||||
|
map.layers.append(map_layer)
|
||||||
map.zoom_all()
|
|
||||||
|
map.zoom_all()
|
||||||
mim = mapnik2.Image(WIDTH, HEIGHT)
|
|
||||||
|
mim = mapnik2.Image(WIDTH, HEIGHT)
|
||||||
mapnik2.render(map, mim)
|
|
||||||
save_data('test_raster_with_alpha_blends_correctly_with_background.png',
|
mapnik2.render(map, mim)
|
||||||
mim.tostring('png'))
|
save_data('test_raster_with_alpha_blends_correctly_with_background.png',
|
||||||
imdata = mim.tostring()
|
mim.tostring('png'))
|
||||||
# All white is expected
|
imdata = mim.tostring()
|
||||||
assert contains_word('\xff\xff\xff\xff', imdata)
|
# All white is expected
|
||||||
|
assert contains_word('\xff\xff\xff\xff', imdata)
|
||||||
|
|
||||||
def test_raster_warping():
|
def test_raster_warping():
|
||||||
lyrSrs = "+init=epsg:32630"
|
lyrSrs = "+init=epsg:32630"
|
||||||
mapSrs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
|
mapSrs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
|
||||||
lyr = mapnik2.Layer('dataraster', lyrSrs)
|
lyr = mapnik2.Layer('dataraster', lyrSrs)
|
||||||
lyr.datasource = mapnik2.Gdal(
|
if 'gdal' in mapnik2.DatasourceCache.instance().plugin_names():
|
||||||
file = '../data/raster/dataraster.tif',
|
lyr.datasource = mapnik2.Gdal(
|
||||||
band = 1,
|
file = '../data/raster/dataraster.tif',
|
||||||
)
|
band = 1,
|
||||||
sym = mapnik2.RasterSymbolizer()
|
)
|
||||||
sym.colorizer = mapnik2.RasterColorizer(mapnik2.COLORIZER_DISCRETE, mapnik2.Color(255,255,0))
|
sym = mapnik2.RasterSymbolizer()
|
||||||
rule = mapnik2.Rule()
|
sym.colorizer = mapnik2.RasterColorizer(mapnik2.COLORIZER_DISCRETE, mapnik2.Color(255,255,0))
|
||||||
rule.symbols.append(sym)
|
rule = mapnik2.Rule()
|
||||||
style = mapnik2.Style()
|
rule.symbols.append(sym)
|
||||||
style.rules.append(rule)
|
style = mapnik2.Style()
|
||||||
_map = mapnik2.Map(256,256, mapSrs)
|
style.rules.append(rule)
|
||||||
_map.append_style('foo', style)
|
_map = mapnik2.Map(256,256, mapSrs)
|
||||||
lyr.styles.append('foo')
|
_map.append_style('foo', style)
|
||||||
_map.layers.append(lyr)
|
lyr.styles.append('foo')
|
||||||
prj_trans = mapnik2.ProjTransform(mapnik2.Projection(mapSrs),
|
_map.layers.append(lyr)
|
||||||
mapnik2.Projection(lyrSrs))
|
prj_trans = mapnik2.ProjTransform(mapnik2.Projection(mapSrs),
|
||||||
_map.zoom_to_box(prj_trans.backward(lyr.envelope()))
|
mapnik2.Projection(lyrSrs))
|
||||||
|
_map.zoom_to_box(prj_trans.backward(lyr.envelope()))
|
||||||
im = mapnik2.Image(_map.width,_map.height)
|
|
||||||
mapnik2.render(_map, im)
|
im = mapnik2.Image(_map.width,_map.height)
|
||||||
# save a png somewhere so we can see it
|
mapnik2.render(_map, im)
|
||||||
save_data('test_raster_warping.png', im.tostring('png'))
|
# save a png somewhere so we can see it
|
||||||
imdata = im.tostring()
|
save_data('test_raster_warping.png', im.tostring('png'))
|
||||||
assert contains_word('\xff\xff\x00\xff', imdata)
|
imdata = im.tostring()
|
||||||
|
assert contains_word('\xff\xff\x00\xff', imdata)
|
||||||
|
|
||||||
def test_raster_warping_does_not_overclip_source():
|
def test_raster_warping_does_not_overclip_source():
|
||||||
lyrSrs = "+init=epsg:32630"
|
lyrSrs = "+init=epsg:32630"
|
||||||
mapSrs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
|
mapSrs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
|
||||||
lyr = mapnik2.Layer('dataraster', lyrSrs)
|
lyr = mapnik2.Layer('dataraster', lyrSrs)
|
||||||
lyr.datasource = mapnik2.Gdal(
|
if 'gdal' in mapnik2.DatasourceCache.instance().plugin_names():
|
||||||
file = '../data/raster/dataraster.tif',
|
lyr.datasource = mapnik2.Gdal(
|
||||||
band = 1,
|
file = '../data/raster/dataraster.tif',
|
||||||
)
|
band = 1,
|
||||||
sym = mapnik2.RasterSymbolizer()
|
)
|
||||||
sym.colorizer = mapnik2.RasterColorizer(mapnik2.COLORIZER_DISCRETE, mapnik2.Color(255,255,0))
|
sym = mapnik2.RasterSymbolizer()
|
||||||
rule = mapnik2.Rule()
|
sym.colorizer = mapnik2.RasterColorizer(mapnik2.COLORIZER_DISCRETE, mapnik2.Color(255,255,0))
|
||||||
rule.symbols.append(sym)
|
rule = mapnik2.Rule()
|
||||||
style = mapnik2.Style()
|
rule.symbols.append(sym)
|
||||||
style.rules.append(rule)
|
style = mapnik2.Style()
|
||||||
_map = mapnik2.Map(256,256, mapSrs)
|
style.rules.append(rule)
|
||||||
_map.background=mapnik2.Color('white')
|
_map = mapnik2.Map(256,256, mapSrs)
|
||||||
_map.append_style('foo', style)
|
_map.background=mapnik2.Color('white')
|
||||||
lyr.styles.append('foo')
|
_map.append_style('foo', style)
|
||||||
_map.layers.append(lyr)
|
lyr.styles.append('foo')
|
||||||
_map.zoom_to_box(mapnik2.Box2d(3,42,4,43))
|
_map.layers.append(lyr)
|
||||||
|
_map.zoom_to_box(mapnik2.Box2d(3,42,4,43))
|
||||||
im = mapnik2.Image(_map.width,_map.height)
|
|
||||||
mapnik2.render(_map, im)
|
im = mapnik2.Image(_map.width,_map.height)
|
||||||
# save a png somewhere so we can see it
|
mapnik2.render(_map, im)
|
||||||
save_data('test_raster_warping_does_not_overclip_source.png',
|
# save a png somewhere so we can see it
|
||||||
im.tostring('png'))
|
save_data('test_raster_warping_does_not_overclip_source.png',
|
||||||
assert im.view(0,200,1,1).tostring()=='\xff\xff\x00\xff'
|
im.tostring('png'))
|
||||||
|
assert im.view(0,200,1,1).tostring()=='\xff\xff\x00\xff'
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
setup()
|
setup()
|
||||||
|
|
|
@ -86,12 +86,16 @@ def get_paired_images(w,h,mapfile):
|
||||||
return i,i2
|
return i,i2
|
||||||
|
|
||||||
def test_render_from_serialization():
|
def test_render_from_serialization():
|
||||||
i,i2 = get_paired_images(100,100,'../data/good_maps/building_symbolizer.xml')
|
try:
|
||||||
eq_(i.tostring(),i2.tostring())
|
i,i2 = get_paired_images(100,100,'../data/good_maps/building_symbolizer.xml')
|
||||||
|
eq_(i.tostring(),i2.tostring())
|
||||||
i,i2 = get_paired_images(100,100,'../data/good_maps/polygon_symbolizer.xml')
|
|
||||||
eq_(i.tostring(),i2.tostring())
|
i,i2 = get_paired_images(100,100,'../data/good_maps/polygon_symbolizer.xml')
|
||||||
|
eq_(i.tostring(),i2.tostring())
|
||||||
|
except RuntimeError, e:
|
||||||
|
# only test datasources that we have installed
|
||||||
|
if not 'Could not create datasource' in str(e):
|
||||||
|
raise RuntimeError(e)
|
||||||
|
|
||||||
grid_correct = {"keys": ["", "North West", "North East", "South West", "South East"], "data": {"South East": {"Name": "South East"}, "North East": {"Name": "North East"}, "North West": {"Name": "North West"}, "South West": {"Name": "South West"}}, "grid": [" ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " !!! ### ", " !!!!! ##### ", " !!!!! ##### ", " !!! ### ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " $$$$ %%%% ", " $$$$$ %%%%% ", " $$$$$ %%%%% ", " $$$ %%% ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " "]}
|
grid_correct = {"keys": ["", "North West", "North East", "South West", "South East"], "data": {"South East": {"Name": "South East"}, "North East": {"Name": "North East"}, "North West": {"Name": "North West"}, "South West": {"Name": "South West"}}, "grid": [" ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " !!! ### ", " !!!!! ##### ", " !!!!! ##### ", " !!! ### ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " $$$$ %%%% ", " $$$$$ %%%%% ", " $$$$$ %%%%% ", " $$$ %%% ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " "]}
|
||||||
|
|
||||||
|
|
|
@ -10,189 +10,191 @@ def setup():
|
||||||
# from another directory we need to chdir()
|
# from another directory we need to chdir()
|
||||||
os.chdir(execution_path('.'))
|
os.chdir(execution_path('.'))
|
||||||
|
|
||||||
def test_attachdb_with_relative_file():
|
if 'sqlite' in mapnik2.DatasourceCache.instance().plugin_names():
|
||||||
# The point table and index is in the qgis_spatiallite.sqlite
|
|
||||||
# database. If either is not found, then this fails
|
|
||||||
ds = mapnik2.SQLite(file='../data/sqlite/world.sqlite',
|
|
||||||
table='point',
|
|
||||||
attachdb='scratch@qgis_spatiallite.sqlite'
|
|
||||||
)
|
|
||||||
fs = ds.featureset()
|
|
||||||
feature = fs.next()
|
|
||||||
eq_(feature['pkuid'],1)
|
|
||||||
|
|
||||||
def test_attachdb_with_multiple_files():
|
|
||||||
ds = mapnik2.SQLite(file='../data/sqlite/world.sqlite',
|
|
||||||
table='attachedtest',
|
|
||||||
attachdb='scratch1@:memory:,scratch2@:memory:',
|
|
||||||
initdb='''
|
|
||||||
create table scratch1.attachedtest (the_geom);
|
|
||||||
create virtual table scratch2.idx_attachedtest_the_geom using rtree(pkid,xmin,xmax,ymin,ymax);
|
|
||||||
insert into scratch2.idx_attachedtest_the_geom values (1,-7799225.5,-7778571.0,1393264.125,1417719.375);
|
|
||||||
'''
|
|
||||||
)
|
|
||||||
fs = ds.featureset()
|
|
||||||
feature = fs.next()
|
|
||||||
# the above should not throw but will result in no features
|
|
||||||
eq_(feature,None)
|
|
||||||
|
|
||||||
def test_attachdb_with_absolute_file():
|
|
||||||
# The point table and index is in the qgis_spatiallite.sqlite
|
|
||||||
# database. If either is not found, then this fails
|
|
||||||
ds = mapnik2.SQLite(file=os.getcwd() + '/../data/sqlite/world.sqlite',
|
|
||||||
table='point',
|
|
||||||
attachdb='scratch@qgis_spatiallite.sqlite'
|
|
||||||
)
|
|
||||||
fs = ds.featureset()
|
|
||||||
feature = fs.next()
|
|
||||||
eq_(feature['pkuid'],1)
|
|
||||||
|
|
||||||
def test_attachdb_with_index():
|
|
||||||
ds = mapnik2.SQLite(file='../data/sqlite/world.sqlite',
|
|
||||||
table='attachedtest',
|
|
||||||
attachdb='scratch@:memory:',
|
|
||||||
initdb='''
|
|
||||||
create table scratch.attachedtest (the_geom);
|
|
||||||
create virtual table scratch.idx_attachedtest_the_geom using rtree(pkid,xmin,xmax,ymin,ymax);
|
|
||||||
insert into scratch.idx_attachedtest_the_geom values (1,-7799225.5,-7778571.0,1393264.125,1417719.375);
|
|
||||||
'''
|
|
||||||
)
|
|
||||||
fs = ds.featureset()
|
|
||||||
feature = fs.next()
|
|
||||||
eq_(feature,None)
|
|
||||||
|
|
||||||
def test_attachdb_with_explicit_index():
|
def test_attachdb_with_relative_file():
|
||||||
ds = mapnik2.SQLite(file='../data/sqlite/world.sqlite',
|
# The point table and index is in the qgis_spatiallite.sqlite
|
||||||
table='attachedtest',
|
# database. If either is not found, then this fails
|
||||||
index_table='myindex',
|
ds = mapnik2.SQLite(file='../data/sqlite/world.sqlite',
|
||||||
attachdb='scratch@:memory:',
|
table='point',
|
||||||
initdb='''
|
attachdb='scratch@qgis_spatiallite.sqlite'
|
||||||
create table scratch.attachedtest (the_geom);
|
)
|
||||||
create virtual table scratch.myindex using rtree(pkid,xmin,xmax,ymin,ymax);
|
fs = ds.featureset()
|
||||||
insert into scratch.myindex values (1,-7799225.5,-7778571.0,1393264.125,1417719.375);
|
feature = fs.next()
|
||||||
'''
|
eq_(feature['pkuid'],1)
|
||||||
)
|
|
||||||
fs = ds.featureset()
|
|
||||||
feature = fs.next()
|
|
||||||
eq_(feature,None)
|
|
||||||
|
|
||||||
def test_attachdb_with_sql_join():
|
|
||||||
ds = mapnik2.SQLite(file='../data/sqlite/world.sqlite',
|
|
||||||
table='(select * from world_merc INNER JOIN business on world_merc.iso3 = business.ISO3 limit 100)',
|
|
||||||
attachdb='busines@business.sqlite'
|
|
||||||
)
|
|
||||||
fs = ds.featureset()
|
|
||||||
feature = fs.next()
|
|
||||||
eq_(feature.id(),1)
|
|
||||||
expected = {
|
|
||||||
1995:0,
|
|
||||||
1996:0,
|
|
||||||
1997:0,
|
|
||||||
1998:0,
|
|
||||||
1999:0,
|
|
||||||
2000:0,
|
|
||||||
2001:0,
|
|
||||||
2002:0,
|
|
||||||
2003:0,
|
|
||||||
2004:0,
|
|
||||||
2005:0,
|
|
||||||
2006:0,
|
|
||||||
2007:0,
|
|
||||||
2008:0,
|
|
||||||
2009:0,
|
|
||||||
2010:0,
|
|
||||||
# this appears to be sqlites way of
|
|
||||||
# automatically handling clashing column names
|
|
||||||
'ISO3:1':'ATG',
|
|
||||||
'OGC_FID':1,
|
|
||||||
'area':44,
|
|
||||||
'fips':u'AC',
|
|
||||||
'iso2':u'AG',
|
|
||||||
'iso3':u'ATG',
|
|
||||||
'lat':17.078,
|
|
||||||
'lon':-61.783,
|
|
||||||
'name':u'Antigua and Barbuda',
|
|
||||||
'pop2005':83039,
|
|
||||||
'region':19,
|
|
||||||
'subregion':29,
|
|
||||||
'un':28
|
|
||||||
}
|
|
||||||
for k,v in expected.items():
|
|
||||||
try:
|
|
||||||
eq_(feature[str(k)],v)
|
|
||||||
except:
|
|
||||||
#import pdb;pdb.set_trace()
|
|
||||||
print 'invalid key/v %s/%s for: %s' % (k,v,feature)
|
|
||||||
|
|
||||||
def test_subqueries():
|
|
||||||
ds = mapnik2.SQLite(file='../data/sqlite/world.sqlite',
|
|
||||||
table='world_merc',
|
|
||||||
)
|
|
||||||
fs = ds.featureset()
|
|
||||||
feature = fs.next()
|
|
||||||
eq_(feature['OGC_FID'],1)
|
|
||||||
eq_(feature['fips'],u'AC')
|
|
||||||
eq_(feature['iso2'],u'AG')
|
|
||||||
eq_(feature['iso3'],u'ATG')
|
|
||||||
eq_(feature['un'],28)
|
|
||||||
eq_(feature['name'],u'Antigua and Barbuda')
|
|
||||||
eq_(feature['area'],44)
|
|
||||||
eq_(feature['pop2005'],83039)
|
|
||||||
eq_(feature['region'],19)
|
|
||||||
eq_(feature['subregion'],29)
|
|
||||||
eq_(feature['lon'],-61.783)
|
|
||||||
eq_(feature['lat'],17.078)
|
|
||||||
|
|
||||||
ds = mapnik2.SQLite(file='../data/sqlite/world.sqlite',
|
|
||||||
table='(select * from world_merc)',
|
|
||||||
)
|
|
||||||
fs = ds.featureset()
|
|
||||||
feature = fs.next()
|
|
||||||
eq_(feature['OGC_FID'],1)
|
|
||||||
eq_(feature['fips'],u'AC')
|
|
||||||
eq_(feature['iso2'],u'AG')
|
|
||||||
eq_(feature['iso3'],u'ATG')
|
|
||||||
eq_(feature['un'],28)
|
|
||||||
eq_(feature['name'],u'Antigua and Barbuda')
|
|
||||||
eq_(feature['area'],44)
|
|
||||||
eq_(feature['pop2005'],83039)
|
|
||||||
eq_(feature['region'],19)
|
|
||||||
eq_(feature['subregion'],29)
|
|
||||||
eq_(feature['lon'],-61.783)
|
|
||||||
eq_(feature['lat'],17.078)
|
|
||||||
|
|
||||||
ds = mapnik2.SQLite(file='../data/sqlite/world.sqlite',
|
def test_attachdb_with_multiple_files():
|
||||||
table='(select OGC_FID,GEOMETRY as geom from world_merc)',
|
ds = mapnik2.SQLite(file='../data/sqlite/world.sqlite',
|
||||||
)
|
table='attachedtest',
|
||||||
fs = ds.featureset()
|
attachdb='scratch1@:memory:,scratch2@:memory:',
|
||||||
feature = fs.next()
|
initdb='''
|
||||||
eq_(feature['OGC_FID'],1)
|
create table scratch1.attachedtest (the_geom);
|
||||||
eq_(len(feature),1)
|
create virtual table scratch2.idx_attachedtest_the_geom using rtree(pkid,xmin,xmax,ymin,ymax);
|
||||||
|
insert into scratch2.idx_attachedtest_the_geom values (1,-7799225.5,-7778571.0,1393264.125,1417719.375);
|
||||||
ds = mapnik2.SQLite(file='../data/sqlite/world.sqlite',
|
'''
|
||||||
table='(select GEOMETRY,OGC_FID,fips from world_merc)',
|
)
|
||||||
)
|
fs = ds.featureset()
|
||||||
fs = ds.featureset()
|
feature = fs.next()
|
||||||
feature = fs.next()
|
# the above should not throw but will result in no features
|
||||||
eq_(feature['OGC_FID'],1)
|
eq_(feature,None)
|
||||||
eq_(feature['fips'],u'AC')
|
|
||||||
|
def test_attachdb_with_absolute_file():
|
||||||
ds = mapnik2.SQLite(file='../data/sqlite/world.sqlite',
|
# The point table and index is in the qgis_spatiallite.sqlite
|
||||||
table='(select GEOMETRY,rowid as aliased_id,fips from world_merc)',
|
# database. If either is not found, then this fails
|
||||||
key_field='aliased_id'
|
ds = mapnik2.SQLite(file=os.getcwd() + '/../data/sqlite/world.sqlite',
|
||||||
)
|
table='point',
|
||||||
fs = ds.featureset()
|
attachdb='scratch@qgis_spatiallite.sqlite'
|
||||||
feature = fs.next()
|
)
|
||||||
eq_(feature['aliased_id'],1)
|
fs = ds.featureset()
|
||||||
eq_(feature['fips'],u'AC')
|
feature = fs.next()
|
||||||
|
eq_(feature['pkuid'],1)
|
||||||
ds = mapnik2.SQLite(file='../data/sqlite/world.sqlite',
|
|
||||||
table='(select GEOMETRY,OGC_FID,OGC_FID as rowid,fips from world_merc)',
|
def test_attachdb_with_index():
|
||||||
)
|
ds = mapnik2.SQLite(file='../data/sqlite/world.sqlite',
|
||||||
fs = ds.featureset()
|
table='attachedtest',
|
||||||
feature = fs.next()
|
attachdb='scratch@:memory:',
|
||||||
eq_(feature['rowid'],1)
|
initdb='''
|
||||||
eq_(feature['fips'],u'AC')
|
create table scratch.attachedtest (the_geom);
|
||||||
|
create virtual table scratch.idx_attachedtest_the_geom using rtree(pkid,xmin,xmax,ymin,ymax);
|
||||||
|
insert into scratch.idx_attachedtest_the_geom values (1,-7799225.5,-7778571.0,1393264.125,1417719.375);
|
||||||
|
'''
|
||||||
|
)
|
||||||
|
fs = ds.featureset()
|
||||||
|
feature = fs.next()
|
||||||
|
eq_(feature,None)
|
||||||
|
|
||||||
|
def test_attachdb_with_explicit_index():
|
||||||
|
ds = mapnik2.SQLite(file='../data/sqlite/world.sqlite',
|
||||||
|
table='attachedtest',
|
||||||
|
index_table='myindex',
|
||||||
|
attachdb='scratch@:memory:',
|
||||||
|
initdb='''
|
||||||
|
create table scratch.attachedtest (the_geom);
|
||||||
|
create virtual table scratch.myindex using rtree(pkid,xmin,xmax,ymin,ymax);
|
||||||
|
insert into scratch.myindex values (1,-7799225.5,-7778571.0,1393264.125,1417719.375);
|
||||||
|
'''
|
||||||
|
)
|
||||||
|
fs = ds.featureset()
|
||||||
|
feature = fs.next()
|
||||||
|
eq_(feature,None)
|
||||||
|
|
||||||
|
def test_attachdb_with_sql_join():
|
||||||
|
ds = mapnik2.SQLite(file='../data/sqlite/world.sqlite',
|
||||||
|
table='(select * from world_merc INNER JOIN business on world_merc.iso3 = business.ISO3 limit 100)',
|
||||||
|
attachdb='busines@business.sqlite'
|
||||||
|
)
|
||||||
|
fs = ds.featureset()
|
||||||
|
feature = fs.next()
|
||||||
|
eq_(feature.id(),1)
|
||||||
|
expected = {
|
||||||
|
1995:0,
|
||||||
|
1996:0,
|
||||||
|
1997:0,
|
||||||
|
1998:0,
|
||||||
|
1999:0,
|
||||||
|
2000:0,
|
||||||
|
2001:0,
|
||||||
|
2002:0,
|
||||||
|
2003:0,
|
||||||
|
2004:0,
|
||||||
|
2005:0,
|
||||||
|
2006:0,
|
||||||
|
2007:0,
|
||||||
|
2008:0,
|
||||||
|
2009:0,
|
||||||
|
2010:0,
|
||||||
|
# this appears to be sqlites way of
|
||||||
|
# automatically handling clashing column names
|
||||||
|
'ISO3:1':'ATG',
|
||||||
|
'OGC_FID':1,
|
||||||
|
'area':44,
|
||||||
|
'fips':u'AC',
|
||||||
|
'iso2':u'AG',
|
||||||
|
'iso3':u'ATG',
|
||||||
|
'lat':17.078,
|
||||||
|
'lon':-61.783,
|
||||||
|
'name':u'Antigua and Barbuda',
|
||||||
|
'pop2005':83039,
|
||||||
|
'region':19,
|
||||||
|
'subregion':29,
|
||||||
|
'un':28
|
||||||
|
}
|
||||||
|
for k,v in expected.items():
|
||||||
|
try:
|
||||||
|
eq_(feature[str(k)],v)
|
||||||
|
except:
|
||||||
|
#import pdb;pdb.set_trace()
|
||||||
|
print 'invalid key/v %s/%s for: %s' % (k,v,feature)
|
||||||
|
|
||||||
|
def test_subqueries():
|
||||||
|
ds = mapnik2.SQLite(file='../data/sqlite/world.sqlite',
|
||||||
|
table='world_merc',
|
||||||
|
)
|
||||||
|
fs = ds.featureset()
|
||||||
|
feature = fs.next()
|
||||||
|
eq_(feature['OGC_FID'],1)
|
||||||
|
eq_(feature['fips'],u'AC')
|
||||||
|
eq_(feature['iso2'],u'AG')
|
||||||
|
eq_(feature['iso3'],u'ATG')
|
||||||
|
eq_(feature['un'],28)
|
||||||
|
eq_(feature['name'],u'Antigua and Barbuda')
|
||||||
|
eq_(feature['area'],44)
|
||||||
|
eq_(feature['pop2005'],83039)
|
||||||
|
eq_(feature['region'],19)
|
||||||
|
eq_(feature['subregion'],29)
|
||||||
|
eq_(feature['lon'],-61.783)
|
||||||
|
eq_(feature['lat'],17.078)
|
||||||
|
|
||||||
|
ds = mapnik2.SQLite(file='../data/sqlite/world.sqlite',
|
||||||
|
table='(select * from world_merc)',
|
||||||
|
)
|
||||||
|
fs = ds.featureset()
|
||||||
|
feature = fs.next()
|
||||||
|
eq_(feature['OGC_FID'],1)
|
||||||
|
eq_(feature['fips'],u'AC')
|
||||||
|
eq_(feature['iso2'],u'AG')
|
||||||
|
eq_(feature['iso3'],u'ATG')
|
||||||
|
eq_(feature['un'],28)
|
||||||
|
eq_(feature['name'],u'Antigua and Barbuda')
|
||||||
|
eq_(feature['area'],44)
|
||||||
|
eq_(feature['pop2005'],83039)
|
||||||
|
eq_(feature['region'],19)
|
||||||
|
eq_(feature['subregion'],29)
|
||||||
|
eq_(feature['lon'],-61.783)
|
||||||
|
eq_(feature['lat'],17.078)
|
||||||
|
|
||||||
|
ds = mapnik2.SQLite(file='../data/sqlite/world.sqlite',
|
||||||
|
table='(select OGC_FID,GEOMETRY as geom from world_merc)',
|
||||||
|
)
|
||||||
|
fs = ds.featureset()
|
||||||
|
feature = fs.next()
|
||||||
|
eq_(feature['OGC_FID'],1)
|
||||||
|
eq_(len(feature),1)
|
||||||
|
|
||||||
|
ds = mapnik2.SQLite(file='../data/sqlite/world.sqlite',
|
||||||
|
table='(select GEOMETRY,OGC_FID,fips from world_merc)',
|
||||||
|
)
|
||||||
|
fs = ds.featureset()
|
||||||
|
feature = fs.next()
|
||||||
|
eq_(feature['OGC_FID'],1)
|
||||||
|
eq_(feature['fips'],u'AC')
|
||||||
|
|
||||||
|
ds = mapnik2.SQLite(file='../data/sqlite/world.sqlite',
|
||||||
|
table='(select GEOMETRY,rowid as aliased_id,fips from world_merc)',
|
||||||
|
key_field='aliased_id'
|
||||||
|
)
|
||||||
|
fs = ds.featureset()
|
||||||
|
feature = fs.next()
|
||||||
|
eq_(feature['aliased_id'],1)
|
||||||
|
eq_(feature['fips'],u'AC')
|
||||||
|
|
||||||
|
ds = mapnik2.SQLite(file='../data/sqlite/world.sqlite',
|
||||||
|
table='(select GEOMETRY,OGC_FID,OGC_FID as rowid,fips from world_merc)',
|
||||||
|
)
|
||||||
|
fs = ds.featureset()
|
||||||
|
feature = fs.next()
|
||||||
|
eq_(feature['rowid'],1)
|
||||||
|
eq_(feature['fips'],u'AC')
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
setup()
|
setup()
|
||||||
|
|
Loading…
Reference in a new issue