ensure the tests can be run even with no datasource plugins
parent 988567e040
commit 247755df3f

13 changed files with 621 additions and 584 deletions
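The change applies one guard pattern across the test suite: a test that needs a datasource plugin first checks mapnik2.DatasourceCache.instance().plugin_names() and only touches the datasource when the plugin is present, while map-loading tests catch the "Could not create datasource" RuntimeError so a missing plugin skips the assertions instead of failing the run. A minimal sketch of that pattern, assuming nose's eq_ and the test data paths used elsewhere in this diff (the test names are illustrative, not part of the commit):

    def test_needs_shape_plugin():
        lyr = mapnik2.Layer('test')
        # only exercise the datasource if the shape plugin was compiled in
        if 'shape' in mapnik2.DatasourceCache.instance().plugin_names():
            lyr.datasource = mapnik2.Shapefile(file='../data/shp/poly.shp')
            eq_(len(lyr.datasource.all_features()) > 0, True)

    def test_needs_map_with_datasource():
        m = mapnik2.Map(256, 256)
        try:
            mapnik2.load_map(m, '../data/good_maps/agg_poly_gamma_map.xml')
            m.zoom_all()
        except RuntimeError, e:
            # only test datasources that we have installed
            if not 'Could not create datasource' in str(e):
                raise RuntimeError(str(e))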
@@ -62,6 +62,7 @@
        }

        </Parameter>
        <Parameter name="driver">GeoJson</Parameter>
        <Parameter name="layer_by_index">0</Parameter>
        <Parameter name="type">ogr</Parameter>
    </Datasource>
@@ -12,12 +12,14 @@ def setup():

def test_field_listing():
    lyr = mapnik2.Layer('test')
    if 'shape' in mapnik2.DatasourceCache.instance().plugin_names():
        lyr.datasource = mapnik2.Shapefile(file='../data/shp/poly.shp')
        fields = lyr.datasource.fields()
        eq_(fields, ['AREA', 'EAS_ID', 'PRFEDEA'])

def test_total_feature_count_shp():
    lyr = mapnik2.Layer('test')
    if 'shape' in mapnik2.DatasourceCache.instance().plugin_names():
        lyr.datasource = mapnik2.Shapefile(file='../data/shp/poly.shp')
        features = lyr.datasource.all_features()
        num_feats = len(features)
@@ -25,6 +27,7 @@ def test_total_feature_count_shp():

def test_total_feature_count_json():
    lyr = mapnik2.Layer('test')
    if 'ogr' in mapnik2.DatasourceCache.instance().plugin_names():
        lyr.datasource = mapnik2.Ogr(file='../data/json/points.json',layer_by_index=0)
        features = lyr.datasource.all_features()
        num_feats = len(features)
@@ -33,6 +36,7 @@ def test_total_feature_count_json():
def test_reading_json_from_string():
    json = open('../data/json/points.json','r').read()
    lyr = mapnik2.Layer('test')
    if 'ogr' in mapnik2.DatasourceCache.instance().plugin_names():
        lyr.datasource = mapnik2.Ogr(file=json,layer_by_index=0)
        features = lyr.datasource.all_features()
        num_feats = len(features)
@@ -40,6 +44,7 @@ def test_reading_json_from_string():

def test_feature_envelope():
    lyr = mapnik2.Layer('test')
    if 'shape' in mapnik2.DatasourceCache.instance().plugin_names():
        lyr.datasource = mapnik2.Shapefile(file='../data/shp/poly.shp')
        features = lyr.datasource.all_features()
        for feat in features:
@@ -51,6 +56,7 @@ def test_feature_envelope():

def test_feature_attributes():
    lyr = mapnik2.Layer('test')
    if 'shape' in mapnik2.DatasourceCache.instance().plugin_names():
        lyr.datasource = mapnik2.Shapefile(file='../data/shp/poly.shp')
        features = lyr.datasource.all_features()
        feat = features[0]
@@ -61,6 +67,7 @@ def test_feature_attributes():

def test_ogr_layer_by_sql():
    lyr = mapnik2.Layer('test')
    if 'ogr' in mapnik2.DatasourceCache.instance().plugin_names():
        lyr.datasource = mapnik2.Ogr(file='../data/shp/poly.shp', layer_by_sql='SELECT * FROM poly WHERE EAS_ID = 168')
        features = lyr.datasource.all_features()
        num_feats = len(features)
@@ -75,6 +82,7 @@ def test_hit_grid():
        return ["%d:%s" % (len(list(group)), name) for name, group in groupby(l)]

    m = mapnik2.Map(256,256);
    try:
        mapnik2.load_map(m,'../data/good_maps/agg_poly_gamma_map.xml');
        m.zoom_all()
        join_field = 'NAME'
@@ -91,6 +99,11 @@ def test_hit_grid():
        hit_list = '|'.join(rle_encode(fg))
        eq_(hit_list[:16],'730:|2:Greenland')
        eq_(hit_list[-12:],'1:Chile|812:')
    except RuntimeError, e:
        # only test datasources that we have installed
        if not 'Could not create datasource' in str(e):
            raise RuntimeError(str(e))


if __name__ == '__main__':
    setup()
@@ -12,6 +12,8 @@ def setup():
    os.chdir(execution_path('.'))

def compare_shape_between_mapnik_and_ogr(shapefile,query=None):
    plugins = mapnik2.DatasourceCache.instance().plugin_names()
    if 'shape' in plugins and 'ogr' in plugins:
        ds1 = mapnik2.Ogr(file=shapefile,layer_by_index=0)
        ds2 = mapnik2.Shapefile(file=shapefile)
        if query:
@@ -45,6 +47,7 @@ def test_shapefile_polygon_featureset_id():
def test_shapefile_polygon_feature_query_id():
    bbox = (15523428.2632, 4110477.6323, -11218494.8310, 7495720.7404)
    query = mapnik2.Query(mapnik2.Box2d(*bbox))
    if 'ogr' in mapnik2.DatasourceCache.instance().plugin_names():
        ds = mapnik2.Ogr(file='../data/shp/world_merc.shp',layer_by_index=0)
        for fld in ds.fields():
            query.add_property_name(fld)
@@ -56,6 +59,7 @@ def test_feature_hit_count():
    #bbox = (-14284551.8434, 2074195.1992, -7474929.8687, 8140237.7628)
    bbox = (1113194.91,4512803.085,2226389.82,6739192.905)
    query = mapnik2.Query(mapnik2.Box2d(*bbox))
    if 'ogr' in mapnik2.DatasourceCache.instance().plugin_names():
        ds1 = mapnik2.Ogr(file='../data/shp/world_merc.shp',layer_by_index=0)
        for fld in ds1.fields():
            query.add_property_name(fld)
@@ -99,7 +99,7 @@ def test_feature_expression_evaluation_attr_with_spaces():
    f['name with space'] = u'a'
    eq_(f['name with space'],u'a')
    expr = mapnik2.Expression("[name with space]='a'")
    eq_(str(expr),"[name with space]='a'")
    eq_(str(expr),"([name with space]='a')")
    eq_(expr.evaluate(f),True)

if __name__ == "__main__":
@@ -16,6 +16,7 @@ def test_renders_with_agg():
    sym.color = mapnik2.Expression("'#ff0000'")

    _map = create_map_and_append_symbolyzer(sym)
    if _map:
        im = mapnik2.Image(_map.width,_map.height)
        mapnik2.render(_map, im)
        save_data('agg_glyph_symbolizer.png', im.tostring('png'))
@@ -31,7 +32,7 @@ def test_renders_with_cairo():
    sym.size = mapnik2.Expression("[value]")
    sym.color = mapnik2.Expression("'#ff0000'")
    _map = create_map_and_append_symbolyzer(sym)

    if _map:
        from cStringIO import StringIO
        import cairo
        surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 256, 256)
@@ -43,6 +44,7 @@ def test_renders_with_cairo():
def test_load_save_load_map():
    map = mapnik2.Map(256,256)
    in_map = "../data/good_maps/glyph_symbolizer.xml"
    try:
        mapnik2.load_map(map, in_map)
        style = map.find_style('arrows')
        sym = style.rules[0].symbols[0]
@@ -56,6 +58,10 @@ def test_load_save_load_map():
        # make sure non-ascii characters are well supported since most interesting
        # glyphs for symbology are usually in that range
        assert u'í' in out_map, out_map
    except RuntimeError, e:
        # only test datasources that we have installed
        if not 'Could not create datasource' in str(e):
            raise RuntimeError(e)

#
# Utilities and setup code
@@ -69,6 +75,7 @@ def setup():
def create_map_and_append_symbolyzer(sym):
    srs = '+init=epsg:32630'
    lyr = mapnik2.Layer('arrows')
    try:
        lyr.datasource = mapnik2.Shapefile(
            file = '../data/shp/arrows.shp',
            )
@@ -92,6 +99,10 @@ def create_map_and_append_symbolyzer(sym):
        _map.layers.append(lyr)
        _map.zoom_to_box(mapnik2.Box2d(0,0,8,8))
        return _map
    except RuntimeError, e:
        # only test datasources that we have installed
        if not 'Could not create datasource' in str(e):
            raise RuntimeError(e)

if __name__ == "__main__":
    setup()
@@ -32,6 +32,7 @@ def test_adding_datasource_to_layer():
'''
    m = mapnik2.Map(256, 256)

    try:
        mapnik2.load_map_from_string(m, map_string)

        # validate it loaded fine
@@ -66,6 +67,10 @@ def test_adding_datasource_to_layer():
        # test that assignment
        eq_(m.layers[0].srs,'+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs')
        eq_(lyr.srs,'+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs')
    except RuntimeError, e:
        # only test datasources that we have installed
        if not 'Could not create datasource' in str(e):
            raise RuntimeError(e)

if __name__ == "__main__":
    setup()
@@ -15,6 +15,7 @@ def setup():
def assert_loads_successfully(file):
    m = mapnik2.Map(512, 512)

    try:
        strict = True
        mapnik2.load_map(m, file, strict)
@@ -22,6 +23,10 @@ def assert_loads_successfully(file):
        # of a path if it does not end in a trailing slash
        base_path = os.path.dirname(file) + '/'
        mapnik2.load_map_from_string(m,open(file,'rb').read(),strict,base_path)
    except RuntimeError, e:
        # only test datasources that we have installed
        if not 'Could not create datasource' in str(e):
            raise RuntimeError(e)


# We expect these files to raise a RuntimeError
@@ -13,6 +13,7 @@ def setup():
def test_multi_tile_policy():
    srs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
    lyr = mapnik2.Layer('raster')
    if 'raster' in mapnik2.DatasourceCache.instance().plugin_names():
        lyr.datasource = mapnik2.Raster(
            file = '../data/raster_tiles/${x}/${y}.tif',
            lox = -180,
@@ -247,35 +247,6 @@ def test_linesymbolizer_pickle():
    eq_(s.line_cap, s2.line_cap)
    eq_(s.line_join, s2.line_join)

# Shapefile initialization
def test_shapefile_init():
    s = mapnik2.Shapefile(file='../../demo/data/boundaries')

    e = s.envelope()

    assert_almost_equal(e.minx, -11121.6896651, places=7)
    assert_almost_equal(e.miny, -724724.216526, places=6)
    assert_almost_equal(e.maxx, 2463000.67866, places=5)
    assert_almost_equal(e.maxy, 1649661.267, places=3)

# Shapefile properties
def test_shapefile_properties():
    s = mapnik2.Shapefile(file='../../demo/data/boundaries', encoding='latin1')
    f = s.features_at_point(s.envelope().center()).features[0]

    eq_(f['CGNS_FID'], u'6f733341ba2011d892e2080020a0f4c9')
    eq_(f['COUNTRY'], u'CAN')
    eq_(f['F_CODE'], u'FA001')
    eq_(f['NAME_EN'], u'Quebec')
    # this seems to break if icu data linking is not working
    eq_(f['NOM_FR'], u'Qu\xe9bec')
    eq_(f['NOM_FR'], u'Québec')
    eq_(f['Shape_Area'], 1512185733150.0)
    eq_(f['Shape_Leng'], 19218883.724300001)

    # Check that the deprecated interface still works,
    # remove me once the deprecated code is cleaned up
    eq_(f.properties['Shape_Leng'], 19218883.724300001)

# TextSymbolizer initialization
def test_textsymbolizer_init():
@@ -389,6 +360,7 @@ def test_map_init_from_string():

    m = mapnik2.Map(600, 300)
    eq_(m.base, '')
    try:
        mapnik2.load_map_from_string(m, map_string)
        eq_(m.base, './')
        mapnik2.load_map_from_string(m, map_string, False, "") # this "" will have no effect
@@ -402,6 +374,10 @@ def test_map_init_from_string():
        mapnik2.load_map_from_string(m, map_string, True, ".")
        eq_(m.base, '.')
        raise(Todo("Need to write more map property tests in 'object_test.py'..."))
    except RuntimeError, e:
        # only test datasources that we have installed
        if not 'Could not create datasource' in str(e):
            raise RuntimeError(e)

# Map pickling
def test_map_pickle():
@@ -15,10 +15,15 @@ def test_gen_map():
    outputfile = 'raster_colorizer_test.png'

    m = mapnik2.Map(800, 600)
    try:
        mapnik2.load_map(m, mapxmlfile)
        mapnik2.save_map(m, mapxmloutputfile)
        m.zoom_all()
        mapnik2.render_to_file(m, outputfile)
    except RuntimeError,e:
        # only test datasources that we have installed
        if not 'Could not create datasource' in str(e):
            raise RuntimeError(str(e))

#test discrete colorizer mode
def test_get_color_discrete():
@@ -14,6 +14,7 @@ def setup():
def test_dataraster_coloring():
    srs = '+init=epsg:32630'
    lyr = mapnik2.Layer('dataraster')
    if 'gdal' in mapnik2.DatasourceCache.instance().plugin_names():
        lyr.datasource = mapnik2.Gdal(
            file = '../data/raster/dataraster.tif',
            band = 1,
@@ -59,6 +60,7 @@ def test_dataraster_coloring():
def test_dataraster_query_point():
    srs = '+init=epsg:32630'
    lyr = mapnik2.Layer('dataraster')
    if 'gdal' in mapnik2.DatasourceCache.instance().plugin_names():
        lyr.datasource = mapnik2.Gdal(
            file = '../data/raster/dataraster.tif',
            band = 1,
@@ -88,12 +90,17 @@ def test_dataraster_query_point():
def test_load_save_map():
    map = mapnik2.Map(256,256)
    in_map = "../data/good_maps/raster_symbolizer.xml"
    try:
        mapnik2.load_map(map, in_map)

        out_map = mapnik2.save_map_to_string(map)
        assert 'RasterSymbolizer' in out_map
        assert 'RasterColorizer' in out_map
        assert 'stop' in out_map
    except RuntimeError, e:
        # only test datasources that we have installed
        if not 'Could not create datasource' in str(e):
            raise RuntimeError(str(e))

def test_raster_with_alpha_blends_correctly_with_background():
    WIDTH = 500
@@ -117,6 +124,7 @@ def test_raster_with_alpha_blends_correctly_with_background():

    map_layer = mapnik2.Layer('test_layer')
    filepath = '../data/raster/white-alpha.png'
    if 'gdal' in mapnik2.DatasourceCache.instance().plugin_names():
        map_layer.datasource = mapnik2.Gdal(file=filepath)
        map_layer.styles.append('raster_style')
        map.layers.append(map_layer)
@@ -136,6 +144,7 @@ def test_raster_warping():
    lyrSrs = "+init=epsg:32630"
    mapSrs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
    lyr = mapnik2.Layer('dataraster', lyrSrs)
    if 'gdal' in mapnik2.DatasourceCache.instance().plugin_names():
        lyr.datasource = mapnik2.Gdal(
            file = '../data/raster/dataraster.tif',
            band = 1,
@@ -165,6 +174,7 @@ def test_raster_warping_does_not_overclip_source():
    lyrSrs = "+init=epsg:32630"
    mapSrs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
    lyr = mapnik2.Layer('dataraster', lyrSrs)
    if 'gdal' in mapnik2.DatasourceCache.instance().plugin_names():
        lyr.datasource = mapnik2.Gdal(
            file = '../data/raster/dataraster.tif',
            band = 1,
@@ -86,12 +86,16 @@ def get_paired_images(w,h,mapfile):
    return i,i2

def test_render_from_serialization():
    try:
        i,i2 = get_paired_images(100,100,'../data/good_maps/building_symbolizer.xml')
        eq_(i.tostring(),i2.tostring())

        i,i2 = get_paired_images(100,100,'../data/good_maps/polygon_symbolizer.xml')
        eq_(i.tostring(),i2.tostring())

    except RuntimeError, e:
        # only test datasources that we have installed
        if not 'Could not create datasource' in str(e):
            raise RuntimeError(e)

grid_correct = {"keys": ["", "North West", "North East", "South West", "South East"], "data": {"South East": {"Name": "South East"}, "North East": {"Name": "North East"}, "North West": {"Name": "North West"}, "South West": {"Name": "South West"}}, "grid": [" ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " !!! ### ", " !!!!! ##### ", " !!!!! ##### ", " !!! ### ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " $$$$ %%%% ", " $$$$$ %%%%% ", " $$$$$ %%%%% ", " $$$ %%% ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " "]}
@@ -10,6 +10,8 @@ def setup():
    # from another directory we need to chdir()
    os.chdir(execution_path('.'))

if 'sqlite' in mapnik2.DatasourceCache.instance().plugin_names():

    def test_attachdb_with_relative_file():
        # The point table and index is in the qgis_spatiallite.sqlite
        # database. If either is not found, then this fails