propagate unavailable input plugin check to other test cases
parent 7f506ff5cf
commit 7902591ac0
8 changed files with 84 additions and 72 deletions
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 from nose.tools import *
-from utilities import execution_path, run_all
+from utilities import execution_path, run_all, datasources_available
 import os, mapnik
 
 def setup():
@@ -99,9 +99,10 @@ def test_hit_grid():
         """ encode a list of strings with run-length compression """
         return ["%d:%s" % (len(list(group)), name) for name, group in groupby(l)]
 
-    m = mapnik.Map(256,256);
-    try:
-        mapnik.load_map(m,'../data/good_maps/agg_poly_gamma_map.xml');
+    xmlfile = '../data/good_maps/agg_poly_gamma_map.xml'
+    if datasources_available(xmlfile):
+        m = mapnik.Map(256, 256)
+        mapnik.load_map(m, xmlfile)
         m.zoom_all()
         join_field = 'NAME'
         fg = [] # feature grid
@@ -117,10 +118,6 @@ def test_hit_grid():
         hit_list = '|'.join(rle_encode(fg))
         eq_(hit_list[:16],'730:|2:Greenland')
         eq_(hit_list[-12:],'1:Chile|812:')
-    except RuntimeError, e:
-        # only test datasources that we have installed
-        if not 'Could not create datasource' in str(e):
-            raise RuntimeError(str(e))
 
 
 if __name__ == '__main__':

@@ -28,9 +28,9 @@ def test_adding_datasource_to_layer():
 
 </Map>
 '''
+    if 'shape' in mapnik.DatasourceCache.plugin_names():
         m = mapnik.Map(256, 256)
 
-    try:
         mapnik.load_map_from_string(m, map_string)
 
         # validate it loaded fine
@@ -65,10 +65,6 @@ def test_adding_datasource_to_layer():
         # test that assignment
         eq_(m.layers[0].srs,'+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs')
         eq_(lyr.srs,'+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs')
-    except RuntimeError, e:
-        # only test datasources that we have installed
-        if not 'Could not create datasource' in str(e):
-            raise RuntimeError(e)
 
 if __name__ == "__main__":
     setup()

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 from nose.tools import *
-from utilities import execution_path, run_all
+from utilities import execution_path, run_all, datasources_available
 
 import os, sys, glob, mapnik
 
@@ -30,20 +30,27 @@ def test_broken_files():
     mapnik.logger.set_severity(default_logging_severity)
 
 def test_good_files():
-    good_files = glob.glob("../data/good_maps/*.xml")
+    all_files = glob.glob("../data/good_maps/*.xml")
 
-    failures = [];
+    good_files = list()
+    for xmlfile in all_files:
+        missing_plugins = set()
+        have_inputs = datasources_available(xmlfile, missing_plugins)
+        if have_inputs:
+            good_files.append(xmlfile)
+        else:
+            print 'Notice: skipping load_map_test for %s due to unavailable input plugins: %s' % (os.path.basename(xmlfile), list(missing_plugins))
+
+    failures = []
+    strict = True
     for filename in good_files:
         try:
             m = mapnik.Map(512, 512)
-            strict = True
             mapnik.load_map(m, filename, strict)
             base_path = os.path.dirname(filename)
-            mapnik.load_map_from_string(m,open(filename,'rb').read(),strict,base_path)
+            mapnik.load_map_from_string(m, open(filename, 'rb').read(), strict, base_path)
         except RuntimeError, e:
             # only test datasources that we have installed
             if not 'Could not create datasource' in str(e):
-                failures.append('Failed to load valid map (%s)!' % filename)
+                failures.append('Failed to load valid map %s (%s)!' % (filename, str(e)))
     eq_(len(failures),0,'\n'+'\n'.join(failures))
 
 if __name__ == "__main__":

@@ -378,9 +378,9 @@ def test_map_init_from_string():
     </Layer>
 </Map>'''
 
+    if 'shape' in mapnik.DatasourceCache.plugin_names():
         m = mapnik.Map(600, 300)
         eq_(m.base, '')
-    try:
         mapnik.load_map_from_string(m, map_string)
         eq_(m.base, './')
         mapnik.load_map_from_string(m, map_string, False, "") # this "" will have no effect
@@ -395,10 +395,6 @@ def test_map_init_from_string():
         m.base = 'foo'
         mapnik.load_map_from_string(m, map_string, True, ".")
         eq_(m.base, '.')
-    except RuntimeError, e:
-        # only test datasources that we have installed
-        if not 'Could not create datasource' in str(e):
-            raise RuntimeError(e)
 
 # Color initialization
 @raises(Exception) # Boost.Python.ArgumentError

@@ -1,7 +1,8 @@
 #!/usr/bin/env python
 
 from nose.tools import *
-from utilities import execution_path, run_all, contains_word, get_unique_colors
+from utilities import execution_path, run_all, contains_word, get_unique_colors,\
+    datasources_available
 
 import os, mapnik
 
@@ -89,27 +90,22 @@ def test_dataraster_query_point():
     assert len(features) == 0
 
 def test_load_save_map():
-    map = mapnik.Map(256,256)
+    m = mapnik.Map(256,256)
     in_map = "../data/good_maps/raster_symbolizer.xml"
-    try:
-        mapnik.load_map(map, in_map)
-
-        out_map = mapnik.save_map_to_string(map)
+    if datasources_available(in_map):
+        mapnik.load_map(m, in_map)
+        out_map = mapnik.save_map_to_string(m)
         assert 'RasterSymbolizer' in out_map
         assert 'RasterColorizer' in out_map
         assert 'stop' in out_map
-    except RuntimeError, e:
-        # only test datasources that we have installed
-        if not 'Could not create datasource' in str(e):
-            raise RuntimeError(str(e))
 
 def test_raster_with_alpha_blends_correctly_with_background():
     WIDTH = 500
     HEIGHT = 500
 
-    map = mapnik.Map(WIDTH, HEIGHT)
+    m = mapnik.Map(WIDTH, HEIGHT)
     WHITE = mapnik.Color(255, 255, 255)
-    map.background = WHITE
+    m.background = WHITE
 
     style = mapnik.Style()
     rule = mapnik.Rule()
@@ -119,20 +115,20 @@ def test_raster_with_alpha_blends_correctly_with_background():
     rule.symbols.append(symbolizer)
     style.rules.append(rule)
 
-    map.append_style('raster_style', style)
+    m.append_style('raster_style', style)
 
     map_layer = mapnik.Layer('test_layer')
     filepath = '../data/raster/white-alpha.png'
     if 'gdal' in mapnik.DatasourceCache.plugin_names():
         map_layer.datasource = mapnik.Gdal(file=filepath)
         map_layer.styles.append('raster_style')
-        map.layers.append(map_layer)
+        m.layers.append(map_layer)
 
-        map.zoom_all()
+        m.zoom_all()
 
         mim = mapnik.Image(WIDTH, HEIGHT)
 
-        mapnik.render(map, mim)
+        mapnik.render(m, mim)
         imdata = mim.tostring()
         # All white is expected
         eq_(get_unique_colors(mim),['rgba(254,254,254,255)'])

@@ -5,7 +5,7 @@ from nose.tools import *
 import tempfile
 import os, mapnik
 from nose.tools import *
-from utilities import execution_path, run_all
+from utilities import execution_path, run_all, datasources_available
 
 def setup():
     # All of the paths used are relative, if we run the tests
@@ -104,16 +104,15 @@ def get_paired_images(w,h,mapfile):
     return im,im2
 
 def test_render_from_serialization():
-    try:
-        im,im2 = get_paired_images(100,100,'../data/good_maps/building_symbolizer.xml')
-        eq_(im.tostring(),im2.tostring())
+    xmlfile = '../data/good_maps/building_symbolizer.xml'
+    if datasources_available(xmlfile):
+        im, im2 = get_paired_images(100, 100, xmlfile)
+        eq_(im.tostring(), im2.tostring())
 
-        im,im2 = get_paired_images(100,100,'../data/good_maps/polygon_symbolizer.xml')
-        eq_(im.tostring(),im2.tostring())
-    except RuntimeError, e:
-        # only test datasources that we have installed
-        if not 'Could not create datasource' in str(e):
-            raise RuntimeError(e)
+    xmlfile = '../data/good_maps/polygon_symbolizer.xml'
+    if datasources_available(xmlfile):
+        im, im2 = get_paired_images(100, 100, xmlfile)
+        eq_(im.tostring(), im2.tostring())
 
 def test_render_points():
     if not mapnik.has_cairo(): return

@@ -1,11 +1,10 @@
 #!/usr/bin/env python
 
 from nose.tools import *
-from utilities import execution_path, run_all
+from utilities import execution_path, run_all, datasources_available
 import tempfile
 
 import os, sys, glob, mapnik
-from xml.etree import ElementTree
 
 def setup():
     # All of the paths used are relative, if we run the tests
@@ -13,19 +12,10 @@ def setup():
     os.chdir(execution_path('.'))
 
 def compare_map(xmlfile):
-    have_inputs = True
     missing_plugins = set()
-    e = ElementTree.parse(xmlfile)
-    data_source_type_params = e.findall(".//Layer/Datasource/Parameter[@name=\"type\"]")
-    if data_source_type_params is not None and len(data_source_type_params) > 0:
-        for p in data_source_type_params:
-            dstype = p.text
-            if dstype not in mapnik.DatasourceCache.plugin_names():
-                have_inputs = False
-                missing_plugins.add(dstype)
-
+    have_inputs = datasources_available(xmlfile, missing_plugins)
     if not have_inputs:
-        print 'Notice: skipping map comparison for %s due to missing input plugins: %s' % (os.path.basename(xmlfile), list(missing_plugins))
+        print 'Notice: skipping map comparison for %s due to unavailable input plugins: %s' % (os.path.basename(xmlfile), list(missing_plugins))
         return False
 
     m = mapnik.Map(256, 256)
@@ -40,10 +30,10 @@ def compare_map(xmlfile):
     mapnik.save_map(m, test_map)
     new_map = mapnik.Map(256, 256)
     mapnik.load_map(new_map, test_map, False, absolute_base)
-    open(test_map2,'w').write(mapnik.save_map_to_string(new_map))
-    diff = ' diff %s %s' % (os.path.abspath(test_map),os.path.abspath(test_map2))
+    open(test_map2, 'w').write(mapnik.save_map_to_string(new_map))
+    diff = ' diff %s %s' % (os.path.abspath(test_map), os.path.abspath(test_map2))
     try:
-        eq_(open(test_map).read(),open(test_map2).read())
+        eq_(open(test_map).read(), open(test_map2).read())
     except AssertionError, e:
         raise AssertionError('serialized map "%s" not the same after being reloaded, \ncompare with command:\n\n$%s' % (xmlfile, diff))
 

@@ -4,6 +4,7 @@
 from nose.plugins.errorclass import ErrorClass, ErrorClassPlugin
 
 import os, sys, inspect, traceback
+from xml.etree import ElementTree
 import mapnik
 
 def execution_path(filename):
@@ -86,3 +87,33 @@ def side_by_side_image(left_im, right_im):
     im.blend(0, 0, left_im, 1.0)
     im.blend(left_im.width() + 1, 0, right_im, 1.0)
     return im
+
+def datasources_available(map_file, missing_datasources=None):
+    '''
+    datasources_available
+
+    Determine whether the map file contains only available data source types.
+
+    @param map_file: path of XML map file
+    @type map_file: string
+
+    @param missing_datasources: set of data source type names. if there
+                                are unavailable data sources, and a collection
+                                reference is provided, it will be populated with
+                                the names of the unavailable data sources
+    @type missing_datasources: set
+
+    @return: True if all referenced data source types are available,
+             otherwise False
+    '''
+    have_inputs = True
+    map_xml = ElementTree.parse(map_file)
+    data_source_type_params = map_xml.findall(".//Datasource/Parameter[@name=\"type\"]")
+    if data_source_type_params is not None and len(data_source_type_params) > 0:
+        for p in data_source_type_params:
+            dstype = p.text
+            if dstype not in mapnik.DatasourceCache.plugin_names():
+                have_inputs = False
+                if missing_datasources is not None:
+                    missing_datasources.add(dstype)
+    return have_inputs
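
For context, a minimal usage sketch (not part of the diff) of the helper added to utilities.py above, showing the guard pattern the other test files now follow instead of catching 'Could not create datasource' at runtime. The test name is illustrative; the map path is one already used in this commit.

import mapnik
from utilities import datasources_available

def test_render_example_map():
    xmlfile = '../data/good_maps/agg_poly_gamma_map.xml'
    missing_plugins = set()
    # skip the test body if any referenced input plugin is not installed
    if not datasources_available(xmlfile, missing_plugins):
        print 'Notice: skipping test due to unavailable input plugins: %s' % list(missing_plugins)
        return
    m = mapnik.Map(256, 256)
    mapnik.load_map(m, xmlfile)
    m.zoom_all()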