Merge pull request #3478 from estadtherr/bypass_unconfigured_plugins_2.3.x
Bypass unavailable plugins in Python tests (2.3.x)
commit 81eff1025d
8 changed files with 100 additions and 82 deletions
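The change applies one pattern throughout the Python test suite: instead of loading a map and swallowing the RuntimeError raised when an input plugin is not compiled in, each test first asks whether the datasources referenced by the map XML are actually available and skips the test body otherwise. A minimal before/after sketch of that pattern (the test name is hypothetical; the map path is reused from the first hunks below):

import mapnik
from utilities import datasources_available  # helper added to the test utilities in this PR

def test_example():  # hypothetical test used only to illustrate the pattern
    xmlfile = '../data/good_maps/agg_poly_gamma_map.xml'

    # Old style: attempt the load and ignore missing-plugin errors.
    # try:
    #     m = mapnik.Map(256, 256)
    #     mapnik.load_map(m, xmlfile)
    # except RuntimeError, e:
    #     if not 'Could not create datasource' in str(e):
    #         raise
    # New style: check plugin availability up front and skip cleanly.
    if datasources_available(xmlfile):
        m = mapnik.Map(256, 256)
        mapnik.load_map(m, xmlfile)
        m.zoom_all()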
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 from nose.tools import *
-from utilities import execution_path, run_all
+from utilities import execution_path, run_all, datasources_available
 import os, mapnik
 
 def setup():
@@ -126,9 +126,10 @@ def test_hit_grid():
         """ encode a list of strings with run-length compression """
         return ["%d:%s" % (len(list(group)), name) for name, group in groupby(l)]
 
-    m = mapnik.Map(256,256);
-    try:
-        mapnik.load_map(m,'../data/good_maps/agg_poly_gamma_map.xml');
+    xmlfile = '../data/good_maps/agg_poly_gamma_map.xml'
+    if datasources_available(xmlfile):
+        m = mapnik.Map(256,256);
+        mapnik.load_map(m, xmlfile);
         m.zoom_all()
         join_field = 'NAME'
         fg = [] # feature grid
@@ -144,10 +145,6 @@ def test_hit_grid():
         hit_list = '|'.join(rle_encode(fg))
         eq_(hit_list[:16],'730:|2:Greenland')
         eq_(hit_list[-12:],'1:Chile|812:')
-    except RuntimeError, e:
-        # only test datasources that we have installed
-        if not 'Could not create datasource' in str(e):
-            raise RuntimeError(str(e))
 
 
 if __name__ == '__main__':

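For context, the rle_encode helper shown in the hunk above run-length encodes the per-pixel feature names, which is what the '730:|2:Greenland' assertions check. A standalone worked example (values illustrative):

from itertools import groupby

def rle_encode(l):
    """ encode a list of strings with run-length compression """
    return ["%d:%s" % (len(list(group)), name) for name, group in groupby(l)]

# Three empty cells followed by two 'Greenland' cells:
print '|'.join(rle_encode(['', '', '', 'Greenland', 'Greenland']))
# prints: 3:|2:Greenland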
@@ -28,15 +28,15 @@ def test_adding_datasource_to_layer():
 
 </Map>
 '''
-    m = mapnik.Map(256, 256)
+    if 'shape' in mapnik.DatasourceCache.plugin_names():
+        m = mapnik.Map(256, 256)
 
-    try:
         mapnik.load_map_from_string(m, map_string)
 
         # validate it loaded fine
-        eq_(m.layers[0].styles[0],'world_borders_style')
-        eq_(m.layers[0].styles[1],'point_style')
-        eq_(len(m.layers),1)
+        eq_(m.layers[0].styles[0], 'world_borders_style')
+        eq_(m.layers[0].styles[1], 'point_style')
+        eq_(len(m.layers), 1)
 
         # also assign a variable reference to that layer
         # below we will test that this variable references
@@ -44,31 +44,27 @@ def test_adding_datasource_to_layer():
         lyr = m.layers[0]
 
         # ensure that there was no datasource for the layer...
-        eq_(m.layers[0].datasource,None)
-        eq_(lyr.datasource,None)
+        eq_(m.layers[0].datasource, None)
+        eq_(lyr.datasource, None)
 
         # also note that since the srs was black it defaulted to wgs84
-        eq_(m.layers[0].srs,'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
-        eq_(lyr.srs,'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
+        eq_(m.layers[0].srs, '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
+        eq_(lyr.srs, '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
 
         # now add a datasource one...
         ds = mapnik.Shapefile(file='../data/shp/world_merc.shp')
         m.layers[0].datasource = ds
 
         # now ensure it is attached
-        eq_(m.layers[0].datasource.describe()['name'],"shape")
-        eq_(lyr.datasource.describe()['name'],"shape")
+        eq_(m.layers[0].datasource.describe()['name'], "shape")
+        eq_(lyr.datasource.describe()['name'], "shape")
 
         # and since we have now added a shapefile in spherical mercator, adjust the projection
         lyr.srs = '+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs'
 
         # test that assignment
-        eq_(m.layers[0].srs,'+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs')
-        eq_(lyr.srs,'+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs')
-    except RuntimeError, e:
-        # only test datasources that we have installed
-        if not 'Could not create datasource' in str(e):
-            raise RuntimeError(e)
+        eq_(m.layers[0].srs, '+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs')
+        eq_(lyr.srs, '+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs')
 
 if __name__ == "__main__":
     setup()

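Where a test depends on a single, known plugin (the shapefile reader above), the change guards on the plugin name directly rather than parsing the map XML. A minimal sketch, assuming a standard mapnik Python build (the shapefile path is reused from the test above):

import mapnik

# plugin_names() lists the input plugins compiled into this build,
# e.g. ['csv', 'gdal', 'ogr', 'shape', ...] depending on configuration.
if 'shape' in mapnik.DatasourceCache.plugin_names():
    ds = mapnik.Shapefile(file='../data/shp/world_merc.shp')
    # ... run the shapefile-dependent assertions ...
else:
    print 'Notice: shape input plugin not available, skipping'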
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 from nose.tools import *
-from utilities import execution_path, run_all
+from utilities import execution_path, run_all, datasources_available
 
 import os, sys, glob, mapnik
 
@@ -38,21 +38,28 @@ def test_broken_files():
     mapnik.logger.set_severity(default_logging_severity)
 
 def test_good_files():
-    good_files = glob.glob("../data/good_maps/*.xml")
-    good_files.extend(glob.glob("../visual_tests/styles/*.xml"))
+    all_files = glob.glob("../data/good_maps/*.xml")
+    all_files.extend(glob.glob("../visual_tests/styles/*.xml"))
 
+    good_files = list()
+    for xmlfile in all_files:
+        missing_plugins = set()
+        have_inputs = datasources_available(xmlfile, missing_plugins)
+        if have_inputs:
+            good_files.append(xmlfile)
+        else:
+            print 'Notice: skipping load_map_test for %s due to unavailable input plugins: %s' % (os.path.basename(xmlfile), list(missing_plugins))
 
     failures = [];
+    strict = False
     for filename in good_files:
         try:
             m = mapnik.Map(512, 512)
-            strict = False
             mapnik.load_map(m, filename, strict)
             base_path = os.path.dirname(filename)
-            mapnik.load_map_from_string(m,open(filename,'rb').read(),strict,base_path)
+            mapnik.load_map_from_string(m, open(filename, 'rb').read(), strict, base_path)
         except RuntimeError, e:
             # only test datasources that we have installed
             if not 'Could not create datasource' in str(e):
-                failures.append('Failed to load valid map (%s)!' % filename)
+                failures.append('Failed to load valid map %s (%s)!' % (filename, str(e)))
     eq_(len(failures),0,'\n'+'\n'.join(failures))
 
 if __name__ == "__main__":

@@ -236,7 +236,6 @@ def test_markers_symbolizer():
     eq_(p.clip,True)
     eq_(p.comp_op,mapnik.CompositeOp.src_over)
 
-
     p.width = mapnik.Expression('12')
     p.height = mapnik.Expression('12')
     eq_(str(p.width),'12')
@@ -378,9 +377,9 @@ def test_map_init_from_string():
     </Layer>
 </Map>'''
 
-    m = mapnik.Map(600, 300)
-    eq_(m.base, '')
-    try:
+    if 'shape' in mapnik.DatasourceCache.plugin_names():
+        m = mapnik.Map(600, 300)
+        eq_(m.base, '')
         mapnik.load_map_from_string(m, map_string)
         eq_(m.base, './')
         mapnik.load_map_from_string(m, map_string, False, "") # this "" will have no effect
@@ -395,10 +394,6 @@ def test_map_init_from_string():
         m.base = 'foo'
         mapnik.load_map_from_string(m, map_string, True, ".")
         eq_(m.base, '.')
-    except RuntimeError, e:
-        # only test datasources that we have installed
-        if not 'Could not create datasource' in str(e):
-            raise RuntimeError(e)
 
 # Color initialization
 @raises(Exception) # Boost.Python.ArgumentError

@@ -1,7 +1,8 @@
 #!/usr/bin/env python
 
 from nose.tools import *
-from utilities import execution_path, run_all, contains_word, get_unique_colors
+from utilities import execution_path, run_all, contains_word, get_unique_colors,\
+    datasources_available
 
 import os, mapnik
 
@@ -89,27 +90,22 @@ def test_dataraster_query_point():
     assert len(features) == 0
 
 def test_load_save_map():
-    map = mapnik.Map(256,256)
+    m = mapnik.Map(256,256)
     in_map = "../visual_tests/styles/raster_symbolizer.xml"
-    try:
-        mapnik.load_map(map, in_map)
-
-        out_map = mapnik.save_map_to_string(map)
+    if datasources_available(in_map):
+        mapnik.load_map(m, in_map)
+        out_map = mapnik.save_map_to_string(m)
         assert 'RasterSymbolizer' in out_map
         assert 'RasterColorizer' in out_map
         assert 'stop' in out_map
-    except RuntimeError, e:
-        # only test datasources that we have installed
-        if not 'Could not create datasource' in str(e):
-            raise RuntimeError(str(e))
 
 def test_raster_with_alpha_blends_correctly_with_background():
     WIDTH = 500
     HEIGHT = 500
 
-    map = mapnik.Map(WIDTH, HEIGHT)
+    m = mapnik.Map(WIDTH, HEIGHT)
     WHITE = mapnik.Color(255, 255, 255)
-    map.background = WHITE
+    m.background = WHITE
 
     style = mapnik.Style()
     rule = mapnik.Rule()
@@ -119,20 +115,20 @@ def test_raster_with_alpha_blends_correctly_with_background():
     rule.symbols.append(symbolizer)
     style.rules.append(rule)
 
-    map.append_style('raster_style', style)
+    m.append_style('raster_style', style)
 
     map_layer = mapnik.Layer('test_layer')
     filepath = '../data/raster/white-alpha.png'
     if 'gdal' in mapnik.DatasourceCache.plugin_names():
         map_layer.datasource = mapnik.Gdal(file=filepath)
         map_layer.styles.append('raster_style')
-        map.layers.append(map_layer)
+        m.layers.append(map_layer)
 
-        map.zoom_all()
+        m.zoom_all()
 
         mim = mapnik.Image(WIDTH, HEIGHT)
 
-        mapnik.render(map, mim)
+        mapnik.render(m, mim)
         imdata = mim.tostring()
         # All white is expected
         eq_(get_unique_colors(mim),['rgba(254,254,254,255)'])

@@ -4,8 +4,7 @@
 from nose.tools import *
 import tempfile
 import os, mapnik
-from nose.tools import *
-from utilities import execution_path, run_all
+from utilities import execution_path, run_all, datasources_available
 
 def setup():
     # All of the paths used are relative, if we run the tests
@@ -105,16 +104,15 @@ def get_paired_images(w,h,mapfile):
     return im,im2
 
 def test_render_from_serialization():
-    try:
-        im,im2 = get_paired_images(100,100,'../data/good_maps/building_symbolizer.xml')
+    xmlfile = '../data/good_maps/building_symbolizer.xml'
+    if datasources_available(xmlfile):
+        im, im2 = get_paired_images(100, 100, xmlfile)
         eq_(im.tostring('png32'),im2.tostring('png32'))
 
-        im,im2 = get_paired_images(100,100,'../data/good_maps/polygon_symbolizer.xml')
+    xmlfile = '../data/good_maps/polygon_symbolizer.xml'
+    if datasources_available(xmlfile):
+        im, im2 = get_paired_images(100, 100, xmlfile)
         eq_(im.tostring('png32'),im2.tostring('png32'))
-    except RuntimeError, e:
-        # only test datasources that we have installed
-        if not 'Could not create datasource' in str(e):
-            raise RuntimeError(e)
 
 def test_render_points():
     if not mapnik.has_cairo(): return

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 from nose.tools import *
-from utilities import execution_path, run_all
+from utilities import execution_path, run_all, datasources_available
 import tempfile
 
 import os, sys, glob, mapnik
@@ -19,16 +19,16 @@ def setup():
 def teardown():
     mapnik.logger.set_severity(default_logging_severity)
 
-def compare_map(xml):
+def compare_map(xmlfile):
+    missing_plugins = set()
+    have_inputs = datasources_available(xmlfile, missing_plugins)
+    if not have_inputs:
+        print 'Notice: skipping saved map comparison for %s due to unavailable input plugins: %s' % (os.path.basename(xmlfile), list(missing_plugins))
+        return False
+
     m = mapnik.Map(256, 256)
-    absolute_base = os.path.abspath(os.path.dirname(xml))
-    try:
-        mapnik.load_map(m, xml, True, absolute_base)
-    except RuntimeError, e:
-        # only test datasources that we have installed
-        if not 'Could not create datasource' in str(e):
-            raise RuntimeError(str(e))
-        return
+    absolute_base = os.path.abspath(os.path.dirname(xmlfile))
+    mapnik.load_map(m, xmlfile, True, absolute_base)
     (handle, test_map) = tempfile.mkstemp(suffix='.xml', prefix='mapnik-temp-map1-')
     os.close(handle)
     (handle, test_map2) = tempfile.mkstemp(suffix='.xml', prefix='mapnik-temp-map2-')
@@ -38,12 +38,12 @@ def compare_map(xml):
     mapnik.save_map(m, test_map)
     new_map = mapnik.Map(256, 256)
     mapnik.load_map(new_map, test_map, True, absolute_base)
-    open(test_map2,'w').write(mapnik.save_map_to_string(new_map))
+    open(test_map2, 'w').write(mapnik.save_map_to_string(new_map))
     diff = ' diff %s %s' % (os.path.abspath(test_map),os.path.abspath(test_map2))
     try:
-        eq_(open(test_map).read(),open(test_map2).read())
+        eq_(open(test_map).read(), open(test_map2).read())
     except AssertionError, e:
-        raise AssertionError('serialized map "%s" not the same after being reloaded, \ncompare with command:\n\n$%s' % (xml,diff))
+        raise AssertionError('serialized map "%s" not the same after being reloaded, \ncompare with command:\n\n$%s' % (xmlfile, diff))
 
     if os.path.exists(test_map):
         os.remove(test_map)
@@ -53,8 +53,6 @@ def compare_map(xml):
 
 def test_compare_map():
     good_maps = glob.glob("../data/good_maps/*.xml")
-    # remove one map that round trips CDATA differently, but this is okay
-    good_maps.remove('../data/good_maps/empty_parameter2.xml')
     for m in good_maps:
         compare_map(m)
 

@@ -6,6 +6,7 @@ from nose.tools import assert_almost_equal
 
 import os, sys, inspect, traceback
 import mapnik
+from xml.etree import ElementTree
 
 def execution_path(filename):
     return os.path.join(os.path.dirname(sys._getframe(1).f_code.co_filename), filename)
@@ -55,7 +56,7 @@ def get_unique_colors(im):
         for y in range(im.height()):
             pixel = im.get_pixel(x,y)
             if pixel not in pixels:
-                pixels.append(pixel)
+                pixels.append(pixel)
     pixels = sorted(pixels)
     return map(pixel2rgba,pixels)
 
@@ -94,3 +95,33 @@ def assert_box2d_almost_equal(a, b, msg=None):
     assert_almost_equal(a.maxx, b.maxx, msg=msg)
     assert_almost_equal(a.miny, b.miny, msg=msg)
     assert_almost_equal(a.maxy, b.maxy, msg=msg)
+
+def datasources_available(map_file, missing_datasources=None):
+    '''
+    datasources_available
+
+    Determine whether the map file contains only available data source types.
+
+    @param map_file: path of XML map file
+    @type map_file: string
+
+    @param missing_datasources: collection of data source type names. if there
+                                are unavailable data sources, and a collection
+                                reference is provided, it will be populated with
+                                the names of the unavailable data sources
+    @type missing_datasources: collection reference
+
+    @return: True if all referenced data source types are available,
+             otherwise False
+    '''
+    have_inputs = True
+    map_xml = ElementTree.parse(map_file)
+    data_source_type_params = map_xml.findall(".//Datasource/Parameter[@name=\"type\"]")
+    if data_source_type_params is not None and len(data_source_type_params) > 0:
+        for p in data_source_type_params:
+            dstype = p.text
+            if dstype not in mapnik.DatasourceCache.plugin_names():
+                have_inputs = False
+                if missing_datasources is not None:
+                    missing_datasources.add(dstype)
+    return have_inputs

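The new helper can be called either as a plain boolean guard or with a collection that reports which plugins are missing, as the load_map and save_map tests above do. A short usage sketch (map path illustrative):

import os
from utilities import datasources_available

xmlfile = '../data/good_maps/agg_poly_gamma_map.xml'

# Boolean guard only:
if datasources_available(xmlfile):
    pass  # run the test body

# Or collect the names of the unavailable plugins for a skip notice:
missing_plugins = set()
if not datasources_available(xmlfile, missing_plugins):
    print 'Notice: skipping test for %s due to unavailable input plugins: %s' % (
        os.path.basename(xmlfile), list(missing_plugins))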