#!/usr/bin/env python
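"""Datasource read tests for the shape, ogr, sqlite and gdal plugins:
field listing, feature counts, envelopes, attributes and error handling."""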

from nose.tools import *

from utilities import execution_path, run_all

import os, mapnik

def setup():
    # All of the paths used are relative; if we run the tests
    # from another directory we need to chdir() first.
    os.chdir(execution_path('.'))
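
# Warn (but do not fail) when no datasource plugins have been loaded.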
def test_that_datasources_exist():
    if len(mapnik.DatasourceCache.plugin_names()) == 0:
        print '***NOTICE*** - no datasource plugins have been loaded'
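
# Querying a layer whose VRT refers to a missing raster should raise a
# RuntimeError; stdout/stderr are redirected to /dev/null during the query
# so GDAL's error output does not pollute the test log.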
# adapted from raster_symboliser_test#test_dataraster_query_point
@raises(RuntimeError)
def test_vrt_referring_to_missing_files():
    srs = '+init=epsg:32630'
    if 'gdal' in mapnik.DatasourceCache.plugin_names():
        lyr = mapnik.Layer('dataraster')
        lyr.datasource = mapnik.Gdal(
            file='../data/raster/missing_raster.vrt',
            band=1,
        )
        lyr.srs = srs
        _map = mapnik.Map(256, 256, srs)
        _map.layers.append(lyr)

        # center of the extent of the raster
        x, y = 556113.0, 4381428.0

        _map.zoom_all()

        # Suppress the error output: open two fds on /dev/null
        null_fds = [os.open(os.devnull, os.O_RDWR) for _ in xrange(2)]
        # save the current stdout/stderr file descriptors
        save = os.dup(1), os.dup(2)
        # put the /dev/null fds on 1 and 2
        os.dup2(null_fds[0], 1)
        os.dup2(null_fds[1], 2)

        try:
            # should raise a RuntimeError here
            _map.query_point(0, x, y).features
        finally:
            # restore the file descriptors so results can be printed
            os.dup2(save[0], 1)
            os.dup2(save[1], 2)
            # close the temporary fds
            os.close(null_fds[0])
            os.close(null_fds[1])
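
# The fields and describe() metadata of poly.shp should match the fixture.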
def test_field_listing():
    if 'shape' in mapnik.DatasourceCache.plugin_names():
        ds = mapnik.Shapefile(file='../data/shp/poly.shp')
        fields = ds.fields()
        eq_(fields, ['AREA', 'EAS_ID', 'PRFEDEA'])
        desc = ds.describe()
        eq_(desc['geometry_type'], mapnik.DataGeometryType.Polygon)
        eq_(desc['name'], 'shape')
        eq_(desc['type'], mapnik.DataType.Vector)
        eq_(desc['encoding'], 'utf-8')
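
# poly.shp is expected to yield 10 features in total.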
def test_total_feature_count_shp():
    if 'shape' in mapnik.DatasourceCache.plugin_names():
        ds = mapnik.Shapefile(file='../data/shp/poly.shp')
        features = ds.all_features()
        num_feats = len(features)
        eq_(num_feats, 10)
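
# points.geojson is expected to yield 5 point features via the ogr plugin.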
def test_total_feature_count_json():
    if 'ogr' in mapnik.DatasourceCache.plugin_names():
        ds = mapnik.Ogr(file='../data/json/points.geojson', layer_by_index=0)
        desc = ds.describe()
        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
        eq_(desc['name'], 'ogr')
        eq_(desc['type'], mapnik.DataType.Vector)
        eq_(desc['encoding'], 'utf-8')
        features = ds.all_features()
        num_feats = len(features)
        eq_(num_feats, 5)
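
# The first table of world.sqlite is expected to yield 245 polygon features.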
def test_sqlite_reading():
    if 'sqlite' in mapnik.DatasourceCache.plugin_names():
        ds = mapnik.SQLite(file='../data/sqlite/world.sqlite', table_by_index=0)
        desc = ds.describe()
        eq_(desc['geometry_type'], mapnik.DataGeometryType.Polygon)
        eq_(desc['name'], 'sqlite')
        eq_(desc['type'], mapnik.DataType.Vector)
        eq_(desc['encoding'], 'utf-8')
        features = ds.all_features()
        num_feats = len(features)
        eq_(num_feats, 245)
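
# The ogr plugin also accepts raw GeoJSON text in place of a filename.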
def test_reading_json_from_string():
    json = open('../data/json/points.geojson', 'r').read()
    if 'ogr' in mapnik.DatasourceCache.plugin_names():
        ds = mapnik.Ogr(file=json, layer_by_index=0)
        features = ds.all_features()
        num_feats = len(features)
        eq_(num_feats, 5)
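
# Every feature envelope should be contained by, and intersect, the
# datasource's overall envelope.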
def test_feature_envelope():
    if 'shape' in mapnik.DatasourceCache.plugin_names():
        ds = mapnik.Shapefile(file='../data/shp/poly.shp')
        features = ds.all_features()
        for feat in features:
            env = feat.envelope()
            contains = ds.envelope().contains(env)
            eq_(contains, True)
            intersects = ds.envelope().intersects(env)
            eq_(intersects, True)
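
# Attribute values and field types of the first feature in poly.shp.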
def test_feature_attributes():
    if 'shape' in mapnik.DatasourceCache.plugin_names():
        ds = mapnik.Shapefile(file='../data/shp/poly.shp')
        features = ds.all_features()
        feat = features[0]
        attrs = {'PRFEDEA': u'35043411', 'EAS_ID': 168, 'AREA': 215229.266}
        eq_(feat.attributes, attrs)
        eq_(ds.fields(), ['AREA', 'EAS_ID', 'PRFEDEA'])
        eq_(ds.field_types(), ['float', 'int', 'str'])
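
# layer_by_sql filters the OGR layer with a SQL query; exactly one feature
# in poly.shp has EAS_ID = 168.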
def test_ogr_layer_by_sql():
    if 'ogr' in mapnik.DatasourceCache.plugin_names():
        ds = mapnik.Ogr(file='../data/shp/poly.shp', layer_by_sql='SELECT * FROM poly WHERE EAS_ID = 168')
        features = ds.all_features()
        num_feats = len(features)
        eq_(num_feats, 1)
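
# Sample the rendered map every 4 pixels with query_map_point() and
# run-length encode the name of the feature (if any) under each point.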
def test_hit_grid():
    from itertools import groupby

    def rle_encode(l):
        """ encode a list of strings with run-length compression """
        return ["%d:%s" % (len(list(group)), name) for name, group in groupby(l)]

    m = mapnik.Map(256, 256)
    try:
        mapnik.load_map(m, '../data/good_maps/agg_poly_gamma_map.xml')
        m.zoom_all()
        join_field = 'NAME'
        fg = []  # feature grid
        for y in range(0, 256, 4):
            for x in range(0, 256, 4):
                featureset = m.query_map_point(0, x, y)
                added = False
                for feature in featureset.features:
                    fg.append(feature[join_field])
                    added = True
                if not added:
                    fg.append('')
        hit_list = '|'.join(rle_encode(fg))
        eq_(hit_list[:16], '730:|2:Greenland')
        eq_(hit_list[-12:], '1:Chile|812:')
    except RuntimeError, e:
        # only test datasources that we have installed
        if 'Could not create datasource' not in str(e):
            raise RuntimeError(str(e))

if __name__ == '__main__':
    setup()
    exit(run_all(eval(x) for x in dir() if x.startswith("test_")))