2009-05-01 01:24:12 +00:00
|
|
|
#!/usr/bin/env python
|
|
|
|
|
|
|
|
from nose.tools import *
|
2013-06-03 02:28:24 +00:00
|
|
|
from utilities import execution_path, run_all
|
2011-11-23 11:33:58 +00:00
|
|
|
import os, mapnik
|
2009-05-01 01:24:12 +00:00
|
|
|
|
|
|
|
def setup():
    """Run by nose before each test module: cd into the test directory.

    Every datasource path in this file is relative ('../data/...'), so the
    tests only work when the working directory is the test file's own.
    """
    test_dir = execution_path('.')
    os.chdir(test_dir)
|
2011-11-12 01:03:56 +00:00
|
|
|
|
|
|
|
def test_that_datasources_exist():
|
2012-09-05 11:53:37 +00:00
|
|
|
if len(mapnik.DatasourceCache.plugin_names()) == 0:
|
2011-11-12 01:03:56 +00:00
|
|
|
print '***NOTICE*** - no datasource plugins have been loaded'
|
2012-02-24 21:13:56 +00:00
|
|
|
|
2009-05-01 01:24:12 +00:00
|
|
|
def test_field_listing():
    """Verify field names and describe() metadata of a shapefile source."""
    if 'shape' not in mapnik.DatasourceCache.plugin_names():
        return  # shape plugin not built; nothing to test
    ds = mapnik.Shapefile(file='../data/shp/poly.shp')
    eq_(ds.fields(), ['AREA', 'EAS_ID', 'PRFEDEA'])
    meta = ds.describe()
    eq_(meta['geometry_type'], mapnik.DataGeometryType.Polygon)
    eq_(meta['name'], 'shape')
    eq_(meta['type'], mapnik.DataType.Vector)
    eq_(meta['encoding'], 'utf-8')
|
2009-05-01 01:42:58 +00:00
|
|
|
|
2010-11-14 08:56:42 +00:00
|
|
|
def test_total_feature_count_shp():
    """The poly.shp fixture must expose exactly 10 features."""
    if 'shape' not in mapnik.DatasourceCache.plugin_names():
        return  # shape plugin not built; nothing to test
    ds = mapnik.Shapefile(file='../data/shp/poly.shp')
    feature_count = len(ds.all_features())
    eq_(feature_count, 10)
|
2009-05-01 01:42:58 +00:00
|
|
|
|
2010-11-14 08:56:42 +00:00
|
|
|
def test_total_feature_count_json():
    """Read a GeoJSON file through OGR and check metadata + feature count."""
    if 'ogr' not in mapnik.DatasourceCache.plugin_names():
        return  # ogr plugin not built; nothing to test
    ds = mapnik.Ogr(file='../data/json/points.geojson', layer_by_index=0)
    meta = ds.describe()
    eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
    eq_(meta['name'], 'ogr')
    eq_(meta['type'], mapnik.DataType.Vector)
    eq_(meta['encoding'], 'utf-8')
    feature_count = len(ds.all_features())
    eq_(feature_count, 5)
|
2010-11-14 09:32:55 +00:00
|
|
|
|
2012-01-12 04:03:47 +00:00
|
|
|
def test_sqlite_reading():
    """Read the world.sqlite fixture and check metadata + feature count."""
    if 'sqlite' not in mapnik.DatasourceCache.plugin_names():
        return  # sqlite plugin not built; nothing to test
    ds = mapnik.SQLite(file='../data/sqlite/world.sqlite', table_by_index=0)
    meta = ds.describe()
    eq_(meta['geometry_type'], mapnik.DataGeometryType.Polygon)
    eq_(meta['name'], 'sqlite')
    eq_(meta['type'], mapnik.DataType.Vector)
    eq_(meta['encoding'], 'utf-8')
    feature_count = len(ds.all_features())
    eq_(feature_count, 245)
|
|
|
|
|
2010-11-14 09:32:55 +00:00
|
|
|
def test_reading_json_from_string():
    """GeoJSON passed inline as a string should load through the OGR plugin.

    Fixes two issues in the original:
      - the file object returned by open() was never closed (resource leak);
      - the local variable was named ``json``, shadowing the stdlib module name.
    """
    # 'with' guarantees the handle is closed even if read() raises.
    with open('../data/json/points.geojson', 'r') as f:
        geojson = f.read()
    if 'ogr' in mapnik.DatasourceCache.plugin_names():
        # OGR accepts raw GeoJSON text via the same 'file' keyword.
        ds = mapnik.Ogr(file=geojson, layer_by_index=0)
        features = ds.all_features()
        eq_(len(features), 5)
|
2012-02-24 21:13:56 +00:00
|
|
|
|
2009-05-01 01:42:58 +00:00
|
|
|
def test_feature_envelope():
    """Every feature's envelope must lie within the datasource envelope.

    Bug fix: the second assertion originally computed ``intersects`` by
    calling ``contains(env)`` a second time — a copy-paste error. The
    variable name (and the point of a second check) show ``intersects(env)``
    was intended, so that is what is asserted now.
    """
    if 'shape' in mapnik.DatasourceCache.plugin_names():
        ds = mapnik.Shapefile(file='../data/shp/poly.shp')
        features = ds.all_features()
        for feat in features:
            env = feat.envelope()
            # Each feature box must be fully inside the overall extent...
            contains = ds.envelope().contains(env)
            eq_(contains, True)
            # ...and therefore must also intersect it.
            intersects = ds.envelope().intersects(env)
            eq_(intersects, True)
|
2009-05-01 01:42:58 +00:00
|
|
|
|
|
|
|
def test_feature_attributes():
    """Check attribute values, field names, and field types of poly.shp."""
    if 'shape' not in mapnik.DatasourceCache.plugin_names():
        return  # shape plugin not built; nothing to test
    ds = mapnik.Shapefile(file='../data/shp/poly.shp')
    first = ds.all_features()[0]
    expected = {'PRFEDEA': u'35043411', 'EAS_ID': 168, 'AREA': 215229.266}
    eq_(first.attributes, expected)
    eq_(ds.fields(), ['AREA', 'EAS_ID', 'PRFEDEA'])
    eq_(ds.field_types(), ['float', 'int', 'str'])
|
2010-12-10 20:11:09 +00:00
|
|
|
|
2011-10-24 09:53:33 +00:00
|
|
|
def test_ogr_layer_by_sql():
    """An OGR SQL layer filter should narrow poly.shp down to one feature."""
    if 'ogr' not in mapnik.DatasourceCache.plugin_names():
        return  # ogr plugin not built; nothing to test
    query = 'SELECT * FROM poly WHERE EAS_ID = 168'
    ds = mapnik.Ogr(file='../data/shp/poly.shp', layer_by_sql=query)
    matched = len(ds.all_features())
    eq_(matched, 1)
|
2011-10-24 09:53:33 +00:00
|
|
|
|
2010-12-10 20:11:09 +00:00
|
|
|
def test_hit_grid():
|
|
|
|
import os
|
|
|
|
from itertools import groupby
|
|
|
|
|
|
|
|
def rle_encode(l):
|
|
|
|
""" encode a list of strings with run-length compression """
|
|
|
|
return ["%d:%s" % (len(list(group)), name) for name, group in groupby(l)]
|
|
|
|
|
2011-11-23 11:33:58 +00:00
|
|
|
m = mapnik.Map(256,256);
|
2011-10-29 00:06:23 +00:00
|
|
|
try:
|
2011-11-23 11:33:58 +00:00
|
|
|
mapnik.load_map(m,'../data/good_maps/agg_poly_gamma_map.xml');
|
2011-10-29 00:06:23 +00:00
|
|
|
m.zoom_all()
|
|
|
|
join_field = 'NAME'
|
|
|
|
fg = [] # feature grid
|
|
|
|
for y in range(0, 256, 4):
|
|
|
|
for x in range(0, 256, 4):
|
|
|
|
featureset = m.query_map_point(0,x,y)
|
|
|
|
added = False
|
|
|
|
for feature in featureset.features:
|
|
|
|
fg.append(feature[join_field])
|
|
|
|
added = True
|
|
|
|
if not added:
|
|
|
|
fg.append('')
|
|
|
|
hit_list = '|'.join(rle_encode(fg))
|
|
|
|
eq_(hit_list[:16],'730:|2:Greenland')
|
|
|
|
eq_(hit_list[-12:],'1:Chile|812:')
|
|
|
|
except RuntimeError, e:
|
|
|
|
# only test datasources that we have installed
|
|
|
|
if not 'Could not create datasource' in str(e):
|
|
|
|
raise RuntimeError(str(e))
|
2012-02-24 21:13:56 +00:00
|
|
|
|
2010-12-10 20:11:09 +00:00
|
|
|
|
|
|
|
if __name__ == '__main__':
    setup()
    # Collect every test_* function defined above and hand them to the
    # shared runner. NOTE(review): eval() only ever sees names produced by
    # dir() of this module, so it cannot execute arbitrary input here.
    tests = (eval(name) for name in dir() if name.startswith("test_"))
    exit(run_all(tests))
|