2009-05-01 03:24:12 +02:00
|
|
|
#!/usr/bin/env python
|
|
|
|
|
|
|
|
from nose.tools import *
|
|
|
|
from utilities import execution_path
|
|
|
|
|
2011-11-23 12:33:58 +01:00
|
|
|
import os, mapnik
|
2009-05-01 03:24:12 +02:00
|
|
|
|
|
|
|
def setup():
    """Change into the test execution directory.

    All fixture paths used by these tests are relative, so when the
    suite is launched from another directory we must chdir() first.
    """
    target = execution_path('.')
    os.chdir(target)
|
2011-11-12 02:03:56 +01:00
|
|
|
|
|
|
|
def test_that_datasources_exist():
    """Emit a notice when no datasource plugins were loaded at all."""
    plugin_names = mapnik.DatasourceCache.instance().plugin_names()
    if not plugin_names:
        print('***NOTICE*** - no datasource plugins have been loaded')
|
2009-05-01 03:24:12 +02:00
|
|
|
|
|
|
|
def test_field_listing():
    """Verify field names and describe() output of the poly shapefile."""
    if 'shape' not in mapnik.DatasourceCache.instance().plugin_names():
        return
    ds = mapnik.Shapefile(file='../data/shp/poly.shp')
    eq_(ds.fields(), ['AREA', 'EAS_ID', 'PRFEDEA'])
    expected_desc = {'geometry_type': mapnik.DataGeometryType.Polygon,
                     'type': mapnik.DataType.Vector,
                     'name': 'shape',
                     'encoding': 'utf-8'}
    eq_(ds.describe(), expected_desc)
|
2009-05-01 03:42:58 +02:00
|
|
|
|
2010-11-14 09:56:42 +01:00
|
|
|
def test_total_feature_count_shp():
    """The poly shapefile fixture must yield exactly 10 features."""
    if 'shape' not in mapnik.DatasourceCache.instance().plugin_names():
        return
    ds = mapnik.Shapefile(file='../data/shp/poly.shp')
    feats = ds.all_features()
    eq_(len(feats), 10)
|
2009-05-01 03:42:58 +02:00
|
|
|
|
2010-11-14 09:56:42 +01:00
|
|
|
def test_total_feature_count_json():
    """The GeoJSON points fixture must yield 5 features via the OGR plugin."""
    if 'ogr' not in mapnik.DatasourceCache.instance().plugin_names():
        return
    ds = mapnik.Ogr(file='../data/json/points.json', layer_by_index=0)
    expected_desc = {'geometry_type': mapnik.DataGeometryType.Point,
                     'type': mapnik.DataType.Vector,
                     'name': 'ogr',
                     'encoding': 'utf-8'}
    eq_(ds.describe(), expected_desc)
    eq_(len(ds.all_features()), 5)
|
2010-11-14 10:32:55 +01:00
|
|
|
|
2012-01-12 05:03:47 +01:00
|
|
|
def test_sqlite_reading():
    """The world SQLite fixture must describe correctly and yield 245 features."""
    if 'sqlite' not in mapnik.DatasourceCache.instance().plugin_names():
        return
    ds = mapnik.SQLite(file='../data/sqlite/world.sqlite', table_by_index=0)
    expected_desc = {'geometry_type': mapnik.DataGeometryType.Polygon,
                     'type': mapnik.DataType.Vector,
                     'name': 'sqlite',
                     'encoding': 'utf-8'}
    eq_(ds.describe(), expected_desc)
    eq_(len(ds.all_features()), 245)
|
|
|
|
|
2010-11-14 10:32:55 +01:00
|
|
|
def test_reading_json_from_string():
    """The OGR plugin should accept raw GeoJSON text as its 'file' argument.

    Fixes two issues in the original: the file handle returned by
    open() was never closed (resource leak), and the local variable
    shadowed the stdlib ``json`` module name.
    """
    # Context manager guarantees the fixture file is closed promptly.
    with open('../data/json/points.json', 'r') as f:
        json_text = f.read()
    if 'ogr' in mapnik.DatasourceCache.instance().plugin_names():
        ds = mapnik.Ogr(file=json_text, layer_by_index=0)
        features = ds.all_features()
        num_feats = len(features)
        eq_(num_feats, 5)
|
2010-11-14 09:56:42 +01:00
|
|
|
|
2009-05-01 03:42:58 +02:00
|
|
|
def test_feature_envelope():
    """Each feature envelope must be contained by (and intersect) the
    overall datasource envelope.

    Bug fix: the original assigned ``intersects`` from a second call to
    ``contains()``; the variable name makes clear ``intersects()`` was
    the intended method, so the intersection case was never exercised.
    """
    if 'shape' in mapnik.DatasourceCache.instance().plugin_names():
        ds = mapnik.Shapefile(file='../data/shp/poly.shp')
        features = ds.all_features()
        for feat in features:
            env = feat.envelope()
            contains = ds.envelope().contains(env)
            eq_(contains, True)
            # was: ds.envelope().contains(env) — duplicated the check above
            intersects = ds.envelope().intersects(env)
            eq_(intersects, True)
|
2009-05-01 03:42:58 +02:00
|
|
|
|
|
|
|
def test_feature_attributes():
    """First shapefile feature exposes the expected attributes, fields,
    and field types."""
    if 'shape' not in mapnik.DatasourceCache.instance().plugin_names():
        return
    ds = mapnik.Shapefile(file='../data/shp/poly.shp')
    first = ds.all_features()[0]
    expected_attrs = {'PRFEDEA': u'35043411', 'EAS_ID': 168, 'AREA': 215229.266}
    eq_(first.attributes, expected_attrs)
    eq_(ds.fields(), ['AREA', 'EAS_ID', 'PRFEDEA'])
    eq_(ds.field_types(), ['float', 'int', 'str'])
|
2010-12-10 21:11:09 +01:00
|
|
|
|
2011-10-24 11:53:33 +02:00
|
|
|
def test_ogr_layer_by_sql():
    """An OGR SQL layer filter should narrow the shapefile to one feature."""
    if 'ogr' not in mapnik.DatasourceCache.instance().plugin_names():
        return
    ds = mapnik.Ogr(file='../data/shp/poly.shp',
                    layer_by_sql='SELECT * FROM poly WHERE EAS_ID = 168')
    eq_(len(ds.all_features()), 1)
|
2011-10-24 11:53:33 +02:00
|
|
|
|
2010-12-10 21:11:09 +01:00
|
|
|
def test_hit_grid():
    """Query the rendered map on a 4px grid and compare the run-length
    encoded sequence of hit feature names against known prefixes/suffixes.

    Cleanups vs. the original: dropped the redundant function-local
    ``import os`` (os is already imported at module level and unused
    here), removed stray statement-terminating semicolons, and replaced
    the deprecated ``except RuntimeError, e`` comma syntax with ``as e``
    (valid since Python 2.6).
    """
    from itertools import groupby

    def rle_encode(l):
        """ encode a list of strings with run-length compression """
        return ["%d:%s" % (len(list(group)), name) for name, group in groupby(l)]

    m = mapnik.Map(256, 256)
    try:
        mapnik.load_map(m, '../data/good_maps/agg_poly_gamma_map.xml')
        m.zoom_all()
        join_field = 'NAME'
        fg = []  # feature grid
        # Sample every 4th pixel of the 256x256 map, row by row.
        for y in range(0, 256, 4):
            for x in range(0, 256, 4):
                featureset = m.query_map_point(0, x, y)
                added = False
                for feature in featureset.features:
                    fg.append(feature[join_field])
                    added = True
                if not added:
                    # Empty cell: record a blank so run lengths stay aligned.
                    fg.append('')
        hit_list = '|'.join(rle_encode(fg))
        eq_(hit_list[:16], '730:|2:Greenland')
        eq_(hit_list[-12:], '1:Chile|812:')
    except RuntimeError as e:
        # only test datasources that we have installed
        if not 'Could not create datasource' in str(e):
            raise RuntimeError(str(e))
|
|
|
|
|
2010-12-10 21:11:09 +01:00
|
|
|
|
|
|
|
if __name__ == '__main__':
    setup()
    # Run every test_* function defined at module level.  Looking the
    # callables up in globals() avoids the eval() the original used.
    for run in dir():
        if 'test_' in run:
            globals()[run]()
|