#!/usr/bin/env python

from nose.tools import *

from utilities import execution_path, run_all

from Queue import Queue
import threading
import os, mapnik
import sqlite3

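# Tests for the Mapnik SQLite datasource: concurrent rtree index creation,
# and round-tripping a WKB point geometry through a sqlite3-created table.
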
def setup():
    # All of the paths used are relative; if we run the tests
    # from another directory we need to chdir()
    os.chdir(execution_path('.'))

NUM_THREADS = 10
TOTAL = 245
DB = '../data/sqlite/world.sqlite'
TABLE = 'world_merc'

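# worker for test_rtree_creation: opening the datasource is what triggers
# creation of the on-disk rtree index file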
def create_ds():
    ds = mapnik.SQLite(file=DB,table=TABLE)
    fs = ds.all_features()

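# the tests below only make sense if the sqlite input plugin is available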
if 'sqlite' in mapnik.DatasourceCache.plugin_names():

    def test_rtree_creation():
        index = DB + '.index'
        if os.path.exists(index):
            os.unlink(index)

        threads = []
        for i in range(NUM_THREADS):
            t = threading.Thread(target=create_ds)
            t.start()
            threads.append(t)

        for i in threads:
            i.join()
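
        # the index file should now exist and hold one rtree entry per feature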
        eq_(os.path.exists(index),True)
        conn = sqlite3.connect(index)
        cur = conn.cursor()
        try:
            cur.execute("Select count(*) from idx_%s_GEOMETRY" % TABLE.replace("'",""))
            conn.commit()
            eq_(cur.fetchone()[0],TOTAL)
        except sqlite3.OperationalError:
            # don't worry about testing # of index records if
            # python's sqlite module does not support rtree
            pass
        cur.close()

        ds = mapnik.SQLite(file=DB,table=TABLE)
        fs = ds.all_features()
        eq_(len(fs),TOTAL)

        os.unlink(index)
        ds = mapnik.SQLite(file=DB,table=TABLE,use_spatial_index=False)
        fs = ds.all_features()
        eq_(len(fs),TOTAL)
        eq_(os.path.exists(index),False)

        ds = mapnik.SQLite(file=DB,table=TABLE,use_spatial_index=True)
        fs = ds.all_features()
        for feat in fs:
            query = mapnik.Query(feat.envelope())
            selected = ds.features(query)
            eq_(len(selected.features)>=1,True)

        eq_(os.path.exists(index),True)
        os.unlink(index)


    def test_geometry_round_trip():
        test_db = '/tmp/mapnik-sqlite-point.db'
        ogr_metadata = True

        # create test db
        conn = sqlite3.connect(test_db)
        cur = conn.cursor()
        cur.execute('''
            CREATE TABLE IF NOT EXISTS point_table
            (id INTEGER PRIMARY KEY AUTOINCREMENT, geometry BLOB, name varchar)
            ''')
        # optional, but nice if we want to read the table with ogr as well
        if ogr_metadata:
            cur.execute('''CREATE TABLE IF NOT EXISTS geometry_columns (
                            f_table_name VARCHAR,
                            f_geometry_column VARCHAR,
                            geometry_type INTEGER,
                            coord_dimension INTEGER,
                            srid INTEGER,
                            geometry_format VARCHAR )''')
            cur.execute('''INSERT INTO geometry_columns
                            (f_table_name, f_geometry_column, geometry_format,
                            geometry_type, coord_dimension, srid) VALUES
                            ('point_table','geometry','WKB', 1, 1, 4326)''')
        conn.commit()
        cur.close()

        # add a point as wkb (using mapnik) to match how an ogr created db looks
        x = -122 # longitude
        y = 48 # latitude
        wkt = 'POINT(%s %s)' % (x,y)
        # little endian wkb (mapnik will auto-detect and read either little or big endian (XDR))
        wkb = mapnik.Path.from_wkt(wkt).to_wkb(mapnik.wkbByteOrder.NDR)
        values = (None,sqlite3.Binary(wkb),"test point")
        cur = conn.cursor()
        cur.execute('''INSERT into "point_table" (id,geometry,name) values (?,?,?)''',values)
        conn.commit()
        cur.close()

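        # WKB for a 2D point is a 1 byte byte-order flag, a 4 byte geometry type,
        # then two 8 byte doubles for x and y (21 bytes in total)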
        def make_wkb_point(x,y):
            import struct
            byteorder = 1 # little endian
            endianess = ''
            if byteorder == 1:
                endianess = '<'
            else:
                endianess = '>'
            geom_type = 1 # for a point
            return struct.pack('%sbldd' % endianess, byteorder, geom_type, x, y)

        # confirm the wkb matches a manually formed wkb
        wkb2 = make_wkb_point(x,y)
        eq_(wkb,wkb2)

        # ensure we can read this data back out properly with mapnik
        ds = mapnik.Datasource(**{'type':'sqlite','file':test_db,'table':'point_table'})
        fs = ds.featureset()
        feat = fs.next()
        eq_(feat.id(),1)
        eq_(feat['name'],'test point')
        geoms = feat.geometries()
        eq_(len(geoms),1)
        eq_(geoms.to_wkt(),'Point(-122 48)')

        # ensure it matches data read with just sqlite
        cur = conn.cursor()
        cur.execute('''SELECT * from point_table''')
        conn.commit()
        result = cur.fetchone()
        cur.close()
        feat_id = result[0]
        eq_(feat_id,1)
        name = result[2]
        eq_(name,'test point')
        geom_wkb_blob = result[1]
        eq_(str(geom_wkb_blob),geoms.to_wkb(mapnik.wkbByteOrder.NDR))
        new_geom = mapnik.Path.from_wkb(str(geom_wkb_blob))
        eq_(new_geom.to_wkt(),geoms.to_wkt())

        # cleanup
        os.unlink(test_db)
        os.unlink(test_db + '.index')

if __name__ == "__main__":
    setup()
    run_all(eval(x) for x in dir() if x.startswith("test_"))