Merge branch 'master' into mapnik-geometry

artemp 2015-02-23 10:06:43 +01:00
commit 45a75fb876
11 changed files with 162 additions and 100 deletions


@@ -80,7 +80,7 @@ pretty_dep_names = {
'cairo':'Cairo C library | configured using pkg-config | try setting PKG_CONFIG_PATH SCons option',
'pycairo':'Python bindings to Cairo library | configured using pkg-config | try setting PKG_CONFIG_PATH SCons option',
'proj':'Proj.4 C Projections library | configure with PROJ_LIBS & PROJ_INCLUDES | more info: http://trac.osgeo.org/proj/',
'pg':'Postgres C Library required for PostGIS plugin | configure with pg_config program | more info: https://github.com/mapnik/mapnik/wiki/PostGIS',
'pg':'Postgres C Library required for PostGIS plugin | configure with pg_config program or configure with PG_LIBS & PG_INCLUDES | more info: https://github.com/mapnik/mapnik/wiki/PostGIS',
'sqlite3':'SQLite3 C Library | configure with SQLITE_LIBS & SQLITE_INCLUDES | more info: https://github.com/mapnik/mapnik/wiki/SQLite',
'jpeg':'JPEG C library | configure with JPEG_LIBS & JPEG_INCLUDES',
'tiff':'TIFF C library | configure with TIFF_LIBS & TIFF_INCLUDES',
@@ -93,6 +93,7 @@ pretty_dep_names = {
'm':'Basic math library, part of C++ stlib',
'pkg-config':'pkg-config tool | more info: http://pkg-config.freedesktop.org',
'pg_config':'pg_config program | try setting PG_CONFIG SCons option',
'pq':'libpq library (postgres client) | try setting PG_CONFIG SCons option or configure with PG_LIBS & PG_INCLUDES',
'xml2-config':'xml2-config program | try setting XML2_CONFIG SCons option or avoid the need for xml2-config command by configuring with XML2_LIBS & XML2_INCLUDES',
'libxml2':'libxml2 library | try setting XML2_CONFIG SCons option to point to location of xml2-config program or configure with XML2_LIBS & XML2_INCLUDES',
'gdal-config':'gdal-config program | try setting GDAL_CONFIG SCons option',
@@ -142,6 +143,9 @@ def init_environment(env):
env = Environment(ENV=os.environ)
init_environment(env)
def fix_path(path):
return os.path.abspath(path)
def color_print(color,text,newline=True):
# 1 - red
# 2 - green
@@ -357,6 +361,8 @@ opts.AddVariables(
BoolVariable('PROJ', 'Build Mapnik with proj4 support to enable transformations between many different projections', 'True'),
PathVariable('PROJ_INCLUDES', 'Search path for PROJ.4 include files', '/usr/include', PathVariable.PathAccept),
PathVariable('PROJ_LIBS', 'Search path for PROJ.4 library files', '/usr/' + LIBDIR_SCHEMA_DEFAULT, PathVariable.PathAccept),
('PG_INCLUDES', 'Search path for libpq (postgres client) include files', ''),
('PG_LIBS', 'Search path for libpq (postgres client) library files', ''),
('FREETYPE_INCLUDES', 'Search path for Freetype include files', ''),
('FREETYPE_LIBS', 'Search path for Freetype library files', ''),
('XML2_INCLUDES', 'Search path for libxml2 include files', ''),
@@ -678,8 +684,8 @@ def parse_pg_config(context, config):
if ret:
lib_path = call('%s --libdir' % env[config])
inc_path = call('%s --includedir' % env[config])
env.AppendUnique(CPPPATH = os.path.realpath(inc_path))
env.AppendUnique(LIBPATH = os.path.realpath(lib_path))
env.AppendUnique(CPPPATH = fix_path(inc_path))
env.AppendUnique(LIBPATH = fix_path(lib_path))
lpq = env['PLUGINS']['postgis']['lib']
env.Append(LIBS = lpq)
else:
@@ -780,8 +786,8 @@ def FindBoost(context, prefixes, thread_flag):
env['BOOST_APPEND'] = '-'.join(append_params)
msg += '\nFound boost lib name extension: %s' % env['BOOST_APPEND']
env.AppendUnique(CPPPATH = os.path.realpath(env['BOOST_INCLUDES']))
env.AppendUnique(LIBPATH = os.path.realpath(env['BOOST_LIBS']))
env.AppendUnique(CPPPATH = fix_path(env['BOOST_INCLUDES']))
env.AppendUnique(LIBPATH = fix_path(env['BOOST_LIBS']))
if env['COLOR_PRINT']:
msg = "\033[94m%s\033[0m" % (msg)
ret = context.Result(msg)
@@ -1161,7 +1167,7 @@ if not preconfigured:
# install prefix is a pre-pended base location to
# re-route the install and only intended for package building
# we normalize to ensure no trailing slash and proper pre-pending to the absolute prefix
install_prefix = os.path.normpath(os.path.realpath(env['DESTDIR'])) + os.path.realpath(env['PREFIX'])
install_prefix = os.path.normpath(fix_path(env['DESTDIR'])) + fix_path(env['PREFIX'])
env['INSTALL_PREFIX'] = strip_first(install_prefix,'//','/')
# all values from above based on install_prefix
# if env['DESTDIR'] == '/' these should be unchanged
@@ -1179,11 +1185,11 @@ if not preconfigured:
env['MAPNIK_LIB_NAME'] = '${SHLIBPREFIX}${MAPNIK_NAME}${SHLIBSUFFIX}'
if env['PKG_CONFIG_PATH']:
env['ENV']['PKG_CONFIG_PATH'] = os.path.realpath(env['PKG_CONFIG_PATH'])
env['ENV']['PKG_CONFIG_PATH'] = fix_path(env['PKG_CONFIG_PATH'])
# otherwise this variable == os.environ["PKG_CONFIG_PATH"]
if env['PATH']:
env['ENV']['PATH'] = os.path.realpath(env['PATH']) + ':' + env['ENV']['PATH']
env['ENV']['PATH'] = fix_path(env['PATH']) + ':' + env['ENV']['PATH']
if env['SYSTEM_FONTS']:
if not os.path.isdir(env['SYSTEM_FONTS']):
@@ -1232,8 +1238,8 @@ if not preconfigured:
for required in ('ICU', 'SQLITE', 'HB'):
inc_path = env['%s_INCLUDES' % required]
lib_path = env['%s_LIBS' % required]
env.AppendUnique(CPPPATH = os.path.realpath(inc_path))
env.AppendUnique(LIBPATH = os.path.realpath(lib_path))
env.AppendUnique(CPPPATH = fix_path(inc_path))
env.AppendUnique(LIBPATH = fix_path(lib_path))
REQUIRED_LIBSHEADERS = [
['z', 'zlib.h', True,'C'],
@@ -1245,10 +1251,10 @@ if not preconfigured:
REQUIRED_LIBSHEADERS.insert(0,['freetype','ft2build.h',True,'C'])
if env.get('FREETYPE_INCLUDES'):
inc_path = env['FREETYPE_INCLUDES']
env.AppendUnique(CPPPATH = os.path.realpath(inc_path))
env.AppendUnique(CPPPATH = fix_path(inc_path))
if env.get('FREETYPE_LIBS'):
lib_path = env['FREETYPE_LIBS']
env.AppendUnique(LIBPATH = os.path.realpath(lib_path))
env.AppendUnique(LIBPATH = fix_path(lib_path))
elif conf.parse_config('FREETYPE_CONFIG'):
# check if freetype links to bz2
if env['RUNTIME_LINK'] == 'static':
@@ -1268,10 +1274,10 @@ if not preconfigured:
REQUIRED_LIBSHEADERS.insert(0,['libxml2','libxml/parser.h',True,'C'])
if env.get('XML2_INCLUDES'):
inc_path = env['XML2_INCLUDES']
env.AppendUnique(CPPPATH = os.path.realpath(inc_path))
env.AppendUnique(CPPPATH = fix_path(inc_path))
if env.get('XML2_LIBS'):
lib_path = env['XML2_LIBS']
env.AppendUnique(LIBPATH = os.path.realpath(lib_path))
env.AppendUnique(LIBPATH = fix_path(lib_path))
elif conf.parse_config('XML2_CONFIG',checks='--cflags'):
env['HAS_LIBXML2'] = True
else:
@@ -1288,8 +1294,8 @@ if not preconfigured:
OPTIONAL_LIBSHEADERS.append(['jpeg', ['stdio.h', 'jpeglib.h'], False,'C','-DHAVE_JPEG'])
inc_path = env['%s_INCLUDES' % 'JPEG']
lib_path = env['%s_LIBS' % 'JPEG']
env.AppendUnique(CPPPATH = os.path.realpath(inc_path))
env.AppendUnique(LIBPATH = os.path.realpath(lib_path))
env.AppendUnique(CPPPATH = fix_path(inc_path))
env.AppendUnique(LIBPATH = fix_path(lib_path))
else:
env['SKIPPED_DEPS'].extend(['jpeg'])
@@ -1297,8 +1303,8 @@ if not preconfigured:
OPTIONAL_LIBSHEADERS.append(['proj', 'proj_api.h', False,'C','-DMAPNIK_USE_PROJ4'])
inc_path = env['%s_INCLUDES' % 'PROJ']
lib_path = env['%s_LIBS' % 'PROJ']
env.AppendUnique(CPPPATH = os.path.realpath(inc_path))
env.AppendUnique(LIBPATH = os.path.realpath(lib_path))
env.AppendUnique(CPPPATH = fix_path(inc_path))
env.AppendUnique(LIBPATH = fix_path(lib_path))
else:
env['SKIPPED_DEPS'].extend(['proj'])
@@ -1306,8 +1312,8 @@ if not preconfigured:
OPTIONAL_LIBSHEADERS.append(['png', 'png.h', False,'C','-DHAVE_PNG'])
inc_path = env['%s_INCLUDES' % 'PNG']
lib_path = env['%s_LIBS' % 'PNG']
env.AppendUnique(CPPPATH = os.path.realpath(inc_path))
env.AppendUnique(LIBPATH = os.path.realpath(lib_path))
env.AppendUnique(CPPPATH = fix_path(inc_path))
env.AppendUnique(LIBPATH = fix_path(lib_path))
else:
env['SKIPPED_DEPS'].extend(['png'])
@@ -1315,8 +1321,8 @@ if not preconfigured:
OPTIONAL_LIBSHEADERS.append(['webp', 'webp/decode.h', False,'C','-DHAVE_WEBP'])
inc_path = env['%s_INCLUDES' % 'WEBP']
lib_path = env['%s_LIBS' % 'WEBP']
env.AppendUnique(CPPPATH = os.path.realpath(inc_path))
env.AppendUnique(LIBPATH = os.path.realpath(lib_path))
env.AppendUnique(CPPPATH = fix_path(inc_path))
env.AppendUnique(LIBPATH = fix_path(lib_path))
else:
env['SKIPPED_DEPS'].extend(['webp'])
@@ -1324,8 +1330,8 @@ if not preconfigured:
OPTIONAL_LIBSHEADERS.append(['tiff', 'tiff.h', False,'C','-DHAVE_TIFF'])
inc_path = env['%s_INCLUDES' % 'TIFF']
lib_path = env['%s_LIBS' % 'TIFF']
env.AppendUnique(CPPPATH = os.path.realpath(inc_path))
env.AppendUnique(LIBPATH = os.path.realpath(lib_path))
env.AppendUnique(CPPPATH = fix_path(inc_path))
env.AppendUnique(LIBPATH = fix_path(lib_path))
else:
env['SKIPPED_DEPS'].extend(['tiff'])
@@ -1473,7 +1479,22 @@ if not preconfigured:
else:
details['lib'] = libname
elif plugin == 'postgis' or plugin == 'pgraster':
conf.parse_pg_config('PG_CONFIG')
if env.get('PG_LIBS') or env.get('PG_INCLUDES'):
libname = details['lib']
if env.get('PG_INCLUDES'):
inc_path = env['PG_INCLUDES']
env.AppendUnique(CPPPATH = fix_path(inc_path))
if env.get('PG_LIBS'):
lib_path = env['PG_LIBS']
env.AppendUnique(LIBPATH = fix_path(lib_path))
if not conf.CheckLibWithHeader(libname, details['inc'], details['lang']):
env['SKIPPED_DEPS'].append(libname)
if libname in env['LIBS']:
env['LIBS'].remove(libname)
else:
details['lib'] = libname
else:
conf.parse_pg_config('PG_CONFIG')
elif plugin == 'ogr':
if conf.ogr_enabled():
if conf.parse_config('GDAL_CONFIG',checks='--libs'):
@@ -1493,8 +1514,8 @@ if not preconfigured:
# to the beginning of the path list even if they already exist
incpath = env['%s_INCLUDES' % details['path']]
libpath = env['%s_LIBS' % details['path']]
env.PrependUnique(CPPPATH = os.path.realpath(incpath),delete_existing=True)
env.PrependUnique(LIBPATH = os.path.realpath(libpath),delete_existing=True)
env.PrependUnique(CPPPATH = fix_path(incpath),delete_existing=True)
env.PrependUnique(LIBPATH = fix_path(libpath),delete_existing=True)
if not conf.CheckLibWithHeader(details['lib'], details['inc'], details['lang']):
env.Replace(**backup)
env['SKIPPED_DEPS'].append(details['lib'])
@@ -1542,8 +1563,8 @@ if not preconfigured:
if env['PGSQL2SQLITE']:
if 'sqlite3' not in env['LIBS']:
env.AppendUnique(LIBS='sqlite3')
env.AppendUnique(CPPPATH = os.path.realpath(env['SQLITE_INCLUDES']))
env.AppendUnique(LIBPATH = os.path.realpath(env['SQLITE_LIBS']))
env.AppendUnique(CPPPATH = fix_path(env['SQLITE_INCLUDES']))
env.AppendUnique(LIBPATH = fix_path(env['SQLITE_LIBS']))
if 'pq' not in env['LIBS']:
if not conf.parse_pg_config('PG_CONFIG'):
env['PGSQL2SQLITE'] = False
@@ -1563,11 +1584,11 @@ if not preconfigured:
if env['CAIRO_LIBS'] or env['CAIRO_INCLUDES']:
c_inc = env['CAIRO_INCLUDES']
if env['CAIRO_LIBS']:
env["CAIRO_LIBPATHS"].append(os.path.realpath(env['CAIRO_LIBS']))
env["CAIRO_LIBPATHS"].append(fix_path(env['CAIRO_LIBS']))
if not env['CAIRO_INCLUDES']:
c_inc = env['CAIRO_LIBS'].replace('lib','',1)
if c_inc:
c_inc = os.path.normpath(os.path.realpath(env['CAIRO_INCLUDES']))
c_inc = os.path.normpath(fix_path(env['CAIRO_INCLUDES']))
if c_inc.endswith('include'):
c_inc = os.path.dirname(c_inc)
env["CAIRO_CPPPATHS"].extend(
@@ -1718,7 +1739,7 @@ if not preconfigured:
if env['MISSING_DEPS']:
# if required dependencies are missing, print warnings and then let SCons finish without building or saving local config
color_print(1,'\nExiting... the following required dependencies were not found:\n - %s' % '\n - '.join([pretty_dep(dep) for dep in env['MISSING_DEPS']]))
color_print(1,"\nSee '%s' for details on possible problems." % (os.path.realpath(SCONS_LOCAL_LOG)))
color_print(1,"\nSee '%s' for details on possible problems." % (fix_path(SCONS_LOCAL_LOG)))
if env['SKIPPED_DEPS']:
color_print(4,'\nAlso, these OPTIONAL dependencies were not found:\n - %s' % '\n - '.join([pretty_dep(dep) for dep in env['SKIPPED_DEPS']]))
color_print(4,"\nSet custom paths to these libraries and header files on the command-line or in a file called '%s'" % SCONS_LOCAL_CONFIG)
@@ -1850,7 +1871,7 @@ if not preconfigured:
# ugly hack needed until we have env specific conf
backup = env.Clone().Dictionary()
for pyinc in env['PYTHON_INCLUDES']:
env.AppendUnique(CPPPATH = os.path.realpath(pyinc))
env.AppendUnique(CPPPATH = fix_path(pyinc))
if not conf.CheckHeader(header='Python.h',language='C'):
color_print(1,'Could not find required header files for the Python language (version %s)' % env['PYTHON_VERSION'])
@@ -1916,11 +1937,11 @@ if not HELP_REQUESTED:
env['create_uninstall_target'] = create_uninstall_target
if env['PKG_CONFIG_PATH']:
env['ENV']['PKG_CONFIG_PATH'] = os.path.realpath(env['PKG_CONFIG_PATH'])
env['ENV']['PKG_CONFIG_PATH'] = fix_path(env['PKG_CONFIG_PATH'])
# otherwise this variable == os.environ["PKG_CONFIG_PATH"]
if env['PATH']:
env['ENV']['PATH'] = os.path.realpath(env['PATH']) + ':' + env['ENV']['PATH']
env['ENV']['PATH'] = fix_path(env['PATH']) + ':' + env['ENV']['PATH']
if env['PATH_REMOVE']:
for p in env['PATH_REMOVE'].split(':'):

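A note on the recurring SConstruct change above: every call to os.path.realpath is replaced by the new fix_path helper, which wraps os.path.abspath, so user-supplied paths are still normalized to absolute form but symlinks are no longer resolved. A minimal sketch of the difference, assuming a hypothetical /opt/local symlink pointing at /opt/local-2015.02:

import os

def fix_path(path):
    # normalize to an absolute path without resolving symlinks
    return os.path.abspath(path)

p = '/opt/local/lib/../include'
print(os.path.realpath(p))  # follows the symlink: /opt/local-2015.02/include
print(fix_path(p))          # keeps the symlink:   /opt/local/include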

@@ -67,23 +67,20 @@ function setup_nose() {
export PYTHONPATH=$(pwd)/nose-1.3.4:${PYTHONPATH}
}
MASON_LINKED_ABS=$(pwd)/mason_packages/.link
MASON_LINKED_REL=./mason_packages/.link
function make_config() {
local MASON_LINKED_REL=./mason_packages/.link
export C_INCLUDE_PATH="${MASON_LINKED_REL}/include"
export CPLUS_INCLUDE_PATH="${MASON_LINKED_REL}/include"
export LIBRARY_PATH="${MASON_LINKED_REL}/lib"
export PATH="${MASON_LINKED_REL}/bin":${PATH}
if [[ $(uname -s) == 'Darwin' ]]; then
export PATH_REPLACE="/Users/travis/build/mapbox/mason/mason_packages:./mason_packages"
local PATH_REPLACE="/Users/travis/build/mapbox/mason/mason_packages:./mason_packages"
else
export PATH_REPLACE="/home/travis/build/mapbox/mason/mason_packages:./mason_packages"
local PATH_REPLACE="/home/travis/build/mapbox/mason/mason_packages:./mason_packages"
fi
echo "
CXX = '$CXX'
CC = '$CC'
CUSTOM_CXXFLAGS = '-fvisibility=hidden -fvisibility-inlines-hidden -DU_CHARSET_IS_UTF8=1'
CUSTOM_LDFLAGS = '-L${MASON_LINKED_REL}/lib'
RUNTIME_LINK = 'static'
INPUT_PLUGINS = 'all'
PATH = '${MASON_LINKED_REL}/bin'
@@ -106,6 +103,8 @@ WEBP_INCLUDES = '${MASON_LINKED_REL}/include'
WEBP_LIBS = '${MASON_LINKED_REL}/lib'
PROJ_INCLUDES = '${MASON_LINKED_REL}/include'
PROJ_LIBS = '${MASON_LINKED_REL}/lib'
PG_INCLUDES = '${MASON_LINKED_REL}/include'
PG_LIBS = '${MASON_LINKED_REL}/lib'
FREETYPE_INCLUDES = '${MASON_LINKED_REL}/include/freetype2'
FREETYPE_LIBS = '${MASON_LINKED_REL}/lib'
XML2_INCLUDES = '${MASON_LINKED_REL}/include/libxml2'
@@ -127,7 +126,6 @@ SAMPLE_INPUT_PLUGINS = True
}
function setup_runtime_settings() {
local MASON_LINKED_ABS=$(pwd)/mason_packages/.link
export PROJ_LIB=${MASON_LINKED_ABS}/share/proj
export ICU_DATA=${MASON_LINKED_ABS}/share/icu/54.1
export GDAL_DATA=${MASON_LINKED_ABS}/share/gdal

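The two new PG_* entries above feed the PG_INCLUDES/PG_LIBS SCons options added in the SConstruct hunks earlier, letting the build locate libpq without running pg_config. A simplified, self-contained sketch of how such options are declared and applied when run under scons (the standalone config.py name and this stripped-down flow are illustrative, not Mapnik's full configure logic):

# SConstruct sketch (run with `scons`), assuming a config.py holding
# PG_INCLUDES = '...' and PG_LIBS = '...' assignments like the ones generated above
from SCons.Script import Environment, Variables

opts = Variables('config.py')
opts.AddVariables(
    ('PG_INCLUDES', 'Search path for libpq (postgres client) include files', ''),
    ('PG_LIBS', 'Search path for libpq (postgres client) library files', ''),
)
env = Environment(variables=opts)
if env.get('PG_INCLUDES'):
    env.AppendUnique(CPPPATH=env['PG_INCLUDES'])
if env.get('PG_LIBS'):
    env.AppendUnique(LIBPATH=env['PG_LIBS'])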

@@ -45,37 +45,9 @@ using mapnik::layer_descriptor;
using mapnik::datasource_exception;
/*
* Opens a GDALDataset and returns a pointer to it.
* Caller is responsible for calling GDALClose on it
*/
inline GDALDataset* gdal_datasource::open_dataset() const
{
MAPNIK_LOG_DEBUG(gdal) << "gdal_datasource: Opening " << dataset_name_;
GDALDataset *dataset;
#if GDAL_VERSION_NUM >= 1600
if (shared_dataset_)
{
dataset = reinterpret_cast<GDALDataset*>(GDALOpenShared((dataset_name_).c_str(), GA_ReadOnly));
}
else
#endif
{
dataset = reinterpret_cast<GDALDataset*>(GDALOpen((dataset_name_).c_str(), GA_ReadOnly));
}
if (! dataset)
{
throw datasource_exception(CPLGetLastErrorMsg());
}
return dataset;
}
gdal_datasource::gdal_datasource(parameters const& params)
: datasource(params),
dataset_(nullptr),
desc_(gdal_datasource::name(), "utf-8"),
nodata_value_(params.get<double>("nodata")),
nodata_tolerance_(*params.get<double>("nodata_tolerance",1e-12))
@@ -104,11 +76,27 @@ gdal_datasource::gdal_datasource(parameters const& params)
shared_dataset_ = *params.get<mapnik::boolean_type>("shared", false);
band_ = *params.get<mapnik::value_integer>("band", -1);
GDALDataset *dataset = open_dataset();
#if GDAL_VERSION_NUM >= 1600
if (shared_dataset_)
{
dataset_ = reinterpret_cast<GDALDataset*>(GDALOpenShared((dataset_name_).c_str(), GA_ReadOnly));
}
else
#endif
{
dataset_ = reinterpret_cast<GDALDataset*>(GDALOpen((dataset_name_).c_str(), GA_ReadOnly));
}
nbands_ = dataset->GetRasterCount();
width_ = dataset->GetRasterXSize();
height_ = dataset->GetRasterYSize();
if (! dataset_)
{
throw datasource_exception(CPLGetLastErrorMsg());
}
MAPNIK_LOG_DEBUG(gdal) << "gdal_featureset: opened Dataset=" << dataset_;
nbands_ = dataset_->GetRasterCount();
width_ = dataset_->GetRasterXSize();
height_ = dataset_->GetRasterYSize();
desc_.add_descriptor(mapnik::attribute_descriptor("nodata", mapnik::Double));
double tr[6];
@@ -140,7 +128,7 @@ gdal_datasource::gdal_datasource(parameters const& params)
}
else
{
if (dataset->GetGeoTransform(tr) != CPLE_None)
if (dataset_->GetGeoTransform(tr) != CPLE_None)
{
MAPNIK_LOG_DEBUG(gdal) << "gdal_datasource GetGeotransform failure gives="
<< tr[0] << "," << tr[1] << ","
@@ -187,8 +175,6 @@ gdal_datasource::gdal_datasource(parameters const& params)
extent_.init(x0, y0, x1, y1);
}
GDALClose(dataset);
MAPNIK_LOG_DEBUG(gdal) << "gdal_datasource: Raster Size=" << width_ << "," << height_;
MAPNIK_LOG_DEBUG(gdal) << "gdal_datasource: Raster Extent=" << extent_;
@@ -196,6 +182,8 @@ gdal_datasource::gdal_datasource(parameters const& params)
gdal_datasource::~gdal_datasource()
{
MAPNIK_LOG_DEBUG(gdal) << "gdal_featureset: Closing Dataset=" << dataset_;
GDALClose(dataset_);
}
datasource::datasource_t gdal_datasource::type() const
@@ -232,7 +220,7 @@ featureset_ptr gdal_datasource::features(query const& q) const
gdal_query gq = q;
// TODO - move to std::make_shared, but must reduce # of args to <= 9
return featureset_ptr(new gdal_featureset(*open_dataset(),
return featureset_ptr(new gdal_featureset(*dataset_,
band_,
gq,
extent_,
@@ -254,7 +242,7 @@ featureset_ptr gdal_datasource::features_at_point(coord2d const& pt, double tol)
gdal_query gq = pt;
// TODO - move to std::make_shared, but must reduce # of args to <= 9
return featureset_ptr(new gdal_featureset(*open_dataset(),
return featureset_ptr(new gdal_featureset(*dataset_,
band_,
gq,
extent_,

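Net effect of the gdal_datasource changes above: the dataset is now opened once in the constructor (with GDALOpenShared when the 'shared' parameter is set, plain GDALOpen otherwise), stored in the new dataset_ member, reused by every featureset, and closed only in the destructor, instead of being re-opened for each features() call. A rough illustration of shared versus exclusive opens using GDAL's Python bindings (the file path is hypothetical):

from osgeo import gdal

path = '/data/raster.tif'  # hypothetical raster

ds_excl = gdal.Open(path, gdal.GA_ReadOnly)          # plain GDALOpen: a private handle
ds_shared = gdal.OpenShared(path, gdal.GA_ReadOnly)  # GDALOpenShared: repeated opens of the
                                                      # same file reuse one underlying dataset

print(ds_shared.RasterCount, ds_shared.RasterXSize, ds_shared.RasterYSize)

# the C++ destructor calls GDALClose(dataset_); in Python, dropping the
# references releases the handles
del ds_excl, ds_shared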

@@ -56,6 +56,7 @@ public:
mapnik::layer_descriptor get_descriptor() const;
private:
GDALDataset* open_dataset() const;
GDALDataset* dataset_;
mapnik::box2d<double> extent_;
std::string dataset_name_;
int band_;


@@ -80,7 +80,6 @@ gdal_featureset::~gdal_featureset()
{
MAPNIK_LOG_DEBUG(gdal) << "gdal_featureset: Closing Dataset=" << &dataset_;
GDALClose(&dataset_);
}
feature_ptr gdal_featureset::next()
@@ -376,7 +375,10 @@ feature_ptr gdal_featureset::get_feature(mapnik::query const& q)
raster_nodata = red->GetNoDataValue(&raster_has_nodata);
GDALColorTable *color_table = red->GetColorTable();
bool has_nodata = nodata_value_ || raster_has_nodata;
if (has_nodata && !color_table)
// in the Byte case the alpha channel can be derived from the nodata value
// by reusing the R,G,B bands read directly below, so skip the extra read here
if (has_nodata && !color_table && red->GetRasterDataType() != GDT_Byte)
{
double apply_nodata = nodata_value_ ? *nodata_value_ : raster_nodata;
// read the data in and create an alpha channel from the nodata values
@@ -402,20 +404,62 @@ feature_ptr gdal_featureset::get_feature(mapnik::query const& q)
}
}
}
raster_io_error = red->RasterIO(GF_Read, x_off, y_off, width, height, image.getBytes() + 0,
image.width(), image.height(), GDT_Byte, 4, 4 * image.width());
if (raster_io_error == CE_Failure) {
throw datasource_exception(CPLGetLastErrorMsg());
/* Prefer dataset-level RasterIO; it handles 99.9% of cases */
if( red->GetBand() == 1 && green->GetBand() == 2 && blue->GetBand() == 3 )
{
int nBandsToRead = 3;
if( alpha != NULL && alpha->GetBand() == 4 && !raster_has_nodata )
{
nBandsToRead = 4;
alpha = NULL; // to avoid reading it again afterwards
}
raster_io_error = dataset_.RasterIO(GF_Read, x_off, y_off, width, height,
image.getBytes(),
image.width(), image.height(), GDT_Byte,
nBandsToRead, NULL,
4, 4 * image.width(), 1);
if (raster_io_error == CE_Failure) {
throw datasource_exception(CPLGetLastErrorMsg());
}
}
raster_io_error = green->RasterIO(GF_Read, x_off, y_off, width, height, image.getBytes() + 1,
image.width(), image.height(), GDT_Byte, 4, 4 * image.width());
if (raster_io_error == CE_Failure) {
throw datasource_exception(CPLGetLastErrorMsg());
else
{
raster_io_error = red->RasterIO(GF_Read, x_off, y_off, width, height, image.getBytes() + 0,
image.width(), image.height(), GDT_Byte, 4, 4 * image.width());
if (raster_io_error == CE_Failure) {
throw datasource_exception(CPLGetLastErrorMsg());
}
raster_io_error = green->RasterIO(GF_Read, x_off, y_off, width, height, image.getBytes() + 1,
image.width(), image.height(), GDT_Byte, 4, 4 * image.width());
if (raster_io_error == CE_Failure) {
throw datasource_exception(CPLGetLastErrorMsg());
}
raster_io_error = blue->RasterIO(GF_Read, x_off, y_off, width, height, image.getBytes() + 2,
image.width(), image.height(), GDT_Byte, 4, 4 * image.width());
if (raster_io_error == CE_Failure) {
throw datasource_exception(CPLGetLastErrorMsg());
}
}
raster_io_error = blue->RasterIO(GF_Read, x_off, y_off, width, height, image.getBytes() + 2,
image.width(), image.height(), GDT_Byte, 4, 4 * image.width());
if (raster_io_error == CE_Failure) {
throw datasource_exception(CPLGetLastErrorMsg());
// If the alpha channel was skipped above (Byte case), derive it now from the nodata value
if (has_nodata && !color_table && red->GetRasterDataType() == GDT_Byte)
{
double apply_nodata = nodata_value_ ? *nodata_value_ : raster_nodata;
if( apply_nodata >= 0 && apply_nodata <= 255 )
{
int len = image.width() * image.height();
GByte* pabyBytes = (GByte*) image.getBytes();
for (int i = 0; i < len; ++i)
{
// TODO - we assume here the nodata value for the red band applies to all bands
// more details about this at http://trac.osgeo.org/gdal/ticket/2734
if (std::fabs(apply_nodata - pabyBytes[4*i]) < nodata_tolerance_)
pabyBytes[4*i + 3] = 0;
else
pabyBytes[4*i + 3] = 255;
}
}
}
}
else if (grey)

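To summarize the gdal_featureset change above: for GDT_Byte rasters the R,G,B(,A) planes are now filled with a single dataset-level RasterIO call, and when a nodata value applies the alpha channel is derived afterwards by comparing the already-read red byte against nodata within nodata_tolerance_. A small numpy sketch of that second pass (buffer shape and names are illustrative, not Mapnik's):

import numpy as np

def apply_nodata_alpha(rgba, nodata, tolerance=1e-12):
    # rgba: (H, W, 4) uint8 buffer whose R,G,B planes are already filled
    if 0 <= nodata <= 255:
        # like the C++ loop, only the red band is compared against nodata
        mask = np.abs(rgba[:, :, 0].astype(float) - nodata) < tolerance
        rgba[:, :, 3] = np.where(mask, 0, 255)
    return rgba

img = np.zeros((2, 2, 4), dtype=np.uint8)  # toy 2x2 image; red is 0 except:
img[0, 1, 0] = 128
img[1, :, 0] = 200
print(apply_nodata_alpha(img, nodata=0)[:, :, 3])  # [[  0 255] [255 255]]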

@@ -43,6 +43,16 @@ libraries.append(env['ICU_LIB_NAME'])
if env['PLUGIN_LINKING'] == 'shared':
libraries.append(env['MAPNIK_NAME'])
# libocci.dylib, at least for 11.2, links to libstdc++
# so we defer symbol resolution to runtime in order to
# dodge linking errors like
# Undefined symbols for architecture x86_64:
# "std::string::_Rep::_M_destroy(std::allocator<char> const&)", referenced from:
# RegisterClasses(oracle::occi::Environment*) in spatial_classesm.os
if env['PLATFORM'] == 'Darwin':
plugin_env.Append(LINKFLAGS='-undefined dynamic_lookup')
TARGET = plugin_env.SharedLibrary('../%s' % PLUGIN_NAME,
SHLIBPREFIX='',
SHLIBSUFFIX='.input',


@@ -79,7 +79,7 @@ occi_datasource::occi_datasource(parameters const& params)
desc_(occi_datasource::name(), *params.get<std::string>("encoding", "utf-8")),
use_wkb_(*params.get<mapnik::boolean_type>("use_wkb", false)),
row_limit_(*params.get<mapnik::value_integer>("row_limit", 0)),
row_prefetch_(*params.get<int>("row_prefetch", 100)),
row_prefetch_(*params.get<mapnik::value_integer>("row_prefetch", 100)),
pool_(0),
conn_(0)
{
@@ -107,7 +107,7 @@ occi_datasource::occi_datasource(parameters const& params)
boost::optional<std::string> ext = params.get<std::string>("extent");
if (ext) extent_initialized_ = extent_.from_string(*ext);
boost::optional<int> srid = params.get<int>("srid");
boost::optional<mapnik::value_integer> srid = params.get<mapnik::value_integer>("srid");
if (srid)
{
srid_ = *srid;
@@ -123,8 +123,8 @@ occi_datasource::occi_datasource(parameters const& params)
*params.get<std::string>("user"),
*params.get<std::string>("password"),
*params.get<std::string>("host"),
*params.get<int>("max_size", 5),
*params.get<int>("initial_size", 1),
*params.get<mapnik::value_integer>("max_size", 5),
*params.get<mapnik::value_integer>("initial_size", 1),
1);
}
catch (SQLException& ex)

4 binary image files changed (18-19 KiB each; before/after previews not shown).