Merge branch 'master' into geometry-refactor

artemp 2016-11-10 18:01:11 +01:00
commit fd36ac5279
26 changed files with 394 additions and 159 deletions


@ -8,9 +8,9 @@ For a complete change history, see the git log.
## 3.0.12
Released: xx-xx-xx
Released: September 8, 2016
(Packaged from xxxxxx)
(Packaged from 1d22d86)
#### Summary
@ -43,6 +43,8 @@ Released: xx-xx-xx
- BuildingSymbolizer - fixed closing segment of polygon in building symbolizer (ref #3505)
- Update dependencies versions
- Fixed warnings when compiling with g++5
- Fixed image swap (ref #3513)
- Stop bundling testdata in source tarball (ref #3335)
## 3.0.11
@ -457,7 +459,7 @@ Released June 3rd, 2013
Summary: The 2.2.0 release is primarily a performance and stability release. The code line represents development in the master branch since the release of 2.1.0 in Aug 2012 and therefore includes nearly a year of bug fixes and optimizations. Nearly 500 new tests have been added, bringing the total coverage to 925. Shapefile and PostGIS datasources have benefited from numerous stability fixes, 64 bit integer support has been added to support OSM data in the grid renderer and in attribute filtering, and many fixes have landed for higher quality output when using a custom `scale_factor` during rendering. Critical code paths have been optimized, including raster rendering, xml map loading, string to number conversion, vector reprojection when using `epsg:4326` and `epsg:3857`, `hextree` encoding, halo rendering, and rendering when using a custom `gamma`. Mapnik 2.2 also compiles faster than previous releases in the 2.x series and drops several unneeded and hard-to-install dependencies, making builds on OS X and Windows easier than in any previous release.
- Removed 3 depedencies without loosing any functionality: `ltdl`, `cairomm` and `libsigc++` (#1804,#806,#1681)
- Removed 3 dependencies without losing any functionality: `ltdl`, `cairomm` and `libsigc++` (#1804,#806,#1681)
- Added 64 bit integer support in expressions, feature ids, and the grid_renderer (#1661,#1662,#1662)


@ -19,6 +19,9 @@ install:
release:
./scripts/publish_release.sh
test-release:
./scripts/test_release.sh
python:
if [ ! -d ./bindings/python ]; then git clone git@github.com:mapnik/python-mapnik.git --recursive ./bindings/python; else (cd bindings/python && git pull && git submodule update --init); fi;
make


@ -9,7 +9,6 @@ _/ _/ _/_/_/ _/_/_/ _/ _/ _/ _/ _/
```
[![Build Status Linux](https://api.travis-ci.org/mapnik/mapnik.svg?branch=master)](http://travis-ci.org/mapnik/mapnik)
[![CircleCI](https://circleci.com/gh/mapnik/mapnik.svg?style=svg)](https://circleci.com/gh/mapnik/mapnik)
[![Build Status Windows](https://ci.appveyor.com/api/projects/status/hc9l7okdjtucfqqn?branch=master&svg=true)](https://ci.appveyor.com/project/Mapbox/mapnik)
[![Coverage Status](https://coveralls.io/repos/mapnik/mapnik/badge.svg?branch=master&service=github)](https://coveralls.io/github/mapnik/mapnik?branch=master)

deps/mapbox/variant vendored

@ -1 +1 @@
Subproject commit aaddee9270e3956cee98cdd7d04aea848d69f5f0
Subproject commit 84a426a31ad3b63c4b8f8d189841e19af48cda40

Binary file not shown.


@ -189,7 +189,10 @@ public:
}
for (unsigned i = 0; i < THE_MAX; ++i)
{
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wundefined-var-template"
if (str_copy == our_strings_[i])
#pragma GCC diagnostic pop
{
value_ = static_cast<ENUM>(i);
if (deprecated)
@ -199,14 +202,20 @@ public:
return;
}
}
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wundefined-var-template"
throw illegal_enum_value(std::string("Illegal enumeration value '") +
str + "' for enum " + our_name_);
#pragma GCC diagnostic pop
}
/** Returns the current value as a string identifier. */
std::string as_string() const
{
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wundefined-var-template"
return our_strings_[value_];
#pragma GCC diagnostic pop
}
/** Static helper function to iterate over valid identifiers. */
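`-Wundefined-var-template` is a clang diagnostic that fires when a static data member of a class template (here `our_strings_`) is used without a visible definition; the per-instantiation definitions live in other translation units, so each use is wrapped in a push/ignored/pop. A minimal sketch of the pattern — `enum_wrapper` and `colors` are hypothetical stand-ins for Mapnik's `enumeration`, and GCC, which does not know this option, will at most note it under `-Wpragmas`:

```cpp
#include <cstddef>

// The static member is only declared here; each instantiation is defined
// in some other translation unit, which is exactly what trips clang's
// -Wundefined-var-template at every point of use.
template <typename ENUM, std::size_t THE_MAX>
struct enum_wrapper
{
    static const char* our_strings_[THE_MAX + 1];

    const char* as_string(std::size_t value) const
    {
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wundefined-var-template"
        return our_strings_[value]; // use without a visible definition
#pragma GCC diagnostic pop
    }
};

enum class colors : std::size_t { red, green };

// The definition that would normally live in a separate .cpp:
template <>
const char* enum_wrapper<colors, 2>::our_strings_[3] = {"red", "green", nullptr};

int main()
{
    return enum_wrapper<colors, 2>{}.as_string(0)[0] == 'r' ? 0 : 1;
}
```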


@ -266,6 +266,7 @@ void feature_style_processor<Processor>::prepare_layer(layer_rendering_material
}
box2d<double> layer_ext = lay.envelope();
const box2d<double> buffered_query_ext_map_srs = buffered_query_ext;
bool fw_success = false;
bool early_return = false;
@ -281,9 +282,9 @@ void feature_style_processor<Processor>::prepare_layer(layer_rendering_material
early_return = true;
}
// next try intersection of layer extent back projected into map srs
else if (prj_trans.backward(layer_ext, PROJ_ENVELOPE_POINTS) && buffered_query_ext.intersects(layer_ext))
else if (prj_trans.backward(layer_ext, PROJ_ENVELOPE_POINTS) && buffered_query_ext_map_srs.intersects(layer_ext))
{
layer_ext.clip(buffered_query_ext);
layer_ext.clip(buffered_query_ext_map_srs);
// forward project layer extent back into native projection
if (! prj_trans.forward(layer_ext, PROJ_ENVELOPE_POINTS))
{
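The key to this fix: `prj_trans.backward(layer_ext, ...)` mutates `layer_ext` in place, so the subsequent intersection and clip must run against a saved copy of the buffered query extent that is still in map srs. A minimal sketch of the invariant with a stripped-down `box2d` (a stand-in for `mapnik::box2d<double>`, not the real class):

```cpp
#include <algorithm>
#include <cassert>

// Stand-in for mapnik::box2d<double>: just enough for intersect/clip.
struct box2d
{
    double minx, miny, maxx, maxy;

    bool intersects(box2d const& o) const
    {
        return !(o.minx > maxx || o.maxx < minx ||
                 o.miny > maxy || o.maxy < miny);
    }
    void clip(box2d const& o)
    {
        minx = std::max(minx, o.minx); miny = std::max(miny, o.miny);
        maxx = std::min(maxx, o.maxx); maxy = std::min(maxy, o.maxy);
    }
};

int main()
{
    box2d buffered_query_ext{0, 0, 10, 10};                       // map srs
    const box2d buffered_query_ext_map_srs = buffered_query_ext;  // saved copy
    box2d layer_ext{5, 5, 20, 20}; // pretend backward() produced this, map srs

    // compare and clip like with like: both boxes are in map srs
    if (buffered_query_ext_map_srs.intersects(layer_ext))
        layer_ext.clip(buffered_query_ext_map_srs);

    assert(layer_ext.maxx == 10 && layer_ext.maxy == 10);
}
```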


@ -76,7 +76,6 @@ public:
private:
detail::image_dimensions<65535> dimensions_;
detail::buffer buffer_;
pixel_type *pData_;
double offset_;
double scaling_;
bool premultiplied_alpha_;


@ -390,7 +390,7 @@ void apply_convolution_3x3(Src const& src_view, Dst & dst_view, Filter const& fi
}
template <typename Src, typename Filter>
void apply_filter(Src & src, Filter const& filter)
void apply_filter(Src & src, Filter const& filter, double /*scale_factor*/)
{
demultiply_alpha(src);
double_buffer<Src> tb(src);
@ -398,12 +398,12 @@ void apply_filter(Src & src, Filter const& filter)
}
template <typename Src>
void apply_filter(Src & src, agg_stack_blur const& op)
void apply_filter(Src & src, agg_stack_blur const& op, double scale_factor)
{
premultiply_alpha(src);
agg::rendering_buffer buf(src.bytes(),src.width(),src.height(), src.row_size());
agg::pixfmt_rgba32_pre pixf(buf);
agg::stack_blur_rgba32(pixf,op.rx,op.ry);
agg::stack_blur_rgba32(pixf, op.rx * scale_factor, op.ry * scale_factor);
}
inline double channel_delta(double source, double match)
@ -420,7 +420,7 @@ inline uint8_t apply_alpha_shift(double source, double match, double alpha)
}
template <typename Src>
void apply_filter(Src & src, color_to_alpha const& op)
void apply_filter(Src & src, color_to_alpha const& op, double /*scale_factor*/)
{
using namespace boost::gil;
bool premultiplied = src.get_premultiplied();
@ -481,7 +481,7 @@ void apply_filter(Src & src, color_to_alpha const& op)
}
template <typename Src>
void apply_filter(Src & src, colorize_alpha const& op)
void apply_filter(Src & src, colorize_alpha const& op, double /*scale_factor*/)
{
using namespace boost::gil;
std::ptrdiff_t size = op.size();
@ -590,7 +590,7 @@ void apply_filter(Src & src, colorize_alpha const& op)
}
template <typename Src>
void apply_filter(Src & src, scale_hsla const& transform)
void apply_filter(Src & src, scale_hsla const& transform, double /*scale_factor*/)
{
using namespace boost::gil;
bool tinting = !transform.is_identity();
@ -802,25 +802,25 @@ void color_blind_filter(Src & src, ColorBlindFilter const& op)
}
template <typename Src>
void apply_filter(Src & src, color_blind_protanope const& op)
void apply_filter(Src & src, color_blind_protanope const& op, double /*scale_factor*/)
{
color_blind_filter(src, op);
}
template <typename Src>
void apply_filter(Src & src, color_blind_deuteranope const& op)
void apply_filter(Src & src, color_blind_deuteranope const& op, double /*scale_factor*/)
{
color_blind_filter(src, op);
}
template <typename Src>
void apply_filter(Src & src, color_blind_tritanope const& op)
void apply_filter(Src & src, color_blind_tritanope const& op, double /*scale_factor*/)
{
color_blind_filter(src, op);
}
template <typename Src>
void apply_filter(Src & src, gray const& /*op*/)
void apply_filter(Src & src, gray const& /*op*/, double /*scale_factor*/)
{
premultiply_alpha(src);
using namespace boost::gil;
@ -871,7 +871,7 @@ void x_gradient_impl(Src const& src_view, Dst const& dst_view)
}
template <typename Src>
void apply_filter(Src & src, x_gradient const& /*op*/)
void apply_filter(Src & src, x_gradient const& /*op*/, double /*scale_factor*/)
{
premultiply_alpha(src);
double_buffer<Src> tb(src);
@ -879,7 +879,7 @@ void apply_filter(Src & src, x_gradient const& /*op*/)
}
template <typename Src>
void apply_filter(Src & src, y_gradient const& /*op*/)
void apply_filter(Src & src, y_gradient const& /*op*/, double /*scale_factor*/)
{
premultiply_alpha(src);
double_buffer<Src> tb(src);
@ -888,7 +888,7 @@ void apply_filter(Src & src, y_gradient const& /*op*/)
}
template <typename Src>
void apply_filter(Src & src, invert const& /*op*/)
void apply_filter(Src & src, invert const& /*op*/, double /*scale_factor*/)
{
premultiply_alpha(src);
using namespace boost::gil;
@ -916,16 +916,18 @@ void apply_filter(Src & src, invert const& /*op*/)
template <typename Src>
struct filter_visitor
{
filter_visitor(Src & src)
: src_(src) {}
filter_visitor(Src & src, double scale_factor)
: src_(src),
scale_factor_(scale_factor) {}
template <typename T>
void operator () (T const& filter) const
{
apply_filter(src_, filter);
apply_filter(src_, filter, scale_factor_);
}
Src & src_;
double scale_factor_;
};
struct filter_radius_visitor
@ -944,14 +946,14 @@ struct filter_radius_visitor
};
template<typename Src>
void filter_image(Src & src, std::string const& filter)
void filter_image(Src & src, std::string const& filter, double scale_factor=1)
{
std::vector<filter_type> filter_vector;
if(!parse_image_filters(filter, filter_vector))
{
throw std::runtime_error("Failed to parse filter argument in filter_image: '" + filter + "'");
}
filter_visitor<Src> visitor(src);
filter_visitor<Src> visitor(src, scale_factor);
for (filter_type const& filter_tag : filter_vector)
{
util::apply_visitor(visitor, filter_tag);
@ -959,7 +961,7 @@ void filter_image(Src & src, std::string const& filter)
}
template<typename Src>
Src filter_image(Src const& src, std::string const& filter)
Src filter_image(Src const& src, std::string const& filter, double scale_factor=1)
{
std::vector<filter_type> filter_vector;
if(!parse_image_filters(filter, filter_vector))
@ -967,7 +969,7 @@ Src filter_image(Src const& src, std::string const& filter)
throw std::runtime_error("Failed to parse filter argument in filter_image: '" + filter + "'");
}
Src new_src(src);
filter_visitor<Src> visitor(new_src);
filter_visitor<Src> visitor(new_src, scale_factor);
for (filter_type const& filter_tag : filter_vector)
{
util::apply_visitor(visitor, filter_tag);
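The pattern running through this whole file: every `apply_filter` overload gains a `scale_factor` parameter, and `filter_visitor` carries it so filters with pixel-space parameters (e.g. stack-blur radii) scale with the output resolution. A sketch of the visitor-with-state idea using `std::variant` in place of `mapnik::util::variant`; the filter types and `image` here are illustrative, not Mapnik's:

```cpp
#include <iostream>
#include <variant>
#include <vector>

struct stack_blur { double rx, ry; };
struct invert {};
using filter_type = std::variant<stack_blur, invert>;

struct image {}; // stand-in for mapnik::image_rgba8

// The visitor owns the extra state and hands it to every overload,
// mirroring filter_visitor(src, scale_factor) above.
struct filter_visitor
{
    image& img;
    double scale_factor;

    void operator()(stack_blur const& op) const
    {
        // radii are authored in style units; convert to device pixels
        std::cout << "blur rx=" << op.rx * scale_factor
                  << " ry=" << op.ry * scale_factor << "\n";
    }
    void operator()(invert const&) const { std::cout << "invert\n"; }
};

int main()
{
    image img;
    std::vector<filter_type> filters{stack_blur{2.0, 2.0}, invert{}};
    filter_visitor visitor{img, 2.0}; // e.g. rendering at scale_factor 2
    for (auto const& f : filters) std::visit(visitor, f);
}
```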


@ -62,7 +62,6 @@ template <typename T>
image<T>::image()
: dimensions_(0,0),
buffer_(0),
pData_(nullptr),
offset_(0.0),
scaling_(1.0),
premultiplied_alpha_(false),
@ -73,7 +72,6 @@ template <typename T>
image<T>::image(int width, int height, unsigned char* data, bool premultiplied, bool painted)
: dimensions_(width, height),
buffer_(data, width * height * sizeof(pixel_size)),
pData_(reinterpret_cast<pixel_type*>(buffer_.data())),
offset_(0.0),
scaling_(1.0),
premultiplied_alpha_(premultiplied),
@ -83,15 +81,14 @@ template <typename T>
image<T>::image(int width, int height, bool initialize, bool premultiplied, bool painted)
: dimensions_(width, height),
buffer_(dimensions_.width() * dimensions_.height() * pixel_size),
pData_(reinterpret_cast<pixel_type*>(buffer_.data())),
offset_(0.0),
scaling_(1.0),
premultiplied_alpha_(premultiplied),
painted_(painted)
{
if (pData_ && initialize)
if (initialize)
{
std::fill(pData_, pData_ + dimensions_.width() * dimensions_.height(), 0);
std::fill(begin(), end(), 0);
}
}
@ -99,7 +96,6 @@ template <typename T>
image<T>::image(image<T> const& rhs)
: dimensions_(rhs.dimensions_),
buffer_(rhs.buffer_),
pData_(reinterpret_cast<pixel_type*>(buffer_.data())),
offset_(rhs.offset_),
scaling_(rhs.scaling_),
premultiplied_alpha_(rhs.premultiplied_alpha_),
@ -109,14 +105,12 @@ template <typename T>
image<T>::image(image<T> && rhs) noexcept
: dimensions_(std::move(rhs.dimensions_)),
buffer_(std::move(rhs.buffer_)),
pData_(reinterpret_cast<pixel_type*>(buffer_.data())),
offset_(rhs.offset_),
scaling_(rhs.scaling_),
premultiplied_alpha_(rhs.premultiplied_alpha_),
painted_(rhs.painted_)
{
rhs.dimensions_ = { 0, 0 };
rhs.pData_ = nullptr;
}
template <typename T>
@ -153,14 +147,14 @@ template <typename T>
inline typename image<T>::pixel_type& image<T>::operator() (std::size_t i, std::size_t j)
{
assert(i < dimensions_.width() && j < dimensions_.height());
return pData_[j * dimensions_.width() + i];
return *get_row(j, i);
}
template <typename T>
inline const typename image<T>::pixel_type& image<T>::operator() (std::size_t i, std::size_t j) const
{
assert(i < dimensions_.width() && j < dimensions_.height());
return pData_[j * dimensions_.width() + i];
return *get_row(j, i);
}
template <typename T>
@ -190,19 +184,19 @@ inline std::size_t image<T>::row_size() const
template <typename T>
inline void image<T>::set(pixel_type const& t)
{
std::fill(pData_, pData_ + dimensions_.width() * dimensions_.height(), t);
std::fill(begin(), end(), t);
}
template <typename T>
inline const typename image<T>::pixel_type* image<T>::data() const
{
return pData_;
return reinterpret_cast<const pixel_type*>(buffer_.data());
}
template <typename T>
inline typename image<T>::pixel_type* image<T>::data()
{
return pData_;
return reinterpret_cast<pixel_type*>(buffer_.data());
}
template <typename T>
@ -219,40 +213,40 @@ inline unsigned char* image<T>::bytes()
// iterator interface
template <typename T>
inline typename image<T>::iterator image<T>::begin() { return pData_; }
inline typename image<T>::iterator image<T>::begin() { return data(); }
template <typename T>
inline typename image<T>::iterator image<T>::end() { return pData_ + dimensions_.width() * dimensions_.height(); }
inline typename image<T>::iterator image<T>::end() { return data() + dimensions_.width() * dimensions_.height(); }
template <typename T>
inline typename image<T>::const_iterator image<T>::begin() const { return pData_; }
inline typename image<T>::const_iterator image<T>::begin() const { return data(); }
template <typename T>
inline typename image<T>::const_iterator image<T>::end() const{ return pData_ + dimensions_.width() * dimensions_.height(); }
inline typename image<T>::const_iterator image<T>::end() const{ return data() + dimensions_.width() * dimensions_.height(); }
template <typename T>
inline typename image<T>::pixel_type const* image<T>::get_row(std::size_t row) const
{
return pData_ + row * dimensions_.width();
return data() + row * dimensions_.width();
}
template <typename T>
inline const typename image<T>::pixel_type* image<T>::get_row(std::size_t row, std::size_t x0) const
{
return pData_ + row * dimensions_.width() + x0;
return data() + row * dimensions_.width() + x0;
}
template <typename T>
inline typename image<T>::pixel_type* image<T>::get_row(std::size_t row)
{
return pData_ + row * dimensions_.width();
return data() + row * dimensions_.width();
}
template <typename T>
inline typename image<T>::pixel_type* image<T>::get_row(std::size_t row, std::size_t x0)
{
return pData_ + row * dimensions_.width() + x0;
return data() + row * dimensions_.width() + x0;
}
template <typename T>
@ -260,7 +254,7 @@ inline void image<T>::set_row(std::size_t row, pixel_type const* buf, std::size_
{
assert(row < dimensions_.height());
assert(size <= dimensions_.width());
std::copy(buf, buf + size, pData_ + row * dimensions_.width());
std::copy(buf, buf + size, get_row(row));
}
template <typename T>
@ -268,7 +262,7 @@ inline void image<T>::set_row(std::size_t row, std::size_t x0, std::size_t x1, p
{
assert(row < dimensions_.height());
assert ((x1 - x0) <= dimensions_.width() );
std::copy(buf, buf + (x1 - x0), pData_ + row * dimensions_.width() + x0);
std::copy(buf, buf + (x1 - x0), get_row(row, x0));
}
template <typename T>
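Why `pData_` had to go (this is the "Fixed image swap" entry, ref #3513): the cached pointer into `buffer_` was exchanged along with everything else during a swap or move, leaving each image's `pData_` pointing into the other image's buffer. Recomputing the pointer from `buffer_.data()` on every access, as `data()` now does, cannot go stale. A compact sketch of the hazard-free shape — `image_fixed` is a hypothetical reduction, not Mapnik's class:

```cpp
#include <cassert>
#include <cstdint>
#include <utility>
#include <vector>

struct image_fixed
{
    std::vector<std::uint8_t> buffer;

    // Derived on demand, never cached: stays valid across swap/move.
    std::uint8_t* data() { return buffer.data(); }
};

int main()
{
    image_fixed a{std::vector<std::uint8_t>(4, 1)};
    image_fixed b{std::vector<std::uint8_t>(4, 2)};

    std::swap(a, b); // with a cached pData_, both pointers would now point
                     // into the "wrong" buffers; derived data() stays correct
    assert(a.data()[0] == 2 && b.data()[0] == 1);
}
```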


@ -72,7 +72,8 @@ struct feature_grammar : qi::grammar<Iterator, void(FeatureType&), space_type>
generic_json<Iterator> json_;
// geoJSON
qi::rule<Iterator, void(FeatureType&),space_type> start;
qi::rule<Iterator, qi::locals<bool>, void(FeatureType&),space_type> feature;
qi::rule<Iterator, qi::locals<bool>, void(FeatureType&), space_type> feature;
qi::rule<Iterator, void(FeatureType&, bool&), space_type> feature_part;
qi::rule<Iterator, space_type> feature_type;
qi::rule<Iterator,void(FeatureType &),space_type> properties;
qi::rule<Iterator,qi::locals<std::string>, void(FeatureType &),space_type> attributes;


@ -41,6 +41,7 @@ feature_grammar<Iterator,FeatureType,ErrorHandler>::feature_grammar(mapnik::tran
qi::_4_type _4;
qi::_a_type _a;
qi::_r1_type _r1;
qi::_r2_type _r2;
qi::eps_type eps;
qi::char_type char_;
using qi::fail;
@ -55,16 +56,19 @@ feature_grammar<Iterator,FeatureType,ErrorHandler>::feature_grammar(mapnik::tran
start = feature(_r1);
feature = eps[_a = false] > lit('{') >
(feature_type[_a = true]
|
(lit("\"geometry\"") > lit(':') > geometry_grammar_[set_geometry(_r1, _1)])
|
properties(_r1)
|
json_.key_value) % lit(',')
feature_part(_r1, _a) % lit(',')
> eps(_a) > lit('}')
;
feature_part = feature_type[_r2 = true]
|
(lit("\"geometry\"") > lit(':') > geometry_grammar_[set_geometry(_r1, _1)])
|
properties(_r1)
|
json_.key_value
;
properties = lit("\"properties\"")
> lit(':') > ((lit('{') > -attributes(_r1) > lit('}')) | lit("null"))
;


@ -47,6 +47,7 @@ struct geometry_grammar :
geometry_grammar();
qi::rule<Iterator, mapnik::geometry::geometry<double>(), space_type> start;
qi::rule<Iterator, qi::locals<int, mapnik::json::coordinates>, mapnik::geometry::geometry<double>(), space_type> geometry;
qi::rule<Iterator, void(int&, mapnik::json::coordinates&, mapnik::geometry::geometry<double>&), space_type> geometry_part;
qi::rule<Iterator, mapnik::geometry::geometry_collection<double>(), space_type> geometry_collection;
qi::symbols<char, int> geometry_type_dispatch;
positions_grammar<Iterator> coordinates;


@ -46,21 +46,27 @@ geometry_grammar<Iterator, ErrorHandler>::geometry_grammar()
qi::_4_type _4;
qi::_a_type _a;
qi::_b_type _b;
qi::_r1_type _r1;
qi::_r2_type _r2;
qi::_r3_type _r3;
qi::eps_type eps;
using qi::fail;
using qi::on_error;
using phoenix::push_back;
start = geometry.alias() | lit("null");
geometry = lit('{')[_a = 0]
> (((lit("\"type\"") > lit(':') > geometry_type_dispatch[_a = _1])
|
(lit("\"coordinates\"") > lit(':') > coordinates[_b = _1])
|
(lit("\"geometries\"") > lit(':') > lit('[') > geometry_collection[_val = _1] > lit(']'))
|
json_.key_value) % lit(',')) [create_geometry(_val,_a,_b)]
> lit('}')
> (geometry_part(_a, _b, _val) % lit(','))[create_geometry(_val, _a, _b)]
> lit('}');
geometry_part = ((lit("\"type\"") > lit(':') > geometry_type_dispatch[_r1 = _1])
|
(lit("\"coordinates\"") > lit(':') > coordinates[_r2 = _1])
|
(lit("\"geometries\"") > lit(':') > lit('[') > geometry_collection[_r3 = _1] > lit(']'))
|
json_.key_value)
;
geometry_collection = geometry[push_back(_val, _1)] % lit(',')
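Both grammar refactors above follow the same recipe: a large inline alternative is hoisted into a named sub-rule (`feature_part`, `geometry_part`) that receives the enclosing rule's locals as inherited attributes (`_r1`, `_r2`, ...), so the state lives in one place while each rule stays readable. A minimal Spirit.Qi sketch of the pattern, assuming Boost.Spirit 2; `part` and `whole` are illustrative names, not the Mapnik rules:

```cpp
#include <boost/spirit/include/qi.hpp>
#include <cassert>
#include <string>

namespace qi = boost::spirit::qi;

int main()
{
    using iterator = std::string::const_iterator;

    // Sub-rule with an inherited bool& attribute (_r1), like feature_part:
    // it flips the flag when the mandatory member is seen.
    qi::rule<iterator, void(bool&)> part =
        qi::lit("\"type\"")[qi::_r1 = true] | +qi::alnum;

    // Enclosing rule owns the flag as a local (_a), passes it down,
    // and requires it to be set once the comma list is done.
    qi::rule<iterator, qi::locals<bool>> whole =
        qi::eps[qi::_a = false] >> (part(qi::_a) % ',') >> qi::eps(qi::_a);

    std::string ok = "\"type\",foo";
    iterator first = ok.cbegin(), last = ok.cend();
    assert(qi::parse(first, last, whole) && first == last);

    std::string bad = "foo,bar"; // no "type" member anywhere
    first = bad.cbegin(); last = bad.cend();
    assert(!qi::parse(first, last, whole));
}
```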


@ -24,10 +24,8 @@
#define MAPNIK_JSON_POSITIONS_GRAMMAR_HPP
// mapnik
#include <mapnik/util/variant.hpp>
#include <mapnik/json/positions.hpp>
#include <mapnik/json/error_handler.hpp>
#include <mapnik/geometry.hpp>
#pragma GCC diagnostic push
#include <mapnik/warning_ignore.hpp>
#include <boost/spirit/include/qi.hpp>


@ -108,16 +108,16 @@ struct attr_value_converter
};
geojson_datasource::geojson_datasource(parameters const& params)
: datasource(params),
type_(datasource::Vector),
desc_(geojson_datasource::name(),
*params.get<std::string>("encoding","utf-8")),
filename_(),
from_inline_string_(false),
extent_(),
features_(),
tree_(nullptr),
num_features_to_query_(*params.get<mapnik::value_integer>("num_features_to_query",5))
: datasource(params),
type_(datasource::Vector),
desc_(geojson_datasource::name(),
*params.get<std::string>("encoding","utf-8")),
filename_(),
from_inline_string_(false),
extent_(),
features_(),
tree_(nullptr),
num_features_to_query_(std::max(mapnik::value_integer(1), *params.get<mapnik::value_integer>("num_features_to_query", 5)))
{
boost::optional<std::string> inline_string = params.get<std::string>("inline");
if (!inline_string)
@ -261,8 +261,8 @@ void geojson_datasource::initialise_index(Iterator start, Iterator end)
std::size_t start_id = 1;
mapnik::json::default_feature_callback callback(features_);
bool result = boost::spirit::qi::phrase_parse(itr, end, (geojson_datasource_static_feature_callback_grammar)
(boost::phoenix::ref(ctx), boost::phoenix::ref(start_id), boost::phoenix::ref(callback)),
space);
(boost::phoenix::ref(ctx), boost::phoenix::ref(start_id), boost::phoenix::ref(callback)),
space);
if (!result || itr != end)
{
if (from_inline_string_) throw mapnik::datasource_exception("geojson_datasource: Failed to parse GeoJSON file from in-memory string");
@ -481,8 +481,8 @@ boost::optional<mapnik::datasource_geometry_t> geojson_datasource::get_geometry_
}
else if (cache_features_)
{
unsigned num_features = features_.size();
for (unsigned i = 0; i < num_features && i < num_features_to_query_; ++i)
std::size_t num_features = features_.size();
for (std::size_t i = 0; i < num_features && i < num_features_to_query_; ++i)
{
result = mapnik::util::to_ds_type(features_[i]->get_geometry());
if (result)
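Two small hardening changes land in this datasource: `num_features_to_query` is clamped to at least 1 at construction time (a 0 or negative value would leave the descriptor empty), and the sampling loop switches from `unsigned` to `std::size_t` to match `features_.size()`. A sketch of both together, with `value_integer` assumed to be a 64-bit signed integer as in Mapnik:

```cpp
#include <algorithm>
#include <cstdint>
#include <vector>

using value_integer = std::int64_t; // assumption: mapnik::value_integer

int main()
{
    value_integer requested = 0; // stand-in for *params.get(...)
    // clamp so the loop below always samples at least one feature
    value_integer num_features_to_query =
        std::max<value_integer>(1, requested);

    std::vector<int> features = {7, 8, 9};
    std::size_t num_features = features.size(); // std::size_t, not unsigned:
                                                // no truncation on 64-bit sizes
    for (std::size_t i = 0;
         i < num_features &&
         i < static_cast<std::size_t>(num_features_to_query); ++i)
    {
        (void)features[i]; // inspect the feature's attributes here
    }
}
```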


@ -3,19 +3,21 @@
set -eu
set -o pipefail
VERSION=$(git describe)
if [[ -d .git ]]; then
git submodule update --init
else
if [[ -f RELEASE_VERSION.md ]]; then
VERSION=$(cat RELEASE_VERSION.md)
if [[ ! -d ./test/data ]]; then
echo "Downloading unit test data from https://github.com/mapnik/test-data/archive/${VERSION}.tar.gz"
mkdir -p test/data/
curl -L -s https://github.com/mapnik/test-data/archive/${VERSION}.tar.gz | tar zxf - --strip-components=1 -C test/data/
curl -f -L -s https://github.com/mapnik/test-data/archive/${VERSION}.tar.gz | tar zxf - --strip-components=1 -C test/data/
fi
if [[ ! -d ./test/data-visual ]]; then
echo "Downloading visual test data from https://github.com/mapnik/test-data-visual/archive/${VERSION}.tar.gz"
mkdir -p test/data-visual/
curl -L -s https://github.com/mapnik/test-data-visual/archive/${VERSION}.tar.gz | tar zxf - --strip-components=1 -C test/data-visual/
curl -f -L -s https://github.com/mapnik/test-data-visual/archive/${VERSION}.tar.gz | tar zxf - --strip-components=1 -C test/data-visual/
fi
elif [[ -d .git ]]; then
git submodule update --init test/
else
echo "Expected either git clone directory (with .git) or release tarball with `RELEASE_VERSION.md` in root"
exit 1
fi


@ -1,51 +1,136 @@
#!/usr/bin/env bash
#!/bin/bash
set -eu
set -o pipefail
# for normal release leave empty
# for release candidate, add "-rcN"
export MAPNIK_VERSION=$(git describe)
if [[ $(git tag -l) =~ $MAPNIK_VERSION ]]; then echo yes;
echo "Success: found $MAPNIK_VERSION (result of git describe) in tags, continuing"
else
echo "error: $MAPNIK_VERSION (result of git describe) not in "git tag -l" output, aborting"
echo "You must create a valid annotated tag first, before running this ./scripts/publish_release.sh"
: '
Usage:
git tag v3.0.12-rc7 -a -m "Tagging v3.0.12-rc7"
./scripts/publish_release.sh
Note: before running this script you need to tag a new release or release candidate.
This script:
- Confirms that the current git checkout is a valid tag
- Downloads a fresh checkout into a /tmp directory
- Updates the submodules
- Confirms that the test-data and test-data-visual repos are also tagged, and tags them otherwise
- Removes the test-data and test-data-visual submodules, since they are large and can be downloaded on demand for released code
- Creates a tarball and uploads it to a DRAFT "github release"
After using this script:
- Go to https://github.com/mapnik/mapnik/releases and confirm that the draft release looks good, then publish it.
'
function step { >&2 echo -e "\033[1m\033[36m* $1\033[0m"; }
function step_error { >&2 echo -e "\033[1m\033[31m$1\033[0m"; }
if [[ ${GITHUB_TOKEN_MAPNIK_PUBLIC_REPO:-false} == false ]]; then
step_error "Please set GITHUB_TOKEN_MAPNIK_PUBLIC_REPO to a github token with 'public_repo' scope (create one at https://github.com/settings/tokens)"
exit 1
fi
export MAPNIK_VERSION=$(git describe)
if [[ $(git tag -l) =~ $MAPNIK_VERSION ]]; then
step "Success: found $MAPNIK_VERSION (result of git describe) in tags, continuing"
else
step_error "error: $MAPNIK_VERSION (result of git describe) not in "git tag -l" output, aborting"
step_error "You must create a valid annotated tag first, before running this ./scripts/publish_release.sh"
exit 1
fi
# alternatively set this to `git@github.com:mapnik/mapnik.git` to pull public tag
export ROOT_GIT_CLONE="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd ../ && pwd )"
export TARBALL_NAME="mapnik-${MAPNIK_VERSION}"
cd /tmp/
rm -rf ${TARBALL_NAME}
echo "Cloning ${MAPNIK_VERSION}"
git clone --depth 1 --branch ${MAPNIK_VERSION} git@github.com:mapnik/mapnik.git ${TARBALL_NAME}
step "Cloning ${MAPNIK_VERSION}"
git clone --depth 1 --branch ${MAPNIK_VERSION} ${ROOT_GIT_CLONE} ${TARBALL_NAME}
cd ${TARBALL_NAME}
step "Checking out ${MAPNIK_VERSION}"
git checkout "tags/${MAPNIK_VERSION}"
echo "updating submodules"
# TODO: skip initializing submodule if data is already tagged
# Will require bundling variant as well
git submodule update --depth 100 --init
step "checking submodules"
step "vendorizing and cleaning up mapbox variant"
git submodule update --init deps/mapbox/variant
rm -rf deps/mapbox/variant/.git
cd test/data/
git remote set-url origin git@github.com:mapnik/test-data
echo "tagging test data"
git tag ${MAPNIK_VERSION} -a -m "tagging for ${MAPNIK_VERSION}"
git push --tags
cd ../../
echo "removing test data submodule"
rm -rf test/data/
cd test/data-visual/
git remote set-url origin git@github.com:mapnik/test-data-visual
echo "tagging visual data"
git tag ${MAPNIK_VERSION} -a -m "tagging for ${MAPNIK_VERSION}"
git push --tags
cd ../../
echo "removing visual test data submodule"
rm -rf test/data-visual/
rm -f deps/mapbox/variant/*yml
rm -f deps/mapbox/variant/Jamroot
function check_and_tag() {
REPO_DIR=$1
REPO_NAME=$2
cmd="curl --fail -I https://github.com/mapnik/${REPO_NAME}/releases/tag/${MAPNIK_VERSION}"
if [[ $(${cmd}) ]]; then
step "test data already tagged, no need to initialize submodule"
else
step "tagging test data"
git submodule update --depth 100 --init ${REPO_DIR}
cd ${REPO_DIR}/
git remote set-url origin git@github.com:mapnik/${REPO_NAME}
git tag ${MAPNIK_VERSION} -a -m "tagging for ${MAPNIK_VERSION}"
git push --tags
cd ../../
step "removing test data submodule"
rm -rf ${REPO_DIR}/
fi
}
# test data
check_and_tag test/data test-data
# test data visual
check_and_tag test/data-visual test-data-visual
step "removing .git and .gitignore"
rm -rf .git
rm -rf .gitignore
export TARBALL_COMPRESSED=${TARBALL_NAME}.tar.bz2
echo ${MAPNIK_VERSION} > RELEASE_VERSION.md
step "creating tarball of ${TARBALL_COMPRESSED}"
cd ../
echo "creating tarball of ${TARBALL_NAME}.tar.bz2"
tar cjf ${TARBALL_NAME}.tar.bz2 ${TARBALL_NAME}/
echo "uploading $(dirname ${TARBALL_NAME})/${TARBALL_NAME}.tar.bz2 to s3://mapnik/dist/${MAPNIK_VERSION}/"
# TODO: upload to github releases instead of s3
aws s3 cp --acl public-read ${TARBALL_NAME}.tar.bz2 s3://mapnik/dist/${MAPNIK_VERSION}/
tar cjf ${TARBALL_COMPRESSED} ${TARBALL_NAME}/
step "uploading to github"
# https://developer.github.com/v3/repos/releases/#create-a-release
IS_PRERELEASE=false
if [[ ${MAPNIK_VERSION} =~ 'rc' ]] || [[ ${MAPNIK_VERSION} =~ 'alpha' ]]; then
IS_PRERELEASE=true
fi
IS_DRAFT=true
step "creating a draft release"
export CHANGELOG_REF=$(python -c "print '${MAPNIK_VERSION}'.replace('.','').replace('v','').split('-')[0]")
export RELEASE_NOTES="Mapnik ${MAPNIK_VERSION}\r\n\r\n[Changelog](https://github.com/mapnik/mapnik/blob/${MAPNIK_VERSION}/CHANGELOG.md#${CHANGELOG_REF})"
step "release notes: $RELEASE_NOTES"
# create draft release
curl --data "{\"tag_name\": \"${MAPNIK_VERSION}\",\"target_commitish\": \"master\",\"name\": \"${MAPNIK_VERSION}\",\"body\": \"${RELEASE_NOTES}\",\"draft\": ${IS_DRAFT},\"prerelease\": ${IS_PRERELEASE}}" \
https://api.github.com/repos/mapnik/mapnik/releases?access_token=${GITHUB_TOKEN_MAPNIK_PUBLIC_REPO} \
> create_response.json
cat create_response.json
# parse out upload url and form it up to post tarball
UPLOAD_URL=$(python -c "import json;print json.load(open('create_response.json'))['upload_url'].replace('{?name,label}','?name=${TARBALL_COMPRESSED}')")
HTML_URL=$(python -c "import json;print json.load(open('create_response.json'))['html_url']")
step "upload url: $UPLOAD_URL"
# upload source tarball
curl ${UPLOAD_URL} \
-X POST \
-H "Authorization: token ${GITHUB_TOKEN_MAPNIK_PUBLIC_REPO}" \
-H "Content-Type:application/octet-stream" \
--data-binary @${TARBALL_COMPRESSED}
echo
step "Success: view your new draft release at ${HTML_URL}"
open ${HTML_URL}
echo
#step "uploading $(realpath ${TARBALL_COMPRESSED}) to s3://mapnik/dist/${MAPNIK_VERSION}/"
#aws s3 cp --acl public-read ${TARBALL_COMPRESSED} s3://mapnik/dist/${MAPNIK_VERSION}/

scripts/test_release.sh Executable file

@ -0,0 +1,65 @@
#!/bin/bash
set -eu
set -o pipefail
: '
Note: before running this script you need to tag and publish a new release (it can be a draft)
Usage:
./scripts/test_release.sh
This script:
- Downloads the latest release tarball from github
- Builds it and runs tests
'
function step { >&2 echo -e "\033[1m\033[36m* $1\033[0m"; }
function step_error { >&2 echo -e "\033[1m\033[31m$1\033[0m"; }
if [[ ${GITHUB_TOKEN_MAPNIK_PUBLIC_REPO:-false} == false ]]; then
step_error "Please set GITHUB_TOKEN_MAPNIK_PUBLIC_REPO to a github token with 'public_repo' scope (create one at https://github.com/settings/tokens)"
exit 1
fi
export MAPNIK_VERSION="$(git describe)"
if [[ $(git tag -l) =~ ${MAPNIK_VERSION} ]]; then
step "Success: found $MAPNIK_VERSION (result of git describe) in tags, continuing"
else
step_error "error: $MAPNIK_VERSION (result of git describe) not in "git tag -l" output, aborting"
step_error "You must create a valid annotated tag first, before running this ./scripts/publish_release.sh"
exit 1
fi
curl --fail https://api.github.com/repos/mapnik/mapnik/releases -H "Authorization: token ${GITHUB_TOKEN_MAPNIK_PUBLIC_REPO}" > /tmp/mapnik-releases.json
RELEASE_ASSET_NAME=$(python -c "import json;print json.load(open('/tmp/mapnik-releases.json'))[0]['assets'][0]['name']")
if [[ ${RELEASE_ASSET_NAME} == "mapnik-${MAPNIK_VERSION}.tar.bz2" ]]; then
step "Successfully found release asset to test: mapnik-${MAPNIK_VERSION}.tar.bz2"
else
step_error "Error: did not find correct release asset to test: mapnik-${MAPNIK_VERSION}.tar.bz2"
exit 1
fi
export RELEASE_ASSET_URL=$(python -c "import json;print json.load(open('/tmp/mapnik-releases.json'))[0]['assets'][0]['url']")
step "Downloading ${RELEASE_ASSET_URL}"
mkdir -p /tmp/build-mapnik-${MAPNIK_VERSION}/
rm -rf /tmp/build-mapnik-${MAPNIK_VERSION}/*
cd /tmp/build-mapnik-${MAPNIK_VERSION}/
# note: curl forwards the "Authorization" header on redirects, which breaks the AWS download,
# hence the two-step approach here rather than depending on -L
# first a HEAD request to get the download redirect
curl -I -f ${RELEASE_ASSET_URL} -H "Accept: application/octet-stream" -H "Authorization: token ${GITHUB_TOKEN_MAPNIK_PUBLIC_REPO}" > redirect.json
# now download from the github s3 location after stripping the trailing carriage return
export RELEASE_ASSET_S3=$(cat redirect.json | grep location | cut -d' ' -f2 | tr '\r' ' ')
curl --retry 3 -f -S -L "${RELEASE_ASSET_S3}" -o mapnik-${MAPNIK_VERSION}.tar.bz2
tar xf mapnik-${MAPNIK_VERSION}.tar.bz2
cd mapnik-${MAPNIK_VERSION}
source bootstrap.sh
./configure CXX="$(pwd)/mason_packages/.link/bin/ccache clang++"
make
make test


@ -30,14 +30,15 @@ on () {
git_submodule_update () {
git submodule update "$@" && return
# failed, search pull requests for matching commits
# failed, search branch and pull request heads for matching commit
git submodule foreach \
'
test "$sha1" = "`git rev-parse HEAD`" ||
git ls-remote origin "refs/pull/*/head" |
git ls-remote origin "refs/heads/*" "refs/pull/*/head" |
while read hash ref; do
if test "$hash" = "$sha1"; then
git config --add remote.origin.fetch "+$ref:$ref";
git config --add remote.origin.fetch "+$ref:$ref"
break
fi
done
'


@ -257,6 +257,7 @@ void agg_renderer<T0,T1>::start_style_processing(feature_type_style const& st)
{
util::apply_visitor(visitor, filter_tag);
}
radius *= common_.scale_factor_;
if (radius > common_.t_.offset())
{
common_.t_.set_offset(radius);
@ -309,7 +310,7 @@ void agg_renderer<T0,T1>::end_style_processing(feature_type_style const& st)
if (st.image_filters().size() > 0)
{
blend_from = true;
mapnik::filter::filter_visitor<buffer_type> visitor(*current_buffer_);
mapnik::filter::filter_visitor<buffer_type> visitor(*current_buffer_, common_.scale_factor_);
for (mapnik::filter::filter_type const& filter_tag : st.image_filters())
{
util::apply_visitor(visitor, filter_tag);
@ -334,7 +335,7 @@ void agg_renderer<T0,T1>::end_style_processing(feature_type_style const& st)
if (st.direct_image_filters().size() > 0)
{
// apply any 'direct' image filters
mapnik::filter::filter_visitor<buffer_type> visitor(pixmap_);
mapnik::filter::filter_visitor<buffer_type> visitor(pixmap_, common_.scale_factor_);
for (mapnik::filter::filter_type const& filter_tag : st.direct_image_filters())
{
util::apply_visitor(visitor, filter_tag);

@ -1 +1 @@
Subproject commit 61f0d314b989a345df9515d5bc1472c8d5362990
Subproject commit 941db3d00920dc7aceaa6797096a7228bc7bac14


@ -40,26 +40,21 @@ SECTION("registration") {
REQUIRE( mapnik::util::is_directory( fontdir ) );
// test map cached fonts
REQUIRE( m.register_fonts(fontdir , false ) );
REQUIRE( m.get_font_memory_cache().size() == 0 );
REQUIRE( m.get_font_file_mapping().size() == 1 );
REQUIRE( m.load_fonts() );
REQUIRE( m.get_font_memory_cache().size() == 1 );
REQUIRE( m.register_fonts(fontdir , true ) );
REQUIRE( m.get_font_file_mapping().size() == 23 );
REQUIRE( m.get_font_file_mapping().size() == 22 );
REQUIRE( m.load_fonts() );
REQUIRE( m.get_font_memory_cache().size() == 23 );
REQUIRE( m.get_font_memory_cache().size() == 22 );
// copy discards memory cache but not file mapping
mapnik::Map m2(m);
REQUIRE( m2.get_font_memory_cache().size() == 0 );
REQUIRE( m2.get_font_file_mapping().size() == 23 );
REQUIRE( m2.get_font_file_mapping().size() == 22 );
REQUIRE( m2.load_fonts() );
REQUIRE( m2.get_font_memory_cache().size() == 23 );
REQUIRE( m2.get_font_memory_cache().size() == 22 );
// test font-directory from XML
mapnik::Map m3(1,1);
mapnik::load_map_string(m3,"<Map font-directory=\"fonts/\"></Map>");
mapnik::load_map_string(m3,"<Map font-directory=\"test/data/fonts/Noto/\"></Map>");
REQUIRE( m3.get_font_memory_cache().size() == 0 );
REQUIRE( m3.load_fonts() );
REQUIRE( m3.get_font_memory_cache().size() == 1 );
@ -97,22 +92,11 @@ SECTION("registration") {
// now restore the original severity
logger.set_severity(original_severity);
// register unifont, since we know it sits in the root fonts/ dir
REQUIRE( mapnik::freetype_engine::register_fonts(fontdir) );
face_names = mapnik::freetype_engine::face_names();
REQUIRE( face_names.size() > 0 );
REQUIRE( face_names.size() == 1 );
// re-register unifont, should not have any affect
REQUIRE( mapnik::freetype_engine::register_fonts(fontdir, false) );
face_names = mapnik::freetype_engine::face_names();
REQUIRE( face_names.size() == 1 );
// single dejavu font in separate location
std::string dejavu_bold_oblique("test/data/fonts/DejaVuSansMono-BoldOblique.ttf");
REQUIRE( mapnik::freetype_engine::register_font(dejavu_bold_oblique) );
face_names = mapnik::freetype_engine::face_names();
REQUIRE( face_names.size() == 2 );
REQUIRE( face_names.size() == 1 );
// now, inspect font mapping and confirm the correct 'DejaVu Sans Mono Bold Oblique' is registered
using font_file_mapping = std::map<std::string, std::pair<int,std::string> >;
@ -132,7 +116,7 @@ SECTION("registration") {
// recurse to find all dejavu fonts
REQUIRE( mapnik::freetype_engine::register_fonts(fontdir, true) );
face_names = mapnik::freetype_engine::face_names();
REQUIRE( face_names.size() == 23 );
REQUIRE( face_names.size() == 22 );
// we should have re-registered 'DejaVu Sans Mono Bold Oblique' again,
// but now at a new path
@ -154,7 +138,7 @@ SECTION("registration") {
mapnik::Map m4(1,1);
REQUIRE( m4.register_fonts(fontdir , true ) );
REQUIRE( m4.get_font_memory_cache().size() == 0 );
REQUIRE( m4.get_font_file_mapping().size() == 23 );
REQUIRE( m4.get_font_file_mapping().size() == 22 );
REQUIRE( !m4.load_fonts() );
REQUIRE( m4.get_font_memory_cache().size() == 0 );
REQUIRE( m4.register_fonts(dejavu_bold_oblique, false) );
@ -166,7 +150,7 @@ SECTION("registration") {
// https://github.com/mapnik/mapnik/issues/2274
REQUIRE( mapnik::freetype_engine::register_font("test/data/fonts/NotoSans-Regular.ttc") );
face_names = mapnik::freetype_engine::face_names();
REQUIRE( face_names.size() == 25 );
REQUIRE( face_names.size() == 24 );
// now blindly register as many system fonts as possible
// the goal here to make sure we don't crash
@ -179,7 +163,7 @@ SECTION("registration") {
// windows
mapnik::freetype_engine::register_fonts("C:\\Windows\\Fonts", true);
face_names = mapnik::freetype_engine::face_names();
REQUIRE( face_names.size() > 23 );
REQUIRE( face_names.size() > 22 );
}
catch (std::exception const & ex)
{


@ -115,6 +115,58 @@ TEST_CASE("geojson") {
}
}
SECTION("GeoJSON num_features_to_query")
{
std::string filename = "./test/data/json/featurecollection-multipleprops.geojson";
for (mapnik::value_integer num_features_to_query : { mapnik::value_integer(-1),
mapnik::value_integer(0),
mapnik::value_integer(1),
mapnik::value_integer(2),
mapnik::value_integer(3),
std::numeric_limits<mapnik::value_integer>::max()})
{
for (auto create_index : { true, false })
{
if (create_index)
{
int ret = create_disk_index(filename);
int ret_posix = (ret >> 8) & 0x000000ff;
INFO(ret);
INFO(ret_posix);
CHECK(mapnik::util::exists(filename + ".index"));
}
for (auto cache_features : {true, false})
{
mapnik::parameters params;
params["type"] = "geojson";
params["file"] = filename;
params["cache_features"] = cache_features;
params["num_features_to_query"] = num_features_to_query;
auto ds = mapnik::datasource_cache::instance().create(params);
CHECK(ds != nullptr);
auto fields = ds->get_descriptor().get_descriptors();
if (!create_index && cache_features)
{
// when there's no index and caching is enabled, the descriptor is always fully initialised
REQUIRE(fields.size() == 2);
}
else
{
// at least 1 feature should be queried
REQUIRE(fields.size() == std::min(std::max(mapnik::value_integer(1), num_features_to_query),
mapnik::value_integer(2)));
}
}
// cleanup
if (create_index && mapnik::util::exists(filename + ".index"))
{
mapnik::util::remove(filename + ".index");
}
}
}
}
SECTION("GeoJSON attribute descriptors are alphabetically ordered")
{
for (auto cache_features : {true, false})


@ -363,4 +363,30 @@ SECTION("Image copy/move")
}
}
SECTION("image::swap")
{
auto blue = mapnik::color(50, 50, 250).rgba();
auto orange = mapnik::color(250, 150, 0).rgba();
mapnik::image_rgba8 im;
mapnik::image_rgba8 im2(16, 16);
mapnik::image_rgba8 im3(16, 16);
im2.set(blue);
im3.set(orange);
// swap two non-empty images
CHECK_NOTHROW(im2.swap(im3));
CHECK(im2(0, 0) == orange);
CHECK(im3(0, 0) == blue);
// swap empty <-> non-empty
CHECK_NOTHROW(im.swap(im3));
CHECK(im3.data() == nullptr);
CHECKED_IF(im.data() != nullptr)
{
CHECK(im(0, 0) == blue);
}
}
} // END TEST CASE


@ -159,7 +159,7 @@ configuration = {
"mapnik_bundled_icu_data":mapnik_bundled_icu_data,
}
## if we are statically linking depedencies
## if we are statically linking dependencies
## then they do not need to be reported in ldflags
#if env['RUNTIME_LINK'] == 'static':
# configuration['ldflags'] = ''