Merge pull request #2824 from zerebubuth/test/port-python-tests

Port some python tests

commit c37720066d

5 changed files with 913 additions and 2 deletions
@@ -11,9 +11,9 @@ if not env['CPP_TESTS']:
            os.unlink(cpp_test_bin)
else:
    test_env['LIBS'] = [env['MAPNIK_NAME']]
    test_env.AppendUnique(LIBS=copy(env['LIBMAPNIK_LIBS']))
    test_env.AppendUnique(LIBS='mapnik-wkt')
    test_env.AppendUnique(LIBS='mapnik-json')
    test_env.AppendUnique(LIBS=copy(env['LIBMAPNIK_LIBS']))
    if env['RUNTIME_LINK'] == 'static' and env['PLATFORM'] == 'Linux':
        test_env.AppendUnique(LIBS='dl')
    test_env.AppendUnique(CXXFLAGS='-g')
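Each standalone test added below compiles to its own executable: CATCH_CONFIG_MAIN makes catch.hpp supply main(), which is why the SConscript hunk above links every test binary against libmapnik plus the mapnik-wkt and mapnik-json helper libraries. A minimal sketch of the pattern the ported files follow (the test and section names here are illustrative, not part of the diff):

    // minimal Catch (1.x) skeleton, as used by the standalone tests in this PR
    #define CATCH_CONFIG_MAIN   // ask catch.hpp to generate main() for this binary
    #include "catch.hpp"

    TEST_CASE("example") {
      SECTION("one_scenario") {
        int const answer = 6 * 7;
        REQUIRE(answer == 42);   // REQUIRE aborts the section on failure
        CHECK(answer % 2 == 0);  // CHECK records a failure but keeps going
      }
    }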
@@ -1 +1 @@
-Subproject commit 6802b27950218d7f620ffb7e73c5213aa003ea3c
+Subproject commit 419f955634dec99fe1417b6e7092d608803e0f90
test/standalone/agg_rasterizer_integer_overflow_test.cpp (new file, 64 lines)

@@ -0,0 +1,64 @@
#define CATCH_CONFIG_MAIN
#include "catch.hpp"

#include <mapnik/map.hpp>
#include <mapnik/memory_datasource.hpp>
#include <mapnik/json/feature_parser.hpp>
#include <mapnik/feature_type_style.hpp>
#include <mapnik/rule.hpp>
#include <mapnik/symbolizer.hpp>
#include <mapnik/layer.hpp>
#include <mapnik/agg_renderer.hpp>
#include <mapnik/debug.hpp>

#include <iostream>

// geojson box of the world
const std::string geojson("{ \"type\": \"Feature\", \"properties\": { }, \"geometry\": { \"type\": \"Polygon\", \"coordinates\": [ [ [ -17963313.143242701888084, -6300857.11560364998877 ], [ -17963313.143242701888084, 13071343.332991421222687 ], [ 7396658.353099936619401, 13071343.332991421222687 ], [ 7396658.353099936619401, -6300857.11560364998877 ], [ -17963313.143242701888084, -6300857.11560364998877 ] ] ] } }");

TEST_CASE("agg_rasterizer_integer_overflow") {

SECTION("coordinates_do_not_overflow_and_polygon_is_rendered") {
  auto expected_color = mapnik::color("white");

  mapnik::Map m(256,256);
  m.set_background(mapnik::color("black"));

  mapnik::feature_type_style s;
  {
    mapnik::rule r;
    mapnik::polygon_symbolizer sym;
    mapnik::put(sym, mapnik::keys::fill, expected_color);
    mapnik::put(sym, mapnik::keys::clip, false);
    r.append(std::move(sym));
    s.add_rule(std::move(r));
  }
  m.insert_style("style",std::move(s));

  mapnik::layer lyr("Layer");
  lyr.styles().emplace_back("style");
  {
    auto ds = std::make_shared<mapnik::memory_datasource>(mapnik::parameters());
    auto context = std::make_shared<mapnik::context_type>();
    auto f = std::make_shared<mapnik::feature_impl>(context, 0);
    REQUIRE(mapnik::json::from_geojson(geojson, *f));
    ds->push(f);
    lyr.set_datasource(ds);
  }
  m.add_layer(std::move(lyr));

  // 17/20864/45265.png
  m.zoom_to_box(mapnik::box2d<double>(-13658379.710221574,6197514.253362091,-13657768.213995293,6198125.749588372));

  // works 15/5216/11316.png
  //m.zoom_to_box(mapnik::box2d<double>(-13658379.710221574,6195679.764683247,-13655933.72531645,6198125.749588372));

  mapnik::image_rgba8 im(256, 256);
  {
    mapnik::agg_renderer<mapnik::image_rgba8> ren(m, im);
    ren.apply();
  }

  REQUIRE(im(128,128) == expected_color.rgba());
} // SECTION

} // TEST_CASE
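When the pixel check above fails it helps to look at the actual rendering. A small local addition one could make while debugging (a sketch, assuming mapnik's image_util API; the output path is illustrative):

    #include <mapnik/image_util.hpp>
    // ...after ren.apply(), write the rendered image to disk for inspection;
    // the format is picked from the file extension
    mapnik::save_to_file(im, "/tmp/agg_overflow_test.png");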
test/standalone/csv_test.cpp (new file, 684 lines)

@@ -0,0 +1,684 @@
#define CATCH_CONFIG_MAIN
#include "catch.hpp"

#include <mapnik/map.hpp>
#include <mapnik/datasource.hpp>
#include <mapnik/datasource_cache.hpp>
#include <mapnik/geometry.hpp>
#include <mapnik/geometry_types.hpp>
#include <mapnik/geometry_type.hpp>
#include <mapnik/expression.hpp>
#include <mapnik/expression_evaluator.hpp>
#include <mapnik/debug.hpp>

#include <boost/filesystem.hpp>
#include <boost/range/iterator_range_core.hpp>
#include <boost/format.hpp>
#include <boost/optional/optional_io.hpp>

#include <iostream>

namespace bfs = boost::filesystem;

namespace {
void add_csv_files(bfs::path dir, std::vector<bfs::path> &csv_files) {
  for (auto const &entry : boost::make_iterator_range(
         bfs::directory_iterator(dir), bfs::directory_iterator())) {
    auto path = entry.path();
    if (path.extension().native() == ".csv") {
      csv_files.emplace_back(path);
    }
  }
}

bool operator==(mapnik::attribute_descriptor const &a,
                mapnik::attribute_descriptor const &b) {
  return ((a.get_name() == b.get_name()) &&
          (a.get_type() == b.get_type()) &&
          (a.is_primary_key() == b.is_primary_key()) &&
          (a.get_size() == b.get_size()) &&
          (a.get_precision() == b.get_precision()));
}

mapnik::datasource_ptr get_csv_ds(std::string const &file_name, bool strict = true) {
  mapnik::parameters params;
  params["type"] = std::string("csv");
  params["file"] = file_name;
  params["strict"] = mapnik::value_bool(strict);
  auto ds = mapnik::datasource_cache::instance().create(params);
  // require a non-null pointer returned
  REQUIRE(bool(ds));
  return ds;
}

void require_field_names(std::vector<mapnik::attribute_descriptor> const &fields,
                         std::initializer_list<std::string> const &names) {
  REQUIRE(fields.size() == names.size());
  auto itr_a = fields.begin();
  auto const end_a = fields.end();
  auto itr_b = names.begin();
  for (; itr_a != end_a; ++itr_a, ++itr_b) {
    CHECK(itr_a->get_name() == *itr_b);
  }
}

void require_field_types(std::vector<mapnik::attribute_descriptor> const &fields,
                         std::initializer_list<mapnik::eAttributeType> const &types) {
  REQUIRE(fields.size() == types.size());
  auto itr_a = fields.begin();
  auto const end_a = fields.end();
  auto itr_b = types.begin();
  for (; itr_a != end_a; ++itr_a, ++itr_b) {
    CHECK(itr_a->get_type() == *itr_b);
  }
}

mapnik::featureset_ptr all_features(mapnik::datasource_ptr ds) {
  auto fields = ds->get_descriptor().get_descriptors();
  mapnik::query query(ds->envelope());
  for (auto const &field : fields) {
    query.add_property_name(field.get_name());
  }
  return ds->features(query);
}

std::size_t count_features(mapnik::featureset_ptr features) {
  std::size_t count = 0;
  while (features->next()) {
    ++count;
  }
  return count;
}

using attr = std::tuple<std::string, mapnik::value>;
void require_attributes(mapnik::feature_ptr feature,
                        std::initializer_list<attr> const &attrs) {
  REQUIRE(bool(feature));
  for (auto const &kv : attrs) {
    REQUIRE(feature->has_key(std::get<0>(kv)));
    CHECK(feature->get(std::get<0>(kv)) == std::get<1>(kv));
  }
}

namespace detail {
struct feature_count {
  template <typename T>
  std::size_t operator()(T const &geom) const {
    return mapnik::util::apply_visitor(*this, geom);
  }

  std::size_t operator()(mapnik::geometry::geometry_empty const &) const {
    return 0;
  }

  template <typename T>
  std::size_t operator()(mapnik::geometry::point<T> const &) const {
    return 1;
  }

  template <typename T>
  std::size_t operator()(mapnik::geometry::line_string<T> const &) const {
    return 1;
  }

  template <typename T>
  std::size_t operator()(mapnik::geometry::polygon<T> const &) const {
    return 1;
  }

  template <typename T>
  std::size_t operator()(mapnik::geometry::multi_point<T> const &mp) const {
    return mp.size();
  }

  template <typename T>
  std::size_t operator()(mapnik::geometry::multi_line_string<T> const &mls) const {
    return mls.size();
  }

  template <typename T>
  std::size_t operator()(mapnik::geometry::multi_polygon<T> const &mp) const {
    return mp.size();
  }

  template <typename T>
  std::size_t operator()(mapnik::geometry::geometry_collection<T> const &col) const {
    std::size_t sum = 0;
    for (auto const &geom : col) {
      sum += operator()(geom);
    }
    return sum;
  }
};
} // namespace detail

template <typename T>
std::size_t feature_count(mapnik::geometry::geometry<T> const &g) {
  return detail::feature_count()(g);
}

void require_geometry(mapnik::feature_ptr feature,
                      std::size_t num_parts,
                      mapnik::geometry::geometry_types type) {
  REQUIRE(bool(feature));
  CHECK(mapnik::geometry::geometry_type(feature->get_geometry()) == type);
  CHECK(feature_count(feature->get_geometry()) == num_parts);
}
} // anonymous namespace

const bool registered = mapnik::datasource_cache::instance().register_datasources("./plugins/input/");

TEST_CASE("csv") {
  REQUIRE(registered);

  // make the tests silent since we intentionally test error conditions that are noisy
  auto const severity = mapnik::logger::instance().get_severity();
  mapnik::logger::instance().set_severity(mapnik::logger::none);

  // check the CSV datasource is loaded
  const std::vector<std::string> plugin_names =
    mapnik::datasource_cache::instance().plugin_names();
  const bool have_csv_plugin =
    std::find(plugin_names.begin(), plugin_names.end(), "csv") != plugin_names.end();

  SECTION("broken files") {
    if (have_csv_plugin) {
      std::vector<bfs::path> broken;
      add_csv_files("test/data/csv/fails", broken);
      add_csv_files("test/data/csv/warns", broken);
      broken.emplace_back("test/data/csv/fails/does_not_exist.csv");

      for (auto const &path : broken) {
        REQUIRE_THROWS(get_csv_ds(path.native()));
      }
    }
  } // END SECTION

  SECTION("good files") {
    if (have_csv_plugin) {
      std::vector<bfs::path> good;
      add_csv_files("test/data/csv", good);
      add_csv_files("test/data/csv/warns", good);

      for (auto const &path : good) {
        auto ds = get_csv_ds(path.native(), false);
        // require a non-null pointer returned
        REQUIRE(bool(ds));
      }
    }
  } // END SECTION

  SECTION("lon/lat detection") {
    for (auto const &lon_name : {std::string("lon"), std::string("lng")}) {
      auto ds = get_csv_ds((boost::format("test/data/csv/%1%_lat.csv") % lon_name).str());
      auto fields = ds->get_descriptor().get_descriptors();
      require_field_names(fields, {lon_name, "lat"});
      require_field_types(fields, {mapnik::Integer, mapnik::Integer});

      CHECK(ds->get_geometry_type() == mapnik::datasource_geometry_t::Point);

      mapnik::query query(ds->envelope());
      for (auto const &field : fields) {
        query.add_property_name(field.get_name());
      }
      auto features = ds->features(query);
      auto feature = features->next();

      require_attributes(feature, {
          attr { lon_name, mapnik::value_integer(0) },
          attr { "lat", mapnik::value_integer(0) }
        });
    }
  } // END SECTION

  SECTION("type detection") {
    auto ds = get_csv_ds("test/data/csv/nypd.csv");
    auto fields = ds->get_descriptor().get_descriptors();
    require_field_names(fields, {"Precinct", "Phone", "Address", "City", "geo_longitude", "geo_latitude", "geo_accuracy"});
    require_field_types(fields, {mapnik::String, mapnik::String, mapnik::String, mapnik::String, mapnik::Double, mapnik::Double, mapnik::String});

    CHECK(ds->get_geometry_type() == mapnik::datasource_geometry_t::Point);
    CHECK(count_features(all_features(ds)) == 2);

    auto feature = all_features(ds)->next();
    require_attributes(feature, {
        attr { "City", mapnik::value_unicode_string("New York, NY") }
        , attr { "geo_accuracy", mapnik::value_unicode_string("house") }
        , attr { "Phone", mapnik::value_unicode_string("(212) 334-0711") }
        , attr { "Address", mapnik::value_unicode_string("19 Elizabeth Street") }
        , attr { "Precinct", mapnik::value_unicode_string("5th Precinct") }
        , attr { "geo_longitude", mapnik::value_integer(-70) }
        , attr { "geo_latitude", mapnik::value_integer(40) }
      });
  } // END SECTION

  SECTION("skipping blank rows") {
    auto ds = get_csv_ds("test/data/csv/blank_rows.csv");
    auto fields = ds->get_descriptor().get_descriptors();
    require_field_names(fields, {"x", "y", "name"});
    require_field_types(fields, {mapnik::Integer, mapnik::Integer, mapnik::String});

    CHECK(ds->get_geometry_type() == mapnik::datasource_geometry_t::Point);
    CHECK(count_features(all_features(ds)) == 2);
  } // END SECTION

  SECTION("empty rows") {
    auto ds = get_csv_ds("test/data/csv/empty_rows.csv");
    auto fields = ds->get_descriptor().get_descriptors();
    require_field_names(fields, {"x", "y", "text", "date", "integer", "boolean", "float", "time", "datetime", "empty_column"});
    require_field_types(fields, {mapnik::Integer, mapnik::Integer, mapnik::String, mapnik::String, mapnik::Integer, mapnik::Boolean, mapnik::Double, mapnik::String, mapnik::String, mapnik::String});

    CHECK(ds->get_geometry_type() == mapnik::datasource_geometry_t::Point);
    CHECK(count_features(all_features(ds)) == 4);

    auto featureset = all_features(ds);
    auto feature = featureset->next();
    require_attributes(feature, {
        attr { "x", mapnik::value_integer(0) }
        , attr { "empty_column", mapnik::value_unicode_string("") }
        , attr { "text", mapnik::value_unicode_string("a b") }
        , attr { "float", mapnik::value_double(1.0) }
        , attr { "datetime", mapnik::value_unicode_string("1971-01-01T04:14:00") }
        , attr { "y", mapnik::value_integer(0) }
        , attr { "boolean", mapnik::value_bool(true) }
        , attr { "time", mapnik::value_unicode_string("04:14:00") }
        , attr { "date", mapnik::value_unicode_string("1971-01-01") }
        , attr { "integer", mapnik::value_integer(40) }
      });

    while (bool(feature = featureset->next())) {
      CHECK(feature->size() == 10);
      CHECK(feature->get("empty_column") == mapnik::value_unicode_string(""));
    }
  } // END SECTION

  SECTION("slashes") {
    auto ds = get_csv_ds("test/data/csv/has_attributes_with_slashes.csv");
    auto fields = ds->get_descriptor().get_descriptors();
    require_field_names(fields, {"x", "y", "name"});
    // NOTE: y column is integer, even though a double value is used below in the test?
    require_field_types(fields, {mapnik::Integer, mapnik::Integer, mapnik::String});

    auto featureset = all_features(ds);
    require_attributes(featureset->next(), {
        attr{"x", 0}
        , attr{"y", 0}
        , attr{"name", mapnik::value_unicode_string("a/a") } });
    require_attributes(featureset->next(), {
        attr{"x", 1}
        , attr{"y", 4}
        , attr{"name", mapnik::value_unicode_string("b/b") } });
    require_attributes(featureset->next(), {
        attr{"x", 10}
        , attr{"y", 2.5}
        , attr{"name", mapnik::value_unicode_string("c/c") } });
  } // END SECTION

  SECTION("wkt field") {
    using mapnik::geometry::geometry_types;

    auto ds = get_csv_ds("test/data/csv/wkt.csv");
    auto fields = ds->get_descriptor().get_descriptors();
    require_field_names(fields, {"type"});
    require_field_types(fields, {mapnik::String});

    auto featureset = all_features(ds);
    require_geometry(featureset->next(), 1, geometry_types::Point);
    require_geometry(featureset->next(), 1, geometry_types::LineString);
    require_geometry(featureset->next(), 1, geometry_types::Polygon);
    require_geometry(featureset->next(), 1, geometry_types::Polygon);
    require_geometry(featureset->next(), 4, geometry_types::MultiPoint);
    require_geometry(featureset->next(), 2, geometry_types::MultiLineString);
    require_geometry(featureset->next(), 2, geometry_types::MultiPolygon);
    require_geometry(featureset->next(), 2, geometry_types::MultiPolygon);
  } // END SECTION

  SECTION("handling of missing header") {
    // TODO: does this mean 'missing_header.csv' should be in the warnings
    // subdirectory, since it doesn't work in strict mode?
    auto ds = get_csv_ds("test/data/csv/missing_header.csv", false);
    auto fields = ds->get_descriptor().get_descriptors();
    require_field_names(fields, {"one", "two", "x", "y", "_4", "aftermissing"});
    auto feature = all_features(ds)->next();
    REQUIRE(feature);
    REQUIRE(feature->has_key("_4"));
    CHECK(feature->get("_4") == mapnik::value_unicode_string("missing"));
  } // END SECTION

  SECTION("handling of headers that are numbers") {
    auto ds = get_csv_ds("test/data/csv/numbers_for_headers.csv");
    auto fields = ds->get_descriptor().get_descriptors();
    require_field_names(fields, {"x", "y", "1990", "1991", "1992"});
    auto feature = all_features(ds)->next();
    require_attributes(feature, {
        attr{"x", 0}
        , attr{"y", 0}
        , attr{"1990", 1}
        , attr{"1991", 2}
        , attr{"1992", 3}
      });
    auto expression = mapnik::parse_expression("[1991]=2");
    REQUIRE(bool(expression));
    auto value = mapnik::util::apply_visitor(
      mapnik::evaluate<mapnik::feature_impl, mapnik::value_type, mapnik::attributes>(
        *feature, mapnik::attributes()), *expression);
    CHECK(value == true);
  } // END SECTION

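The section above is the only place these ported tests touch mapnik's expression API: the filter string is parsed into an expression AST and then evaluated against a feature via a visitor. Pulled out of the Catch plumbing, the pattern looks roughly like this (a sketch; the feature variable is assumed to be a mapnik::feature_ptr, and only the filter string comes from the test):

    // parse a filter string into an expression AST
    mapnik::expression_ptr expr = mapnik::parse_expression("[1991]=2");
    // evaluate it against a feature; the result is a mapnik::value variant
    mapnik::value result = mapnik::util::apply_visitor(
      mapnik::evaluate<mapnik::feature_impl, mapnik::value_type, mapnik::attributes>(
        *feature, mapnik::attributes()),
      *expr);
    bool const matched = result.to_bool();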
  SECTION("quoted numbers") {
    using ustring = mapnik::value_unicode_string;

    auto ds = get_csv_ds("test/data/csv/quoted_numbers.csv");
    auto fields = ds->get_descriptor().get_descriptors();
    require_field_names(fields, {"x", "y", "label"});
    auto featureset = all_features(ds);

    require_attributes(featureset->next(), {
        attr{"x", 0}, attr{"y", 0}, attr{"label", ustring("0,0") } });
    require_attributes(featureset->next(), {
        attr{"x", 5}, attr{"y", 5}, attr{"label", ustring("5,5") } });
    require_attributes(featureset->next(), {
        attr{"x", 0}, attr{"y", 5}, attr{"label", ustring("0,5") } });
    require_attributes(featureset->next(), {
        attr{"x", 5}, attr{"y", 0}, attr{"label", ustring("5,0") } });
    require_attributes(featureset->next(), {
        attr{"x", 2.5}, attr{"y", 2.5}, attr{"label", ustring("2.5,2.5") } });

  } // END SECTION

  SECTION("reading newlines") {
    for (auto const &platform : {std::string("windows"), std::string("mac")}) {
      std::string file_name = (boost::format("test/data/csv/%1%_newlines.csv") % platform).str();
      auto ds = get_csv_ds(file_name);
      auto fields = ds->get_descriptor().get_descriptors();
      require_field_names(fields, {"x", "y", "z"});
      require_attributes(all_features(ds)->next(), {
          attr{"x", 1}, attr{"y", 10}, attr{"z", 9999.9999} });
    }
  } // END SECTION

  SECTION("mixed newlines") {
    using ustring = mapnik::value_unicode_string;

    for (auto const &file : {
        std::string("test/data/csv/mac_newlines_with_unix_inline.csv")
        , std::string("test/data/csv/mac_newlines_with_unix_inline_escaped.csv")
        , std::string("test/data/csv/windows_newlines_with_unix_inline.csv")
        , std::string("test/data/csv/windows_newlines_with_unix_inline_escaped.csv")
      }) {
      auto ds = get_csv_ds(file);
      auto fields = ds->get_descriptor().get_descriptors();
      require_field_names(fields, {"x", "y", "line"});
      require_attributes(all_features(ds)->next(), {
          attr{"x", 0}, attr{"y", 0}
          , attr{"line", ustring("many\n lines\n of text\n with unix newlines")} });
    }
  } // END SECTION

  SECTION("tabs") {
    auto ds = get_csv_ds("test/data/csv/tabs_in_csv.csv");
    auto fields = ds->get_descriptor().get_descriptors();
    require_field_names(fields, {"x", "y", "z"});
    require_attributes(all_features(ds)->next(), {
        attr{"x", -122}, attr{"y", 48}, attr{"z", 0} });
  } // END SECTION

  SECTION("separators") {
    using ustring = mapnik::value_unicode_string;

    for (auto const &file : {
        std::string("test/data/csv/pipe_delimiters.csv")
        , std::string("test/data/csv/semicolon_delimiters.csv")
      }) {
      auto ds = get_csv_ds(file);
      auto fields = ds->get_descriptor().get_descriptors();
      require_field_names(fields, {"x", "y", "z"});
      require_attributes(all_features(ds)->next(), {
          attr{"x", 0}, attr{"y", 0}, attr{"z", ustring("hello")} });
    }
  } // END SECTION

  SECTION("null and bool keywords are empty strings") {
    using ustring = mapnik::value_unicode_string;

    auto ds = get_csv_ds("test/data/csv/nulls_and_booleans_as_strings.csv");
    auto fields = ds->get_descriptor().get_descriptors();
    require_field_names(fields, {"x", "y", "null", "boolean"});
    require_field_types(fields, {mapnik::Integer, mapnik::Integer, mapnik::String, mapnik::Boolean});

    auto featureset = all_features(ds);
    require_attributes(featureset->next(), {
        attr{"x", 0}, attr{"y", 0}, attr{"null", ustring("null")}, attr{"boolean", true}});
    require_attributes(featureset->next(), {
        attr{"x", 0}, attr{"y", 0}, attr{"null", ustring("")}, attr{"boolean", false}});
  } // END SECTION

  SECTION("nonexistent query fields throw") {
    auto ds = get_csv_ds("test/data/csv/lon_lat.csv");
    auto fields = ds->get_descriptor().get_descriptors();
    require_field_names(fields, {"lon", "lat"});
    require_field_types(fields, {mapnik::Integer, mapnik::Integer});

    mapnik::query query(ds->envelope());
    for (auto const &field : fields) {
      query.add_property_name(field.get_name());
    }
    // also add an invalid one, triggering throw
    query.add_property_name("bogus");

    REQUIRE_THROWS(ds->features(query));
  } // END SECTION

  SECTION("leading zeros mean strings") {
    using ustring = mapnik::value_unicode_string;

    auto ds = get_csv_ds("test/data/csv/leading_zeros.csv");
    auto fields = ds->get_descriptor().get_descriptors();
    require_field_names(fields, {"x", "y", "fips"});
    require_field_types(fields, {mapnik::Integer, mapnik::Integer, mapnik::String});

    auto featureset = all_features(ds);
    require_attributes(featureset->next(), {
        attr{"x", 0}, attr{"y", 0}, attr{"fips", ustring("001")}});
    require_attributes(featureset->next(), {
        attr{"x", 0}, attr{"y", 0}, attr{"fips", ustring("003")}});
    require_attributes(featureset->next(), {
        attr{"x", 0}, attr{"y", 0}, attr{"fips", ustring("005")}});
  } // END SECTION

  SECTION("advanced geometry detection") {
    using row = std::pair<std::string, mapnik::datasource_geometry_t>;

    for (row r : {
        row{"point", mapnik::datasource_geometry_t::Point}
        , row{"poly", mapnik::datasource_geometry_t::Polygon}
        , row{"multi_poly", mapnik::datasource_geometry_t::Polygon}
        , row{"line", mapnik::datasource_geometry_t::LineString}
      }) {
      std::string file_name = (boost::format("test/data/csv/%1%_wkt.csv") % r.first).str();
      auto ds = get_csv_ds(file_name);
      CHECK(ds->get_geometry_type() == r.second);
    }
  } // END SECTION

  SECTION("creation of CSV from in-memory strings") {
    using ustring = mapnik::value_unicode_string;

    for (auto const &name : {std::string("Winthrop, WA"), std::string(u8"Qu\u00e9bec")}) {
      std::string csv_string =
        (boost::format(
          "wkt,Name\n"
          "\"POINT (120.15 48.47)\",\"%1%\"\n"
        ) % name).str();

      mapnik::parameters params;
      params["type"] = std::string("csv");
      params["inline"] = csv_string;
      auto ds = mapnik::datasource_cache::instance().create(params);
      REQUIRE(bool(ds));

      auto feature = all_features(ds)->next();
      REQUIRE(bool(feature));
      REQUIRE(feature->has_key("Name"));
      CHECK(feature->get("Name") == ustring(name.c_str()));
    }
  } // END SECTION

  SECTION("geojson quoting") {
    using mapnik::geometry::geometry_types;

    for (auto const &file : {
        std::string("test/data/csv/geojson_double_quote_escape.csv")
        , std::string("test/data/csv/geojson_single_quote.csv")
        , std::string("test/data/csv/geojson_2x_double_quote_filebakery_style.csv")
      }) {
      auto ds = get_csv_ds(file);
      auto fields = ds->get_descriptor().get_descriptors();
      require_field_names(fields, {"type"});
      require_field_types(fields, {mapnik::String});

      auto featureset = all_features(ds);
      require_geometry(featureset->next(), 1, geometry_types::Point);
      require_geometry(featureset->next(), 1, geometry_types::LineString);
      require_geometry(featureset->next(), 1, geometry_types::Polygon);
      require_geometry(featureset->next(), 1, geometry_types::Polygon);
      require_geometry(featureset->next(), 4, geometry_types::MultiPoint);
      require_geometry(featureset->next(), 2, geometry_types::MultiLineString);
      require_geometry(featureset->next(), 2, geometry_types::MultiPolygon);
      require_geometry(featureset->next(), 2, geometry_types::MultiPolygon);
    }
  } // END SECTION

  SECTION("blank undelimited rows are still parsed") {
    using ustring = mapnik::value_unicode_string;

    // TODO: does this mean this CSV file should be in the warnings
    // subdirectory, since it doesn't work in strict mode?
    auto ds = get_csv_ds("test/data/csv/more_headers_than_column_values.csv", false);
    auto fields = ds->get_descriptor().get_descriptors();
    require_field_names(fields, {"x", "y", "one", "two", "three"});
    require_field_types(fields, {mapnik::Integer, mapnik::Integer, mapnik::String, mapnik::String, mapnik::String});

    require_attributes(all_features(ds)->next(), {
        attr{"x", 0}, attr{"y", 0}, attr{"one", ustring("")}, attr{"two", ustring("")}, attr{"three", ustring("")} });
  } // END SECTION

  SECTION("fewer headers than rows throws") {
    REQUIRE_THROWS(get_csv_ds("test/data/csv/more_column_values_than_headers.csv"));
  } // END SECTION

  SECTION("feature ID only incremented for valid rows") {
    auto ds = get_csv_ds("test/data/csv/warns/feature_id_counting.csv", false);
    auto fs = all_features(ds);

    // first
    auto feature = fs->next();
    REQUIRE(bool(feature));
    CHECK(feature->id() == 1);

    // second, should have skipped bogus one
    feature = fs->next();
    REQUIRE(bool(feature));
    CHECK(feature->id() == 2);

    feature = fs->next();
    CHECK(!feature);
  } // END SECTION

  SECTION("dynamically defining headers") {
    using ustring = mapnik::value_unicode_string;
    using row = std::pair<std::string, std::size_t>;

    for (auto const &r : {
        row{"test/data/csv/fails/needs_headers_two_lines.csv", 2}
        , row{"test/data/csv/fails/needs_headers_one_line.csv", 1}
        , row{"test/data/csv/fails/needs_headers_one_line_no_newline.csv", 1}
      }) {
      mapnik::parameters params;
      params["type"] = std::string("csv");
      params["file"] = r.first;
      params["headers"] = "x,y,name";
      auto ds = mapnik::datasource_cache::instance().create(params);
      REQUIRE(bool(ds));

      auto fields = ds->get_descriptor().get_descriptors();
      require_field_names(fields, {"x", "y", "name"});
      require_field_types(fields, {mapnik::Integer, mapnik::Integer, mapnik::String});
      require_attributes(all_features(ds)->next(), {
          attr{"x", 0}, attr{"y", 0}, attr{"name", ustring("data_name")} });
      REQUIRE(count_features(all_features(ds)) == r.second);
    }
  } // END SECTION

#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wlong-long"
  SECTION("64bit int fields work") {
    auto ds = get_csv_ds("test/data/csv/64bit_int.csv");
    auto fields = ds->get_descriptor().get_descriptors();
    require_field_names(fields, {"x", "y", "bigint"});
    require_field_types(fields, {mapnik::Integer, mapnik::Integer, mapnik::Integer});

    auto fs = all_features(ds);
    auto feature = fs->next();
    require_attributes(feature, {
        attr{"x", 0}, attr{"y", 0}, attr{"bigint", 2147483648} });

    feature = fs->next();
    require_attributes(feature, {
        attr{"x", 0}, attr{"y", 0}, attr{"bigint", 9223372036854775807ll} });
    require_attributes(feature, {
        attr{"x", 0}, attr{"y", 0}, attr{"bigint", 0x7FFFFFFFFFFFFFFFll} });
  } // END SECTION
#pragma GCC diagnostic pop

  SECTION("various number types") {
    auto ds = get_csv_ds("test/data/csv/number_types.csv");
    auto fields = ds->get_descriptor().get_descriptors();
    require_field_names(fields, {"x", "y", "floats"});
    require_field_types(fields, {mapnik::Integer, mapnik::Integer, mapnik::Double});

    auto fs = all_features(ds);
    for (double d : { .0, +.0, 1e-06, -1e-06, 0.000001, 1.234e+16, 1.234e+16 }) {
      auto feature = fs->next();
      REQUIRE(bool(feature));
      CHECK(feature->get("floats").get<mapnik::value_double>() == Approx(d));
    }
  } // END SECTION

  SECTION("manually supplied extent") {
    std::string csv_string("wkt,Name\n");
    mapnik::parameters params;
    params["type"] = std::string("csv");
    params["inline"] = csv_string;
    params["extent"] = "-180,-90,180,90";
    auto ds = mapnik::datasource_cache::instance().create(params);
    REQUIRE(bool(ds));

    auto box = ds->envelope();
    CHECK(box.minx() == -180);
    CHECK(box.miny() == -90);
    CHECK(box.maxx() == 180);
    CHECK(box.maxy() == 90);
  } // END SECTION

  SECTION("inline geojson") {
    std::string csv_string = "geojson\n'{\"coordinates\":[-92.22568,38.59553],\"type\":\"Point\"}'";
    mapnik::parameters params;
    params["type"] = std::string("csv");
    params["inline"] = csv_string;
    auto ds = mapnik::datasource_cache::instance().create(params);
    REQUIRE(bool(ds));

    auto fields = ds->get_descriptor().get_descriptors();
    require_field_names(fields, {});

    // TODO: this originally had the following comment:
    // - re-enable after https://github.com/mapnik/mapnik/issues/2319 is fixed
    // but that seems to have been merged and tested separately?
    auto fs = all_features(ds);
    auto feat = fs->next();
    CHECK(feature_count(feat->get_geometry()) == 1);
  } // END SECTION

  mapnik::logger::instance().set_severity(severity);
} // END TEST CASE
test/unit/core/box2d_test.cpp (new file, 163 lines)

@@ -0,0 +1,163 @@
#include "catch.hpp"

#include <iostream>
#include <mapnik/coord.hpp>
#include <mapnik/box2d.hpp>

TEST_CASE("box2d") {
  SECTION("coord init") {
    auto c = mapnik::coord2d(100, 100);

    REQUIRE(c.x == 100);
    REQUIRE(c.y == 100);
  }

  SECTION("coord multiplication") {
    auto c = mapnik::coord2d(100, 100);
    c *= 2;

    REQUIRE(c.x == 200);
    REQUIRE(c.y == 200);
  }

  SECTION("envelope init") {
    auto e = mapnik::box2d<double>(100, 100, 200, 200);

    REQUIRE(e.contains(100, 100));
    REQUIRE(e.contains(100, 200));
    REQUIRE(e.contains(200, 200));
    REQUIRE(e.contains(200, 100));

    REQUIRE(e.contains(e.center()));

    REQUIRE(!e.contains(99.9, 99.9));
    REQUIRE(!e.contains(99.9, 200.1));
    REQUIRE(!e.contains(200.1, 200.1));
    REQUIRE(!e.contains(200.1, 99.9));

    REQUIRE(e.width() == 100);
    REQUIRE(e.height() == 100);

    REQUIRE(e.minx() == 100);
    REQUIRE(e.miny() == 100);

    REQUIRE(e.maxx() == 200);
    REQUIRE(e.maxy() == 200);

    REQUIRE(e[0] == 100);
    REQUIRE(e[1] == 100);
    REQUIRE(e[2] == 200);
    REQUIRE(e[3] == 200);
    REQUIRE(e[0] == e[-4]);
    REQUIRE(e[1] == e[-3]);
    REQUIRE(e[2] == e[-2]);
    REQUIRE(e[3] == e[-1]);

    auto c = e.center();

    REQUIRE(c.x == 150);
    REQUIRE(c.y == 150);
  }

  SECTION("envelope static init") {
    auto e = mapnik::box2d<double>(100, 100, 200, 200);

    mapnik::box2d<double> e1, e2, e3;
    REQUIRE(e1.from_string("100 100 200 200"));
    REQUIRE(e2.from_string("100,100,200,200"));
    REQUIRE(e3.from_string("100 , 100 , 200 , 200"));

    REQUIRE(e == e1);
    REQUIRE(e == e2);
    REQUIRE(e == e3);
  }

  SECTION("envelope multiplication") {
    // no width then no impact of multiplication
    {
      auto a = mapnik::box2d<int>(100, 100, 100, 100);
      a *= 5;

      REQUIRE(a.minx() == 100);
      REQUIRE(a.miny() == 100);
      REQUIRE(a.maxx() == 100);
      REQUIRE(a.maxy() == 100);
    }

    {
      auto a = mapnik::box2d<double>(100.0, 100.0, 100.0, 100.0);
      a *= 5;

      REQUIRE(a.minx() == 100);
      REQUIRE(a.miny() == 100);
      REQUIRE(a.maxx() == 100);
      REQUIRE(a.maxy() == 100);
    }

    {
      auto a = mapnik::box2d<double>(100.0, 100.0, 100.001, 100.001);
      a *= 5;

      REQUIRE(a.minx() == Approx( 99.9980));
      REQUIRE(a.miny() == Approx( 99.9980));
      REQUIRE(a.maxx() == Approx(100.0030));
      REQUIRE(a.maxy() == Approx(100.0030));
    }

    {
      auto e = mapnik::box2d<double>(100, 100, 200, 200);
      e *= 2;

      REQUIRE(e.minx() == 50);
      REQUIRE(e.miny() == 50);
      REQUIRE(e.maxx() == 250);
      REQUIRE(e.maxy() == 250);

      REQUIRE(e.contains(50, 50));
      REQUIRE(e.contains(50, 250));
      REQUIRE(e.contains(250, 250));
      REQUIRE(e.contains(250, 50));

      REQUIRE(!e.contains(49.9, 49.9));
      REQUIRE(!e.contains(49.9, 250.1));
      REQUIRE(!e.contains(250.1, 250.1));
      REQUIRE(!e.contains(250.1, 49.9));

      REQUIRE(e.contains(e.center()));

      REQUIRE(e.width() == 200);
      REQUIRE(e.height() == 200);

      REQUIRE(e.minx() == 50);
      REQUIRE(e.miny() == 50);

      REQUIRE(e.maxx() == 250);
      REQUIRE(e.maxy() == 250);

      auto c = e.center();

      REQUIRE(c.x == 150);
      REQUIRE(c.y == 150);
    }
  }

  SECTION("envelope clipping") {
    auto e1 = mapnik::box2d<double>(-180,-90,180,90);
    auto e2 = mapnik::box2d<double>(-120,40,-110,48);
    e1.clip(e2);
    REQUIRE(e1 == e2);

    // madagascar in merc
    e1 = mapnik::box2d<double>(4772116.5490, -2744395.0631, 5765186.4203, -1609458.0673);
    e2 = mapnik::box2d<double>(5124338.3753, -2240522.1727, 5207501.8621, -2130452.8520);
    e1.clip(e2);
    REQUIRE(e1 == e2);

    // nz in lon/lat
    e1 = mapnik::box2d<double>(163.8062, -47.1897, 179.3628, -33.9069);
    e2 = mapnik::box2d<double>(173.7378, -39.6395, 174.4849, -38.9252);
    e1.clip(e2);
    REQUIRE(e1 == e2);
  }

} // TEST_CASE
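The expectations in the "envelope multiplication" section pin down the operator's semantics: e *= s scales the box about its centre, so the centre stays fixed while width and height are multiplied by s. For the (100,100,200,200) box, scaling by 2 keeps the centre at (150,150) and grows each side from 100 to 200, giving (50,50,250,250), exactly what the section checks (the 100.001 case gives 99.998/100.003 the same way). A small sketch of that rule (the helper name is illustrative, not part of mapnik):

    // scale a box about its centre: the result spans cx +/- s*w/2, cy +/- s*h/2
    mapnik::box2d<double> scaled_about_center(mapnik::box2d<double> const& box, double s)
    {
      double const cx = box.center().x;
      double const cy = box.center().y;
      double const half_w = 0.5 * s * box.width();
      double const half_h = 0.5 * s * box.height();
      return mapnik::box2d<double>(cx - half_w, cy - half_h, cx + half_w, cy + half_h);
    }
    // scaled_about_center(mapnik::box2d<double>(100,100,200,200), 2) gives (50,50,250,250),
    // matching what `e *= 2` produces in the section above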