Merge branch 'master' of github.com:mapnik/mapnik into geometry-refactor

This commit is contained in:
Dane Springmeyer 2016-08-22 16:19:17 -07:00
commit f81583554a
296 changed files with 3475 additions and 4977 deletions

View file

@ -29,7 +29,7 @@ matrix:
addons:
apt:
sources: [ 'ubuntu-toolchain-r-test']
packages: [ 'libstdc++6', 'libstdc++-5-dev']
packages: [ 'libstdc++-5-dev', 'xutils']
- os: linux
sudo: false
compiler: ": clang-coverage"
@ -37,7 +37,7 @@ matrix:
addons:
apt:
sources: [ 'ubuntu-toolchain-r-test']
packages: [ 'libstdc++6','libstdc++-5-dev' ]
packages: ['libstdc++-5-dev', 'xutils' ]
- os: osx
compiler: ": clang-osx"
# https://docs.travis-ci.com/user/languages/objective-c/#Supported-OS-X-iOS-SDK-versions
@ -45,6 +45,12 @@ matrix:
env: JOBS=4 MASON_PUBLISH=true _CXX="ccache clang++ -Qunused-arguments"
before_install:
# workaround travis rvm bug
# http://superuser.com/questions/1044130/why-am-i-having-how-can-i-fix-this-error-shell-session-update-command-not-f
- |
if [[ "${TRAVIS_OS_NAME}" == "osx" ]]; then
rvm get head || true
fi
- if [[ ${_CXX:-false} != false ]]; then export CXX=${_CXX}; fi
- if [[ ${_CC:-false} != false ]]; then export CC=${_CC}; fi
- source scripts/travis-common.sh

View file

@ -6,9 +6,41 @@ Developers: Please commit along with changes.
For a complete change history, see the git log.
## 3.0.12
Released: xx-xx-xx
(Packaged from xxxxxx)
#### Summary
- Ensured gdal.input is registered once (refs #3093 #3339 #3340)
- Fixed `mapnik::util::is_clockwise` implementation to use coordinates relative to the origin and avoid numeric precision issues (see the sketch after this list)
- `mapnik-index` is updated to fail on first error in input (csv)
- Added `guard` to `get_object_severity` method (ref #3322)
- Improved `hash` calculation for `mapnik::value` (ref #3406)
- AGG - made cover `unsigned` to avoid left shift of negative values (ref #3406)
- Fixed using `scale_factor` in `evaluate_transform(..)`
- Fixed line spacing logic by applying `scale factor`
- ~~Fixed `stringify_object/stringify_array` implementations by disabling white space skipping (ref #3419)~~
- Added geojson unit test for property types/values
- JSON - added support for object and array type in `json_value` and update `stringifier`
- GDAL.input - fallback to using `overviews` if present (8e8482803bb435726534c3b686a56037b7d3e8ad)
- TopoJSON.input - improved and simplified grammar/parser implementation (https://github.com/mapnik/mapnik/pull/3429)
- GDAL.input - Added support for non-alpha mask band
- TopoJSON.input - fixed order-of-elements limitation (ref #3434)
- Fixed stroke-width size not included in markers ellipse bounding box (ref #3445)
- Implemented `char_array_buffer` and removed `boost::iostreams` dependency (2e8c0d36c2237f2815d8004c1b96bad909056eb9)
- JSON.input - `extract_bounding_box_grammar` - make features optional (ref #3463)
- Ensure input plugins return `empty_featureset` rather than `nullptr` (feature_ptr())
- Added support for quantising small (less than 3 pixel) images (ref #3466)
- Added support for natural logarithm function in expressions (ref #3475)
- Improved logic determining whether certain compiler features are available, e.g. `inheriting constructors` (MSVC)
- GeoJSON - corrected quoting of object keys in `stringifier` output (ref #3491)
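
As referenced above, here is a minimal sketch of the numeric idea behind the `is_clockwise` fix. The ring type and function signature are illustrative, not mapnik's actual API: the shoelace sum is accumulated with both points shifted by the first vertex, so rings with large absolute coordinates (e.g. web-mercator metres) no longer lose the sign to floating-point cancellation.

    #include <cstddef>
    #include <utility>
    #include <vector>

    using ring = std::vector<std::pair<double,double>>; // illustrative ring type

    bool is_clockwise(ring const& r)
    {
        if (r.size() < 3) return false;       // degenerate ring, winding undefined
        double x0 = r.front().first;
        double y0 = r.front().second;
        double area = 0.0;
        std::size_t n = r.size();
        for (std::size_t i = 0; i < n; ++i)
        {
            auto const& p0 = r[i];
            auto const& p1 = r[(i + 1) % n];
            // shoelace term with both points taken relative to the first vertex
            double ax = p0.first - x0, ay = p0.second - y0;
            double bx = p1.first - x0, by = p1.second - y0;
            area += ax * by - bx * ay;
        }
        return area < 0.0;                    // negative signed area => clockwise
    }
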
## 3.0.11
Released:
Released: April 1, 2016
(Packaged from 8d9dc27)

View file

@ -1,6 +1,7 @@
#!/usr/bin/env bash
#set -eu
set -eu
set -o pipefail
: '
@ -10,7 +11,7 @@ todo
- shrink icu data
'
MASON_VERSION="b709931"
MASON_VERSION="7ed8931"
function setup_mason() {
if [[ ! -d ./.mason ]]; then
@ -29,7 +30,6 @@ function install() {
MASON_PLATFORM_ID=$(mason env MASON_PLATFORM_ID)
if [[ ! -d ./mason_packages/${MASON_PLATFORM_ID}/${1}/${2} ]]; then
mason install $1 $2
mason link $1 $2
if [[ ${3:-false} != false ]]; then
LA_FILE=$(mason prefix $1 $2)/lib/$3.la
if [[ -f ${LA_FILE} ]]; then
@ -39,11 +39,13 @@ function install() {
fi
fi
fi
mason link $1 $2
}
ICU_VERSION="55.1"
function install_mason_deps() {
FAIL=0
install ccache 3.2.4 &
install jpeg_turbo 1.4.0 libjpeg &
install libpng 1.6.20 libpng &
@ -51,7 +53,6 @@ function install_mason_deps() {
install libpq 9.4.1 &
install sqlite 3.8.8.3 libsqlite3 &
install expat 2.1.0 libexpat &
wait
install icu ${ICU_VERSION} &
install proj 4.8.0 libproj &
install pixman 0.32.6 libpixman-1 &
@ -59,17 +60,26 @@ function install_mason_deps() {
install protobuf 2.6.1 &
# technically protobuf is not a mapnik core dep, but installing
# here by default helps make mapnik-vector-tile builds easier
wait
install webp 0.4.2 libwebp &
install webp 0.5.0 libwebp &
install gdal 1.11.2 libgdal &
install boost 1.61.0 &
install boost_libsystem 1.61.0 &
install boost_libfilesystem 1.61.0 &
install boost_libprogram_options 1.61.0 &
install boost_libregex 1.61.0 &
install boost_libregex_icu 1.61.0 &
# technically boost thread and python are not a core dep, but installing
# here by default helps make python-mapnik builds easier
install boost_libthread 1.61.0 &
install boost_libpython 1.61.0 &
install freetype 2.6 libfreetype &
install harfbuzz 0.9.41 libharfbuzz &
wait
for job in $(jobs -p)
do
wait $job || let "FAIL+=1"
done
if [[ "$FAIL" != "0" ]]; then
exit ${FAIL}
fi
}
MASON_LINKED_ABS=$(pwd)/mason_packages/.link
@ -140,3 +150,8 @@ function main() {
}
main
# allow sourcing of script without
# causing the terminal to bail on error
set +eu
set +o pipefail

View file

@ -21,13 +21,13 @@ dependencies:
cache_directories:
- "~/.ccache"
- "~/.apt-cache"
- "mason_packages"
pre:
# https://discuss.circleci.com/t/add-ability-to-cache-apt-get-programs/598/3
- sudo rm -rf /var/cache/apt/archives && sudo ln -s ~/.apt-cache /var/cache/apt/archives && mkdir -p ~/.apt-cache/partial
- sudo add-apt-repository -y ppa:ubuntu-toolchain-r/test
override:
- sudo apt-get update -y
- sudo apt-get install -y libstdc++-5-dev xutils
database:
pre:

View file

@ -55,7 +55,7 @@ int main ( int, char** )
try {
std::cout << " running demo ... \n";
datasource_cache::instance().register_datasources("plugins/input/");
freetype_engine::register_font("fonts/dejavu-fonts-ttf-2.35/ttf/DejaVuSans.ttf");
freetype_engine::register_font("fonts/dejavu-fonts-ttf-2.37/ttf/DejaVuSans.ttf");
Map m(800,600);
m.set_background(parse_color("white"));

deps/mapbox/variant vendored

@ -1 +1 @@
Subproject commit c511b2f34d966c09e02a1b833db33a9a1f9b2196
Subproject commit aaddee9270e3956cee98cdd7d04aea848d69f5f0

View file

@ -21,7 +21,7 @@ James Cloos
James Crippen
John Karp
Keenan Pepper
Lars Naesbye Christensen
Lars Næsbye Christensen
Lior Halphon
MaEr
Mashrab Kuvatov
@ -33,6 +33,7 @@ Misu Moldovan
Nguyen Thai Ngoc Duy
Nicolas Mailhot
Norayr Chilingarian
Olleg Samoylov
Ognyan Kulev
Ondrej Koala Vacha
Peter Cernak
@ -53,4 +54,4 @@ Vasek Stodulka
Wesley Transue
Yoshiki Ohshima
$Id: AUTHORS 2593 2015-05-17 07:47:39Z ben_laenen $
$Id$

View file

@ -1,3 +1,3 @@
See http://dejavu.sourceforge.net/wiki/index.php/Bugs
$Id: BUGS 80 2004-11-13 13:12:02Z src $
$Id$

View file

@ -1,6 +1,7 @@
Fonts are (c) Bitstream (see below). DejaVu changes are in public domain.
Glyphs imported from Arev fonts are (c) Tavmjong Bah (see below)
Bitstream Vera Fonts Copyright
------------------------------
@ -96,4 +97,91 @@ dealings in this Font Software without prior written authorization
from Tavmjong Bah. For further information, contact: tavmjong @ free
. fr.
$Id: LICENSE 2133 2007-11-28 02:46:28Z lechimp $
TeX Gyre DJV Math
-----------------
Fonts are (c) Bitstream (see below). DejaVu changes are in public domain.
Math extensions done by B. Jackowski, P. Strzelczyk and P. Pianowski
(on behalf of TeX users groups) are in public domain.
Letters imported from Euler Fraktur from AMSfonts are (c) American
Mathematical Society (see below).
Bitstream Vera Fonts Copyright
Copyright (c) 2003 by Bitstream, Inc. All Rights Reserved. Bitstream Vera
is a trademark of Bitstream, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of the fonts accompanying this license (“Fonts”) and associated
documentation
files (the “Font Software”), to reproduce and distribute the Font Software,
including without limitation the rights to use, copy, merge, publish,
distribute,
and/or sell copies of the Font Software, and to permit persons to whom
the Font Software is furnished to do so, subject to the following
conditions:
The above copyright and trademark notices and this permission notice
shall be
included in all copies of one or more of the Font Software typefaces.
The Font Software may be modified, altered, or added to, and in particular
the designs of glyphs or characters in the Fonts may be modified and
additional
glyphs or characters may be added to the Fonts, only if the fonts are
renamed
to names not containing either the words “Bitstream” or the word “Vera”.
This License becomes null and void to the extent applicable to Fonts or
Font Software
that has been modified and is distributed under the “Bitstream Vera”
names.
The Font Software may be sold as part of a larger software package but
no copy
of one or more of the Font Software typefaces may be sold by itself.
THE FONT SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF COPYRIGHT, PATENT,
TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL BITSTREAM OR THE GNOME
FOUNDATION
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, INCLUDING ANY GENERAL,
SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, WHETHER IN AN
ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF THE USE OR
INABILITY TO USE
THE FONT SOFTWARE OR FROM OTHER DEALINGS IN THE FONT SOFTWARE.
Except as contained in this notice, the names of GNOME, the GNOME
Foundation,
and Bitstream Inc., shall not be used in advertising or otherwise to promote
the sale, use or other dealings in this Font Software without prior written
authorization from the GNOME Foundation or Bitstream Inc., respectively.
For further information, contact: fonts at gnome dot org.
AMSFonts (v. 2.2) copyright
The PostScript Type 1 implementation of the AMSFonts produced by and
previously distributed by Blue Sky Research and Y&Y, Inc. are now freely
available for general use. This has been accomplished through the
cooperation
of a consortium of scientific publishers with Blue Sky Research and Y&Y.
Members of this consortium include:
Elsevier Science IBM Corporation Society for Industrial and Applied
Mathematics (SIAM) Springer-Verlag American Mathematical Society (AMS)
In order to assure the authenticity of these fonts, copyright will be
held by
the American Mathematical Society. This is not meant to restrict in any way
the legitimate use of the fonts, such as (but not limited to) electronic
distribution of documents containing these fonts, inclusion of these fonts
into other public domain or commercial font collections or computer
applications, use of the outline data to create derivative fonts and/or
faces, etc. However, the AMS does require that the AMS copyright notice be
removed from any derivative versions of the fonts which have been altered in
any way. In addition, to ensure the fidelity of TeX documents using Computer
Modern fonts, Professor Donald Knuth, creator of the Computer Modern faces,
has requested that any alterations which yield different font metrics be
given a different name.
$Id$

View file

@ -1,3 +1,29 @@
Changes from 2.36 to 2.37
* Fix issue with empty glyphs in condensed typefaces in the released source files.
Changes from 2.35 to 2.36
* Math: added DejaVu Math Tex Gyre by B. Jackowski, P. Strzelczyk and
P. Pianowski (on behalf of TeX users groups)
* Sans: removed dot of U+06BA in all forms
* Sans: fixed position of three dots of U+06BD in init and medi forms (by
Denis Jacquerye)
* Sans: corrected direction of contours in U+05E7 (by Lior Halphon)
* Sans: added U+1F643 (by Olleg Samoylov)
* Serif: moved up U+0360-0361 (by Gee Fung Sit 薛至峰)
* Serif: increased spacing of Roman numerals U+2161-2163, U+2165-2168,
U+216A-216B (by Gee Fung Sit 薛至峰)
* Serif: fixed anchor position of U+00E6 (by Gee Fung Sit 薛至峰)
* Sans: fixed vertical position of U+20BA (by Gee Fung Sit 薛至峰)
* Sans, Serif: fixed glyph height of Block Elements (by Gee Fung Sit 薛至峰)
* Sans, Serif: added U+A698-A699 (by Gee Fung Sit 薛至峰)
* Sans, Mono, Serif: added U+037F (by Gee Fung Sit 薛至峰)
* Mono: added U+0376-0377, U+037B-037D (by Gee Fung Sit 薛至峰)
* Serif: removed duplicate point from U+1D05 (by Gee Fung Sit 薛至峰)
* Mono: added U+20BA, U+20BD (by Gee Fung Sit 薛至峰)
* Sans: added moon symbols U+1F311-1F318 (by Ben Laenen)
Changes from 2.34 to 2.35
* Sans, SansMono, Serif: added U+0E3F, U+A7F8-U+A7F9 (by Gee Fung Sit 薛至峰)
@ -1426,4 +1452,4 @@ Changes from 0.9 to 0.9.1:
- proper caron shape for dcaron and tcaron
- minor visual changes
$Id: NEWS 2594 2015-05-17 07:54:48Z ben_laenen $
$Id$

View file

@ -1,4 +1,6 @@
DejaVu fonts 2.35 (c)2004-2015 DejaVu fonts team
[![Build Status](https://travis-ci.org/dejavu-fonts/dejavu-fonts.svg)](https://travis-ci.org/dejavu-fonts/dejavu-fonts)
DejaVu fonts 2.37 (c)2004-2016 DejaVu fonts team
------------------------------------------------
The DejaVu fonts are a font family based on the Bitstream Vera Fonts
@ -31,6 +33,7 @@ DejaVu Serif Condensed (experimental)
DejaVu Serif Condensed Bold (experimental)
DejaVu Serif Condensed Bold Italic (experimental)
DejaVu Serif Condensed Italic (experimental)
DejaVu Math TeX Gyre
All fonts are also available as derivative called DejaVu LGC with support
only for Latin, Greek and Cyrillic scripts.
@ -56,4 +59,9 @@ U+213C-U+2140, U+2295-U+2298, U+2308-U+230B, U+26A2-U+26B1, U+2701-U+2704,
U+2706-U+2709, U+270C-U+274B, U+2758-U+275A, U+2761-U+2775, U+2780-U+2794,
U+2798-U+27AF, U+27B1-U+27BE, U+FB05-U+FB06
$Id: README 2595 2015-05-17 07:57:27Z ben_laenen $
DejaVu Math TeX Gyre
--------------------
TeX Gyre DJV Math by B. Jackowski, P. Strzelczyk and P. Pianowski
(on behalf of TeX users groups).
$Id$

Binary file not shown.

View file

@ -11,7 +11,7 @@ U+0180 Latin Extended-B 100% (208/208) 100% (208/208
U+0250 IPA Extensions 100% (96/96) 100% (96/96) 100% (96/96)
U+02b0 Spacing Modifier Letters 78% (63/80) 73% (59/80) 62% (50/80)
U+0300 Combining Diacritical Marks 83% (93/112) 61% (69/112) 59% (67/112)
U+0370 Greek and Coptic 99% (134/135) 88% (120/135) 81% (110/135)
U+0370 Greek and Coptic 100% (135/135) 89% (121/135) 85% (116/135)
U+0400 Cyrillic 100% (256/256) 79% (204/256) 70% (180/256)
U+0500 Cyrillic Supplement 79% (38/48) 20% (10/48) 12% (6/48)
U+0530 Armenian 96% (86/89) 96% (86/89) 96% (86/89)
@ -23,16 +23,16 @@ U+0780 Thaana (0/50) (0/50)
U+07c0 NKo 91% (54/59) (0/59) (0/59)
U+0800 Samaritan (0/61) (0/61) (0/61)
U+0840 Mandaic (0/29) (0/29) (0/29)
U+08a0 Arabic Extended-A (0/47) (0/47) (0/47)
U+08a0 Arabic Extended-A (0/73) (0/73) (0/73)
U+0900 Devanagari (0/128) (0/128) (0/128)
U+0980 Bengali (0/93) (0/93) (0/93)
U+0a00 Gurmukhi (0/79) (0/79) (0/79)
U+0a80 Gujarati (0/84) (0/84) (0/84)
U+0a80 Gujarati (0/85) (0/85) (0/85)
U+0b00 Oriya (0/90) (0/90) (0/90)
U+0b80 Tamil (0/72) (0/72) (0/72)
U+0c00 Telugu (0/95) (0/95) (0/95)
U+0c80 Kannada (0/87) (0/87) (0/87)
U+0d00 Malayalam (0/99) (0/99) (0/99)
U+0c00 Telugu (0/96) (0/96) (0/96)
U+0c80 Kannada (0/88) (0/88) (0/88)
U+0d00 Malayalam (0/114) (0/114) (0/114)
U+0d80 Sinhala (0/90) (0/90) (0/90)
U+0e00 Thai 1% (1/87) 1% (1/87) 1% (1/87)
U+0e80 Lao 97% (65/67) (0/67) 68% (46/67)
@ -42,7 +42,7 @@ U+10a0 Georgian 94% (83/88) 94% (83/88)
U+1100 Hangul Jamo (0/256) (0/256) (0/256)
U+1200 Ethiopic (0/358) (0/358) (0/358)
U+1380 Ethiopic Supplement (0/26) (0/26) (0/26)
U+13a0 Cherokee (0/85) (0/85) (0/85)
U+13a0 Cherokee (0/92) (0/92) (0/92)
U+1400 Unified Canadian Aboriginal Syllabics 63% (404/640) (0/640) (0/640)
U+1680 Ogham 100% (29/29) (0/29) (0/29)
U+16a0 Runic (0/89) (0/89) (0/89)
@ -65,22 +65,23 @@ U+1b80 Sundanese (0/64) (0/64)
U+1bc0 Batak (0/56) (0/56) (0/56)
U+1c00 Lepcha (0/74) (0/74) (0/74)
U+1c50 Ol Chiki (0/48) (0/48) (0/48)
U+1c80 Cyrillic Extended-C (0/9) (0/9) (0/9)
U+1cc0 Sundanese Supplement (0/8) (0/8) (0/8)
U+1cd0 Vedic Extensions (0/41) (0/41) (0/41)
U+1d00 Phonetic Extensions 82% (106/128) 89% (115/128) 48% (62/128)
U+1d80 Phonetic Extensions Supplement 59% (38/64) 59% (38/64) 57% (37/64)
U+1dc0 Combining Diacritical Marks Supplement 10% (6/58) 10% (6/58) (0/58)
U+1dc0 Combining Diacritical Marks Supplement 10% (6/59) 10% (6/59) (0/59)
U+1e00 Latin Extended Additional 98% (252/256) 98% (252/256) 71% (182/256)
U+1f00 Greek Extended 100% (233/233) 100% (233/233) 100% (233/233)
U+2000 General Punctuation 96% (107/111) 78% (87/111) 48% (54/111)
U+2070 Superscripts and Subscripts 100% (42/42) 100% (42/42) 100% (42/42)
U+20a0 Currency Symbols 86% (26/30) 33% (10/30) 80% (24/30)
U+20a0 Currency Symbols 83% (26/31) 32% (10/31) 83% (26/31)
U+20d0 Combining Diacritical Marks for Symbols 21% (7/33) (0/33) (0/33)
U+2100 Letterlike Symbols 93% (75/80) 42% (34/80) 22% (18/80)
U+2150 Number Forms 94% (55/58) 94% (55/58) 27% (16/58)
U+2150 Number Forms 91% (55/60) 91% (55/60) 26% (16/60)
U+2190 Arrows 100% (112/112) 100% (112/112) 100% (112/112)
U+2200 Mathematical Operators 100% (256/256) 39% (101/256) 69% (178/256)
U+2300 Miscellaneous Technical 25% (65/251) 14% (36/251) 54% (136/251)
U+2300 Miscellaneous Technical 25% (65/255) 14% (36/255) 53% (136/255)
U+2400 Control Pictures 5% (2/39) 2% (1/39) 2% (1/39)
U+2440 Optical Character Recognition (0/11) (0/11) (0/11)
U+2460 Enclosed Alphanumerics 6% (10/160) (0/160) (0/160)
@ -95,7 +96,7 @@ U+2800 Braille Patterns 100% (256/256) 100% (256/256
U+2900 Supplemental Arrows-B 4% (6/128) 100% (128/128) (0/128)
U+2980 Miscellaneous Mathematical Symbols-B 10% (13/128) 0% (1/128) 5% (7/128)
U+2a00 Supplemental Mathematical Operators 28% (74/256) 2% (6/256) 1% (4/256)
U+2b00 Miscellaneous Symbols and Arrows 17% (35/202) 13% (27/202) 8% (18/202)
U+2b00 Miscellaneous Symbols and Arrows 16% (35/206) 13% (27/206) 8% (18/206)
U+2c00 Glagolitic (0/94) (0/94) (0/94)
U+2c60 Latin Extended-C 96% (31/32) 84% (27/32) 43% (14/32)
U+2c80 Coptic (0/123) (0/123) (0/123)
@ -103,7 +104,7 @@ U+2d00 Georgian Supplement 95% (38/40) 95% (38/40)
U+2d30 Tifinagh 93% (55/59) (0/59) (0/59)
U+2d80 Ethiopic Extended (0/79) (0/79) (0/79)
U+2de0 Cyrillic Extended-A (0/32) (0/32) (0/32)
U+2e00 Supplemental Punctuation 10% (7/67) 10% (7/67) 10% (7/67)
U+2e00 Supplemental Punctuation 10% (7/69) 10% (7/69) 10% (7/69)
U+2e80 CJK Radicals Supplement (0/115) (0/115) (0/115)
U+2f00 Kangxi Radicals (0/214) (0/214) (0/214)
U+2ff0 Ideographic Description Characters (0/12) (0/12) (0/12)
@ -125,15 +126,15 @@ U+a000 Yi Syllables (0/1165) (0/1165)
U+a490 Yi Radicals (0/55) (0/55) (0/55)
U+a4d0 Lisu 100% (48/48) (0/48) (0/48)
U+a500 Vai (0/300) (0/300) (0/300)
U+a640 Cyrillic Extended-B 32% (31/95) 10% (10/95) (0/95)
U+a640 Cyrillic Extended-B 34% (33/96) 12% (12/96) (0/96)
U+a6a0 Bamum (0/88) (0/88) (0/88)
U+a700 Modifier Tone Letters 62% (20/32) 62% (20/32) 62% (20/32)
U+a720 Latin Extended-D 50% (77/152) 39% (60/152) 11% (17/152)
U+a720 Latin Extended-D 48% (77/160) 37% (60/160) 10% (17/160)
U+a800 Syloti Nagri (0/44) (0/44) (0/44)
U+a830 Common Indic Number Forms (0/10) (0/10) (0/10)
U+a840 Phags-pa (0/56) (0/56) (0/56)
U+a880 Saurashtra (0/81) (0/81) (0/81)
U+a8e0 Devanagari Extended (0/28) (0/28) (0/28)
U+a880 Saurashtra (0/82) (0/82) (0/82)
U+a8e0 Devanagari Extended (0/30) (0/30) (0/30)
U+a900 Kayah Li (0/48) (0/48) (0/48)
U+a930 Rejang (0/37) (0/37) (0/37)
U+a960 Hangul Jamo Extended-A (0/29) (0/29) (0/29)
@ -144,7 +145,8 @@ U+aa60 Myanmar Extended-A (0/32) (0/32)
U+aa80 Tai Viet (0/72) (0/72) (0/72)
U+aae0 Meetei Mayek Extensions (0/23) (0/23) (0/23)
U+ab00 Ethiopic Extended-A (0/32) (0/32) (0/32)
U+ab30 Latin Extended-E (0/50) (0/50) (0/50)
U+ab30 Latin Extended-E (0/54) (0/54) (0/54)
U+ab70 Cherokee Supplement (0/80) (0/80) (0/80)
U+abc0 Meetei Mayek (0/56) (0/56) (0/56)
U+ac00 Hangul Syllables (0/0) (0/0) (0/0)
U+d7b0 Hangul Jamo Extended-B (0/72) (0/72) (0/72)
@ -157,7 +159,7 @@ U+fb00 Alphabetic Presentation Forms 100% (58/58) 12% (7/58)
U+fb50 Arabic Presentation Forms-A 17% (108/611) (0/611) 11% (72/611)
U+fe00 Variation Selectors 100% (16/16) 100% (16/16) (0/16)
U+fe10 Vertical Forms (0/10) (0/10) (0/10)
U+fe20 Combining Half Marks 28% (4/14) (0/14) (0/14)
U+fe20 Combining Half Marks 25% (4/16) (0/16) (0/16)
U+fe30 CJK Compatibility Forms (0/32) (0/32) (0/32)
U+fe50 Small Form Variants (0/26) (0/26) (0/26)
U+fe70 Arabic Presentation Forms-B 100% (141/141) (0/141) 100% (141/141)
@ -166,7 +168,7 @@ U+fff0 Specials 100% (5/5) 100% (5/5)
U+10000 Linear B Syllabary (0/88) (0/88) (0/88)
U+10080 Linear B Ideograms (0/123) (0/123) (0/123)
U+10100 Aegean Numbers (0/57) (0/57) (0/57)
U+10140 Ancient Greek Numbers (0/77) (0/77) (0/77)
U+10140 Ancient Greek Numbers (0/79) (0/79) (0/79)
U+10190 Ancient Symbols (0/13) (0/13) (0/13)
U+101d0 Phaistos Disc (0/46) (0/46) (0/46)
U+10280 Lycian (0/29) (0/29) (0/29)
@ -180,6 +182,7 @@ U+103a0 Old Persian (0/50) (0/50)
U+10400 Deseret (0/80) (0/80) (0/80)
U+10450 Shavian (0/48) (0/48) (0/48)
U+10480 Osmanya (0/40) (0/40) (0/40)
U+104b0 Osage (0/72) (0/72) (0/72)
U+10500 Elbasan (0/40) (0/40) (0/40)
U+10530 Caucasian Albanian (0/53) (0/53) (0/53)
U+10600 Linear A (0/341) (0/341) (0/341)
@ -187,10 +190,11 @@ U+10800 Cypriot Syllabary (0/55) (0/55)
U+10840 Imperial Aramaic (0/31) (0/31) (0/31)
U+10860 Palmyrene (0/32) (0/32) (0/32)
U+10880 Nabataean (0/40) (0/40) (0/40)
U+108e0 Hatran (0/26) (0/26) (0/26)
U+10900 Phoenician (0/29) (0/29) (0/29)
U+10920 Lydian (0/27) (0/27) (0/27)
U+10980 Meroitic Hieroglyphs (0/32) (0/32) (0/32)
U+109a0 Meroitic Cursive (0/26) (0/26) (0/26)
U+109a0 Meroitic Cursive (0/90) (0/90) (0/90)
U+10a00 Kharoshthi (0/65) (0/65) (0/65)
U+10a60 Old South Arabian (0/32) (0/32) (0/32)
U+10a80 Old North Arabian (0/32) (0/32) (0/32)
@ -200,57 +204,74 @@ U+10b40 Inscriptional Parthian (0/30) (0/30)
U+10b60 Inscriptional Pahlavi (0/27) (0/27) (0/27)
U+10b80 Psalter Pahlavi (0/29) (0/29) (0/29)
U+10c00 Old Turkic (0/73) (0/73) (0/73)
U+10c80 Old Hungarian (0/108) (0/108) (0/108)
U+10e60 Rumi Numeral Symbols (0/31) (0/31) (0/31)
U+11000 Brahmi (0/109) (0/109) (0/109)
U+11080 Kaithi (0/66) (0/66) (0/66)
U+110d0 Sora Sompeng (0/35) (0/35) (0/35)
U+11100 Chakma (0/67) (0/67) (0/67)
U+11150 Mahajani (0/39) (0/39) (0/39)
U+11180 Sharada (0/85) (0/85) (0/85)
U+11180 Sharada (0/94) (0/94) (0/94)
U+111e0 Sinhala Archaic Numbers (0/20) (0/20) (0/20)
U+11200 Khojki (0/61) (0/61) (0/61)
U+11200 Khojki (0/62) (0/62) (0/62)
U+11280 Multani (0/38) (0/38) (0/38)
U+112b0 Khudawadi (0/69) (0/69) (0/69)
U+11300 Grantha (0/83) (0/83) (0/83)
U+11300 Grantha (0/85) (0/85) (0/85)
U+11400 Newa (0/92) (0/92) (0/92)
U+11480 Tirhuta (0/82) (0/82) (0/82)
U+11580 Siddham (0/72) (0/72) (0/72)
U+11580 Siddham (0/92) (0/92) (0/92)
U+11600 Modi (0/79) (0/79) (0/79)
U+11660 Mongolian Supplement (0/13) (0/13) (0/13)
U+11680 Takri (0/66) (0/66) (0/66)
U+11700 Ahom (0/57) (0/57) (0/57)
U+118a0 Warang Citi (0/84) (0/84) (0/84)
U+11ac0 Pau Cin Hau (0/57) (0/57) (0/57)
U+12000 Cuneiform (0/921) (0/921) (0/921)
U+11c00 Bhaiksuki (0/97) (0/97) (0/97)
U+11c70 Marchen (0/68) (0/68) (0/68)
U+12000 Cuneiform (0/922) (0/922) (0/922)
U+12400 Cuneiform Numbers and Punctuation (0/116) (0/116) (0/116)
U+12480 Early Dynastic Cuneiform (0/196) (0/196) (0/196)
U+13000 Egyptian Hieroglyphs (0/1071) (0/1071) (0/1071)
U+14400 Anatolian Hieroglyphs (0/583) (0/583) (0/583)
U+16800 Bamum Supplement (0/569) (0/569) (0/569)
U+16a40 Mro (0/43) (0/43) (0/43)
U+16ad0 Bassa Vah (0/36) (0/36) (0/36)
U+16b00 Pahawh Hmong (0/127) (0/127) (0/127)
U+16f00 Miao (0/133) (0/133) (0/133)
U+16fe0 Ideographic Symbols and Punctuation (0/1) (0/1) (0/1)
U+17000 Tangut (0/0) (0/0) (0/0)
U+18800 Tangut Components (0/755) (0/755) (0/755)
U+1b000 Kana Supplement (0/2) (0/2) (0/2)
U+1bc00 Duployan (0/143) (0/143) (0/143)
U+1bca0 Shorthand Format Controls (0/4) (0/4) (0/4)
U+1d000 Byzantine Musical Symbols (0/246) (0/246) (0/246)
U+1d100 Musical Symbols (0/220) (0/220) (0/220)
U+1d100 Musical Symbols (0/231) (0/231) (0/231)
U+1d200 Ancient Greek Musical Notation (0/70) (0/70) (0/70)
U+1d300 Tai Xuan Jing Symbols 100% (87/87) (0/87) (0/87)
U+1d360 Counting Rod Numerals (0/18) (0/18) (0/18)
U+1d400 Mathematical Alphanumeric Symbols 11% (117/996) 10% (108/996) 6% (63/996)
U+1d800 Sutton SignWriting (0/672) (0/672) (0/672)
U+1e000 Glagolitic Supplement (0/38) (0/38) (0/38)
U+1e800 Mende Kikakui (0/213) (0/213) (0/213)
U+1e900 Adlam (0/87) (0/87) (0/87)
U+1ee00 Arabic Mathematical Alphabetic Symbols 51% (74/143) (0/143) (0/143)
U+1f000 Mahjong Tiles (0/44) (0/44) (0/44)
U+1f030 Domino Tiles 100% (100/100) (0/100) (0/100)
U+1f0a0 Playing Cards 71% (59/82) (0/82) (0/82)
U+1f100 Enclosed Alphanumeric Supplement (0/173) (0/173) (0/173)
U+1f200 Enclosed Ideographic Supplement (0/57) (0/57) (0/57)
U+1f300 Miscellaneous Symbols and Pictographs 0% (4/742) (0/742) (0/742)
U+1f600 Emoticons 80% (63/78) (0/78) (0/78)
U+1f100 Enclosed Alphanumeric Supplement (0/191) (0/191) (0/191)
U+1f200 Enclosed Ideographic Supplement (0/58) (0/58) (0/58)
U+1f300 Miscellaneous Symbols and Pictographs 1% (12/768) (0/768) (0/768)
U+1f600 Emoticons 80% (64/80) (0/80) (0/80)
U+1f650 Ornamental Dingbats (0/48) (0/48) (0/48)
U+1f680 Transport and Map Symbols (0/97) (0/97) (0/97)
U+1f680 Transport and Map Symbols (0/103) (0/103) (0/103)
U+1f700 Alchemical Symbols (0/116) (0/116) (0/116)
U+1f780 Geometric Shapes Extended (0/85) (0/85) (0/85)
U+1f800 Supplemental Arrows-C (0/148) (0/148) (0/148)
U+1f900 Supplemental Symbols and Pictographs (0/82) (0/82) (0/82)
U+20000 CJK Unified Ideographs Extension B (0/0) (0/0) (0/0)
U+2a700 CJK Unified Ideographs Extension C (0/0) (0/0) (0/0)
U+2b740 CJK Unified Ideographs Extension D (0/0) (0/0) (0/0)
U+2b820 CJK Unified Ideographs Extension E (0/0) (0/0) (0/0)
U+2f800 CJK Compatibility Ideographs Supplement (0/542) (0/542) (0/542)
U+e0000 Tags (0/98) (0/98) (0/98)
U+e0100 Variation Selectors Supplement (0/240) (0/240) (0/240)

View file

@ -308,6 +308,7 @@ public:
void stroke();
void fill();
void paint();
void paint(double opacity);
void set_pattern(cairo_pattern const& pattern);
void set_gradient(cairo_gradient const& pattern, box2d<double> const& bbox);
void add_image(double x, double y, image_rgba8 const& data, double opacity = 1.0);
@ -327,6 +328,9 @@ public:
composite_mode_e halo_comp_op = src_over,
double scale_factor = 1.0);
void push_group();
void pop_group();
template <typename T>
void add_path(T& path, unsigned start_index = 0)
{

View file

@ -177,6 +177,7 @@ protected:
cairo_context context_;
renderer_common common_;
cairo_face_manager face_manager_;
bool style_level_compositing_;
void setup(Map const& m);
};
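
Taken together with the cairo_context additions above (push_group/pop_group and the paint(opacity) overload), a brief hedged sketch of how a renderer might composite a whole style at a given opacity. The helper name and the draw callback are hypothetical, not mapnik's renderer code:

    #include <mapnik/cairo/cairo_context.hpp>

    // Hypothetical helper: draw into an off-screen group, then
    // composite the group once at the requested opacity.
    template <typename DrawFn>
    void render_with_style_opacity(mapnik::cairo_context & context,
                                   DrawFn && draw, double opacity)
    {
        context.push_group();   // begin off-screen group
        draw(context);          // caller renders the style's symbolizers
        context.pop_group();    // end the off-screen group
        context.paint(opacity); // new overload: composite it at `opacity`
    }
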

View file

@ -52,10 +52,18 @@
#define PROJ_ENVELOPE_POINTS 20
#ifndef BOOST_MPL_LIMIT_VECTOR_SIZE
#define BOOST_MPL_CFG_NO_PREPROCESSED_HEADERS
#define BOOST_MPL_LIMIT_VECTOR_SIZE 30
#ifndef BOOST_MPL_LIMIT_VECTOR_SIZE
#define BOOST_MPL_LIMIT_VECTOR_SIZE 30
#else
#warning "WARNING: BOOST_MPL_LIMIT_VECTOR_SIZE is already defined. Ensure config.hpp is included before any Boost headers"
#warning "WARNING: BOOST_MPL_LIMIT_VECTOR_SIZE is already defined. Ensure config.hpp is included before any Boost headers"
#endif
#ifndef BOOST_MPL_LIMIT_LIST_SIZE
#define BOOST_MPL_LIMIT_LIST_SIZE 30
#else
#warning "WARNING: BOOST_MPL_LIMIT_LIST_SIZE is already defined. Ensure config.hpp is included before any Boost headers"
#endif
#endif // MAPNIK_CONFIG_HPP

View file

@ -102,6 +102,7 @@ unary_function_types::unary_function_types()
("tan", tan_impl())
("atan", atan_impl())
("exp", exp_impl())
("log", log_impl())
("abs", abs_impl())
("length",length_impl())
;

View file

@ -46,12 +46,11 @@ class feature_impl;
class MAPNIK_DECL feature_kv_iterator :
public boost::iterator_facade<feature_kv_iterator,
std::tuple<std::string , value> const,
std::tuple<std::string, value> const,
boost::forward_traversal_tag>
{
public:
using value_type = std::tuple<std::string,value>;
feature_kv_iterator (feature_impl const& f, bool begin = false);
private:
friend class boost::iterator_core_access;

View file

@ -94,12 +94,21 @@ public:
{
return extra_params_;
}
bool has_name(std::string const& name) const
{
auto result = std::find_if(std::begin(descriptors_), std::end(descriptors_),
[&name](attribute_descriptor const& desc) { return name == desc.get_name();});
return result != std::end(descriptors_);
}
void order_by_name()
{
std::sort(std::begin(descriptors_), std::end(descriptors_),
[](attribute_descriptor const& d0, attribute_descriptor const& d1)
{
return d0.get_name() < d1.get_name();
});
}
private:
std::string name_;
std::string encoding_;

View file

@ -41,21 +41,25 @@ struct MAPNIK_DECL Featureset : private util::noncopyable
virtual ~Featureset() {}
};
struct MAPNIK_DECL empty_featureset final : Featureset
struct MAPNIK_DECL invalid_featureset final : Featureset
{
feature_ptr next()
{
return feature_ptr();
}
~empty_featureset() {}
~invalid_featureset() {}
};
using featureset_ptr = std::shared_ptr<Featureset>;
inline featureset_ptr make_empty_featureset()
inline featureset_ptr make_invalid_featureset()
{
return std::make_shared<empty_featureset>();
return std::make_shared<invalid_featureset>();
}
inline bool is_valid(featureset_ptr const& ptr)
{
return (dynamic_cast<invalid_featureset*>(ptr.get()) == nullptr) ? true : false;
}
}
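
A small usage sketch, not part of the diff: `ds` and `q` stand for an existing datasource and query, and the function name is hypothetical. It shows how callers can now distinguish a datasource that could not service a query (an invalid featureset) from one that simply yielded no features:

    #include <mapnik/datasource.hpp>
    #include <mapnik/featureset.hpp>
    #include <mapnik/feature.hpp>
    #include <mapnik/query.hpp>

    void consume(mapnik::datasource_ptr ds, mapnik::query const& q)
    {
        mapnik::featureset_ptr fs = ds->features(q);
        if (!mapnik::is_valid(fs))
        {
            // plugin returned make_invalid_featureset(): the query could not be serviced
            return;
        }
        while (mapnik::feature_ptr f = fs->next())
        {
            // a valid but empty featureset simply yields no features here
        }
    }
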

View file

@ -50,6 +50,17 @@ struct exp_impl
};
// log
struct log_impl
{
//using type = T;
value_type operator() (value_type const& val) const
{
return std::log(val.to_double());
}
};
// sin
struct sin_impl
{

View file

@ -63,13 +63,19 @@ struct geometry : geometry_base<T>
{
using coordinate_type = T;
geometry()
: geometry_base<T>() {} // empty
#if __cpp_inheriting_constructors >= 200802
using geometry_base<T>::geometry_base;
#else
geometry() = default;
template <typename G>
geometry(G && geom)
: geometry_base<T>(std::forward<G>(geom)) {}
#endif
};

View file

@ -57,15 +57,18 @@ using json_value_base = mapnik::util::variant<value_null,
mapnik::util::recursive_wrapper<json_object> >;
struct json_value : json_value_base
{
#if __cpp_inheriting_constructors >= 200802
using json_value_base::json_value_base;
#else
#ifdef _WINDOWS
json_value() = default;
template <typename T>
json_value(T && val)
: json_value_base(std::forward<T>(val)) {}
#else
// MSVC 2015 inheriting constructors is not working in this context (support is apparently planned)
using json_value_base::json_value_base;
#endif
};

View file

@ -86,7 +86,7 @@ struct stringifier
{
if (first) first = false;
else str += ",";
str += kv.first;
str += "\"" + kv.first + "\"";
str += ":";
str += mapnik::util::apply_visitor(*this, kv.second);
}
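
To illustrate the effect of this change, a standalone sketch (not mapnik's stringifier itself; the key and value are hypothetical): object keys are now wrapped in quotes so the emitted text is valid JSON.

    #include <iostream>
    #include <string>

    int main()
    {
        std::string key = "name";          // hypothetical property key
        std::string value = "\"Mapnik\"";  // value already stringified
        std::string str = "{";
        // previously: str += key;  -> {name:"Mapnik"}  (key unquoted, invalid JSON)
        str += "\"" + key + "\"";          // fixed: quote the key
        str += ":" + value + "}";
        std::cout << str << "\n";          // {"name":"Mapnik"}
    }
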

View file

@ -43,6 +43,7 @@ using rgba_hash_table = std::unordered_map<unsigned int, unsigned char>;
// stl
#include <vector>
#include <tuple>
#define U2RED(x) ((x)&0xff)
#define U2GREEN(x) (((x)>>8)&0xff)
@ -53,7 +54,8 @@ namespace mapnik {
struct rgba;
struct MAPNIK_DECL rgb {
struct MAPNIK_DECL rgb
{
std::uint8_t r;
std::uint8_t g;
std::uint8_t b;
@ -92,7 +94,7 @@ struct MAPNIK_DECL rgba
b(U2BLUE(c)),
a(U2ALPHA(c)) {}
inline bool operator==(const rgba& y) const
inline bool operator==(rgba const& y) const
{
return r == y.r && g == y.g && b == y.b && a == y.a;
}
@ -103,18 +105,27 @@ struct MAPNIK_DECL rgba
bool operator() (const rgba& x, const rgba& y) const;
};
inline bool operator<(rgba const& y) const
{
return std::tie(r, g, b, a) < std::tie(y.r, y.g, y.b, y.a);
}
};
class MAPNIK_DECL rgba_palette : private util::noncopyable {
class MAPNIK_DECL rgba_palette : private util::noncopyable
{
public:
enum palette_type { PALETTE_RGBA = 0, PALETTE_RGB = 1, PALETTE_ACT = 2 };
explicit rgba_palette(std::string const& pal, palette_type type = PALETTE_RGBA);
rgba_palette();
const std::vector<rgb>& palette() const;
const std::vector<unsigned>& alphaTable() const;
inline std::vector<rgb> const& palette() const { return rgb_pal_;}
inline std::vector<unsigned> const& alpha_table() const { return alpha_pal_;}
inline std::vector<rgb>& palette() { return rgb_pal_;}
inline std::vector<unsigned>& alpha_table() { return alpha_pal_;}
unsigned char quantize(unsigned c) const;

View file

@ -51,6 +51,10 @@ struct value_holder : value_holder_base
value_holder()
: value_holder_base() {}
// C-string -> std::string
value_holder(char const* str)
: value_holder(std::string(str)) {}
// perfect forwarding
template <typename T>
value_holder(T && obj)

View file

@ -39,7 +39,7 @@ extern "C"
{
#include <png.h>
}
#include <set>
#pragma GCC diagnostic pop
#define MAX_OCTREE_LEVELS 4
@ -515,19 +515,19 @@ void save_as_png8_oct(T1 & file,
}
//transparency values per palette index
std::vector<unsigned> alphaTable;
//alphaTable.resize(palette.size());//allow semitransparency also in almost opaque range
std::vector<unsigned> alpha_table;
//alpha_table.resize(palette.size());//allow semitransparency also in almost opaque range
if (opts.trans_mode != 0)
{
alphaTable.resize(palette.size() - cols[TRANSPARENCY_LEVELS-1]);
alpha_table.resize(palette.size() - cols[TRANSPARENCY_LEVELS-1]);
}
if (palette.size() > 16 )
{
// >16 && <=256 colors -> write 8-bit color depth
image_gray8 reduced_image(width,height);
reduce_8(image, reduced_image, trees, limits, TRANSPARENCY_LEVELS, alphaTable);
save_as_png(file,palette,reduced_image,width,height,8,alphaTable,opts);
reduce_8(image, reduced_image, trees, limits, TRANSPARENCY_LEVELS, alpha_table);
save_as_png(file,palette,reduced_image,width,height,8,alpha_table,opts);
}
else if (palette.size() == 1)
{
@ -535,13 +535,13 @@ void save_as_png8_oct(T1 & file,
unsigned image_width = ((width + 15) >> 3) & ~1U; // 1-bit image, round up to 16-bit boundary
unsigned image_height = height;
image_gray8 reduced_image(image_width,image_height);
reduce_1(image,reduced_image,trees, limits, alphaTable);
reduce_1(image,reduced_image,trees, limits, alpha_table);
if (meanAlpha<255 && cols[0]==0)
{
alphaTable.resize(1);
alphaTable[0] = meanAlpha;
alpha_table.resize(1);
alpha_table[0] = meanAlpha;
}
save_as_png(file,palette,reduced_image,width,height,1,alphaTable,opts);
save_as_png(file,palette,reduced_image,width,height,1,alpha_table,opts);
}
else
{
@ -549,8 +549,8 @@ void save_as_png8_oct(T1 & file,
unsigned image_width = ((width + 7) >> 1) & ~3U; // 4-bit image, round up to 32-bit boundary
unsigned image_height = height;
image_gray8 reduced_image(image_width,image_height);
reduce_4(image, reduced_image, trees, limits, TRANSPARENCY_LEVELS, alphaTable);
save_as_png(file,palette,reduced_image,width,height,4,alphaTable,opts);
reduce_4(image, reduced_image, trees, limits, TRANSPARENCY_LEVELS, alpha_table);
save_as_png(file,palette,reduced_image,width,height,4,alpha_table,opts);
}
}
@ -560,7 +560,7 @@ void save_as_png8(T1 & file,
T2 const& image,
T3 const & tree,
std::vector<mapnik::rgb> const& palette,
std::vector<unsigned> const& alphaTable,
std::vector<unsigned> const& alpha_table,
png_options const& opts)
{
unsigned width = image.width();
@ -579,7 +579,7 @@ void save_as_png8(T1 & file,
row_out[x] = tree.quantize(row[x]);
}
}
save_as_png(file, palette, reduced_image, width, height, 8, alphaTable, opts);
save_as_png(file, palette, reduced_image, width, height, 8, alpha_table, opts);
}
else if (palette.size() == 1)
{
@ -588,7 +588,7 @@ void save_as_png8(T1 & file,
unsigned image_height = height;
image_gray8 reduced_image(image_width, image_height);
reduced_image.set(0);
save_as_png(file, palette, reduced_image, width, height, 1, alphaTable, opts);
save_as_png(file, palette, reduced_image, width, height, 1, alpha_table, opts);
}
else
{
@ -612,7 +612,7 @@ void save_as_png8(T1 & file,
row_out[x>>1] |= index;
}
}
save_as_png(file, palette, reduced_image, width, height, 4, alphaTable, opts);
save_as_png(file, palette, reduced_image, width, height, 4, alpha_table, opts);
}
}
@ -623,6 +623,7 @@ void save_as_png8_hex(T1 & file,
{
unsigned width = image.width();
unsigned height = image.height();
if (width + height > 3) // at least 3 pixels (hextree implementation requirement)
{
// structure for color quantization
@ -647,20 +648,44 @@ void save_as_png8_hex(T1 & file,
}
//transparency values per palette index
std::vector<mapnik::rgba> pal;
tree.create_palette(pal);
std::vector<mapnik::rgba> rgba_palette;
tree.create_palette(rgba_palette);
auto size = rgba_palette.size();
std::vector<mapnik::rgb> palette;
std::vector<unsigned> alphaTable;
for (unsigned i=0; i<pal.size(); ++i)
std::vector<unsigned> alpha_table;
palette.reserve(size);
alpha_table.reserve(size);
for (auto const& c : rgba_palette)
{
palette.push_back(rgb(pal[i].r, pal[i].g, pal[i].b));
alphaTable.push_back(pal[i].a);
palette.emplace_back(c.r, c.g, c.b);
alpha_table.push_back(c.a);
}
save_as_png8<T1, T2, hextree<mapnik::rgba> >(file, image, tree, palette, alphaTable, opts);
save_as_png8<T1, T2, hextree<mapnik::rgba> >(file, image, tree, palette, alpha_table, opts);
}
else
{
throw std::runtime_error("Can't quantize images with less than 3 pixels");
std::set<mapnik::rgba> colors;
for (unsigned y = 0; y < height; ++y)
{
typename T2::pixel_type const * row = image.get_row(y);
for (unsigned x = 0; x < width; ++x)
{
unsigned val = row[x];
colors.emplace(U2RED(val), U2GREEN(val), U2BLUE(val), U2ALPHA(val));
}
}
std::string str;
for (auto c : colors)
{
str.push_back(c.r);
str.push_back(c.g);
str.push_back(c.b);
str.push_back(c.a);
}
rgba_palette pal(str, rgba_palette::PALETTE_RGBA);
save_as_png8<T1, T2, rgba_palette>(file, image, pal, pal.palette(), pal.alpha_table(), opts);
}
}
@ -670,7 +695,7 @@ void save_as_png8_pal(T1 & file,
rgba_palette const& pal,
png_options const& opts)
{
save_as_png8<T1, T2, rgba_palette>(file, image, pal, pal.palette(), pal.alphaTable(), opts);
save_as_png8<T1, T2, rgba_palette>(file, image, pal, pal.palette(), pal.alpha_table(), opts);
}
}

View file

@ -101,7 +101,7 @@ struct strict_value : value_base_type
strict_value() = default;
strict_value(const char* val)
: value_base_type(val) {}
: value_base_type(std::string(val)) {}
template <typename T>
strict_value(T const& obj)

View file

@ -51,13 +51,13 @@ public:
// unary visitor interface
// const
template <typename F, typename V>
auto VARIANT_INLINE static apply_visitor(F && f, V const& v) -> decltype(V::visit(v, f))
auto VARIANT_INLINE static apply_visitor(F && f, V const& v) -> decltype(V::visit(v, std::forward<F>(f)))
{
return V::visit(v, std::forward<F>(f));
}
// non-const
template <typename F, typename V>
auto VARIANT_INLINE static apply_visitor(F && f, V & v) -> decltype(V::visit(v, f))
auto VARIANT_INLINE static apply_visitor(F && f, V & v) -> decltype(V::visit(v, std::forward<F>(f)))
{
return V::visit(v, std::forward<F>(f));
}
@ -65,14 +65,14 @@ auto VARIANT_INLINE static apply_visitor(F && f, V & v) -> decltype(V::visit(v,
// binary visitor interface
// const
template <typename F, typename V>
auto VARIANT_INLINE static apply_visitor(F && f, V const& v0, V const& v1) -> decltype(V::binary_visit(v0, v1, f))
auto VARIANT_INLINE static apply_visitor(F && f, V const& v0, V const& v1) -> decltype(V::binary_visit(v0, v1, std::forward<F>(f)))
{
return V::binary_visit(v0, v1, std::forward<F>(f));
}
// non-const
template <typename F, typename V>
auto VARIANT_INLINE static apply_visitor(F && f, V & v0, V & v1) -> decltype(V::binary_visit(v0, v1, f))
auto VARIANT_INLINE static apply_visitor(F && f, V & v0, V & v1) -> decltype(V::binary_visit(v0, v1, std::forward<F>(f)))
{
return V::binary_visit(v0, v1, std::forward<F>(f));
}

View file

@ -55,7 +55,7 @@ struct do_xml_attribute_cast
{
static inline boost::optional<T> xml_attribute_cast_impl(xml_tree const& /*tree*/, std::string const& /*source*/)
{
std::string err_msg("No conversion from std::string to");
std::string err_msg("No conversion from std::string to ");
err_msg += std::string(typeid(T).name());
throw std::runtime_error(err_msg);
}
@ -74,6 +74,19 @@ struct do_xml_attribute_cast<mapnik::boolean_type>
}
};
// specialization for mapnik::value_bool
template <>
struct do_xml_attribute_cast<mapnik::value_bool>
{
static inline boost::optional<mapnik::value_bool> xml_attribute_cast_impl(xml_tree const& /*tree*/, std::string const& source)
{
bool result;
if (mapnik::util::string2bool(source, result))
return boost::optional<mapnik::value_bool>(result);
return boost::optional<mapnik::value_bool>();
}
};
// specialization for int
template <>
struct do_xml_attribute_cast<int>

View file

@ -431,7 +431,7 @@ mapnik::featureset_ptr csv_datasource::features(mapnik::query const& q) const
return std::make_shared<csv_index_featureset>(filename_, filter, locator_, separator_, quote_, headers_, ctx_);
}
}
return mapnik::make_empty_featureset();
return mapnik::make_invalid_featureset();
}
mapnik::featureset_ptr csv_datasource::features_at_point(mapnik::coord2d const& pt, double tol) const

View file

@ -116,7 +116,8 @@ geojson_datasource::geojson_datasource(parameters const& params)
inline_string_(),
extent_(),
features_(),
tree_(nullptr)
tree_(nullptr),
num_features_to_query_(*params.get<mapnik::value_integer>("num_features_to_query",5))
{
boost::optional<std::string> inline_string = params.get<std::string>("inline");
if (inline_string)
@ -233,7 +234,7 @@ void geojson_datasource::initialise_disk_index(std::string const& filename)
mapnik::util::file file(filename_);
if (!file) throw mapnik::datasource_exception("GeoJSON Plugin: could not open: '" + filename_ + "'");
mapnik::context_ptr ctx = std::make_shared<mapnik::context_type>();
for (auto const& pos : positions)
{
std::fseek(file.get(), pos.first, SEEK_SET);
@ -242,8 +243,7 @@ void geojson_datasource::initialise_disk_index(std::string const& filename)
std::fread(record.data(), pos.second, 1, file.get());
auto const* start = record.data();
auto const* end = start + record.size();
mapnik::context_ptr ctx = std::make_shared<mapnik::context_type>();
mapnik::feature_ptr feature(mapnik::feature_factory::create(ctx,1));
mapnik::feature_ptr feature(mapnik::feature_factory::create(ctx, -1));
using namespace boost::spirit;
standard::space_type space;
if (!boost::spirit::qi::phrase_parse(start, end,
@ -254,6 +254,7 @@ void geojson_datasource::initialise_disk_index(std::string const& filename)
}
initialise_descriptor(feature);
}
desc_.order_by_name();
}
template <typename Iterator>
@ -314,6 +315,7 @@ void geojson_datasource::initialise_index(Iterator start, Iterator end)
tree_ = std::make_unique<spatial_index_type>(boxes);
// calculate total extent
std::size_t feature_count = 0;
mapnik::context_ptr ctx = std::make_shared<mapnik::context_type>();
for (auto const& item : boxes)
{
auto const& box = std::get<0>(item);
@ -326,7 +328,6 @@ void geojson_datasource::initialise_index(Iterator start, Iterator end)
// NOTE: this doesn't yield correct answer for geoJSON in general, just an indication
Iterator itr2 = start + geometry_index.first;
Iterator end2 = itr2 + geometry_index.second;
mapnik::context_ptr ctx = std::make_shared<mapnik::context_type>();
mapnik::feature_ptr feature(mapnik::feature_factory::create(ctx,-1)); // temp feature
if (!boost::spirit::qi::phrase_parse(itr2, end2,
(geojson_datasource_static_feature_grammar)(boost::phoenix::ref(*feature)), space)
@ -339,6 +340,7 @@ void geojson_datasource::initialise_index(Iterator start, Iterator end)
}
}
}
desc_.order_by_name();
}
template <typename Iterator>
@ -355,7 +357,7 @@ void geojson_datasource::parse_geojson(Iterator start, Iterator end)
try
{
bool result = boost::spirit::qi::phrase_parse(itr, end, (geojson_datasource_static_fc_grammar)
(boost::phoenix::ref(ctx),boost::phoenix::ref(start_id), boost::phoenix::ref(callback)),
(boost::phoenix::ref(ctx), boost::phoenix::ref(start_id), boost::phoenix::ref(callback)),
space);
if (!result || itr != end)
{
@ -457,7 +459,7 @@ boost::optional<mapnik::datasource_geometry_t> geojson_datasource::get_geometry_
mapnik::util::file file(filename_);
if (!file) throw mapnik::datasource_exception("GeoJSON Plugin: could not open: '" + filename_ + "'");
mapnik::context_ptr ctx = std::make_shared<mapnik::context_type>();
for (auto const& pos : positions)
{
std::fseek(file.get(), pos.first, SEEK_SET);
@ -466,7 +468,6 @@ boost::optional<mapnik::datasource_geometry_t> geojson_datasource::get_geometry_
std::fread(record.data(), pos.second, 1, file.get());
auto const* start = record.data();
auto const* end = start + record.size();
mapnik::context_ptr ctx = std::make_shared<mapnik::context_type>();
mapnik::feature_ptr feature(mapnik::feature_factory::create(ctx, -1)); // temp feature
using namespace boost::spirit;
standard::space_type space;
@ -588,7 +589,7 @@ mapnik::featureset_ptr geojson_datasource::features(mapnik::query const& q) cons
}
// otherwise return an empty featureset
return mapnik::make_empty_featureset();
return mapnik::make_invalid_featureset();
}
mapnik::featureset_ptr geojson_datasource::features_at_point(mapnik::coord2d const& pt, double tol) const

View file

@ -104,7 +104,7 @@ private:
std::unique_ptr<spatial_index_type> tree_;
bool cache_features_ = true;
bool has_disk_index_ = false;
const std::size_t num_features_to_query_ = 5;
const std::size_t num_features_to_query_;
};

View file

@ -560,7 +560,7 @@ featureset_ptr ogr_datasource::features(query const& q) const
}
}
return mapnik::make_empty_featureset();
return mapnik::make_invalid_featureset();
}
featureset_ptr ogr_datasource::features_at_point(coord2d const& pt, double tol) const
@ -603,5 +603,5 @@ featureset_ptr ogr_datasource::features_at_point(coord2d const& pt, double tol)
}
}
return mapnik::make_empty_featureset();
return mapnik::make_invalid_featureset();
}

View file

@ -998,7 +998,7 @@ featureset_ptr pgraster_datasource::features_with_context(query const& q,process
}
return mapnik::make_empty_featureset();
return mapnik::make_invalid_featureset();
}
@ -1011,7 +1011,7 @@ featureset_ptr pgraster_datasource::features_at_point(coord2d const& pt, double
if (pool)
{
shared_ptr<Connection> conn = pool->borrowObject();
if (!conn) return mapnik::make_empty_featureset();
if (!conn) return mapnik::make_invalid_featureset();
if (conn->isOK())
{
@ -1082,7 +1082,7 @@ featureset_ptr pgraster_datasource::features_at_point(coord2d const& pt, double
}
}
return mapnik::make_empty_featureset();
return mapnik::make_invalid_featureset();
}
box2d<double> pgraster_datasource::envelope() const

View file

@ -479,7 +479,7 @@ postgis_datasource::postgis_datasource(parameters const& params)
// Finally, add unique metadata to layer descriptor
mapnik::parameters & extra_params = desc_.get_extra_parameters();
// explicitly make copies of values due to https://github.com/mapnik/mapnik/issues/2651
extra_params["srid"] = srid_;
extra_params["srid"] = mapnik::value_integer(srid_);
if (!key_field_.empty())
{
extra_params["key_field"] = key_field_;
@ -942,7 +942,7 @@ featureset_ptr postgis_datasource::features_with_context(query const& q,processo
}
return mapnik::make_empty_featureset();
return mapnik::make_invalid_featureset();
}
@ -955,7 +955,7 @@ featureset_ptr postgis_datasource::features_at_point(coord2d const& pt, double t
if (pool)
{
shared_ptr<Connection> conn = pool->borrowObject();
if (!conn) return mapnik::make_empty_featureset();
if (!conn) return mapnik::make_invalid_featureset();
if (conn->isOK())
{
@ -1030,7 +1030,7 @@ featureset_ptr postgis_datasource::features_at_point(coord2d const& pt, double t
}
}
return mapnik::make_empty_featureset();
return mapnik::make_invalid_featureset();
}
box2d<double> postgis_datasource::envelope() const

View file

@ -224,5 +224,5 @@ featureset_ptr raster_datasource::features_at_point(coord2d const&, double tol)
{
MAPNIK_LOG_WARN(raster) << "raster_datasource: feature_at_point not supported";
return mapnik::make_empty_featureset();
return mapnik::make_invalid_featureset();
}

View file

@ -551,7 +551,7 @@ featureset_ptr sqlite_datasource::features(query const& q) const
using_subquery_);
}
return mapnik::make_empty_featureset();
return mapnik::make_invalid_featureset();
}
featureset_ptr sqlite_datasource::features_at_point(coord2d const& pt, double tol) const
@ -631,5 +631,5 @@ featureset_ptr sqlite_datasource::features_at_point(coord2d const& pt, double to
using_subquery_);
}
return mapnik::make_empty_featureset();
return mapnik::make_invalid_featureset();
}

View file

@ -284,7 +284,7 @@ mapnik::featureset_ptr topojson_datasource::features(mapnik::query const& q) con
}
}
// otherwise return an empty featureset pointer
return mapnik::make_empty_featureset();
return mapnik::make_invalid_featureset();
}
mapnik::featureset_ptr topojson_datasource::features_at_point(mapnik::coord2d const& pt, double tol) const

View file

@ -3,7 +3,7 @@
This copyright and license do not apply to any other software
with which this software may have been included.
Copyright (c) 2001 - 2015 The SCons Foundation
Copyright (c) 2001 - 2016 The SCons Foundation
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the

View file

@ -1,4 +1,4 @@
# Copyright (c) 2001 - 2015 The SCons Foundation
# Copyright (c) 2001 - 2016 The SCons Foundation
SCons - a software construction tool

View file

@ -1,45 +0,0 @@
#
# Copyright (c) 2001 - 2015 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__doc__ = """
collections compatibility module for older (pre-2.4) Python versions
This does not not NOT (repeat, *NOT*) provide complete collections
functionality. It only wraps the portions of collections functionality
used by SCons, in an interface that looks enough like collections for
our purposes.
"""
__revision__ = "src/engine/SCons/compat/_scons_collections.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
# Use exec to hide old names from fixers.
exec("""if True:
from UserDict import UserDict
from UserList import UserList
from UserString import UserString""")
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:

View file

@ -1,563 +0,0 @@
"""Classes to represent arbitrary sets (including sets of sets).
This module implements sets using dictionaries whose values are
ignored. The usual operations (union, intersection, deletion, etc.)
are provided as both methods and operators.
Important: sets are not sequences! While they support 'x in s',
'len(s)', and 'for x in s', none of those operations are unique for
sequences; for example, mappings support all three as well. The
characteristic operation for sequences is subscripting with small
integers: s[i], for i in range(len(s)). Sets don't support
subscripting at all. Also, sequences allow multiple occurrences and
their elements have a definite order; sets on the other hand don't
record multiple occurrences and don't remember the order of element
insertion (which is why they don't support s[i]).
The following classes are provided:
BaseSet -- All the operations common to both mutable and immutable
sets. This is an abstract class, not meant to be directly
instantiated.
Set -- Mutable sets, subclass of BaseSet; not hashable.
ImmutableSet -- Immutable sets, subclass of BaseSet; hashable.
An iterable argument is mandatory to create an ImmutableSet.
_TemporarilyImmutableSet -- A wrapper around a Set, hashable,
giving the same hash value as the immutable set equivalent
would have. Do not use this class directly.
Only hashable objects can be added to a Set. In particular, you cannot
really add a Set as an element to another Set; if you try, what is
actually added is an ImmutableSet built from it (it compares equal to
the one you tried adding).
When you ask if `x in y' where x is a Set and y is a Set or
ImmutableSet, x is wrapped into a _TemporarilyImmutableSet z, and
what's tested is actually `z in y'.
"""
# Code history:
#
# - Greg V. Wilson wrote the first version, using a different approach
# to the mutable/immutable problem, and inheriting from dict.
#
# - Alex Martelli modified Greg's version to implement the current
# Set/ImmutableSet approach, and make the data an attribute.
#
# - Guido van Rossum rewrote much of the code, made some API changes,
# and cleaned up the docstrings.
#
# - Raymond Hettinger added a number of speedups and other
# improvements.
# protect this import from the fixers...
exec('from itertools import ifilterfalse as filterfalse')
__all__ = ['BaseSet', 'Set', 'ImmutableSet']
class BaseSet(object):
"""Common base class for mutable and immutable sets."""
__slots__ = ['_data']
# Constructor
def __init__(self):
"""This is an abstract class."""
# Don't call this from a concrete subclass!
if self.__class__ is BaseSet:
raise TypeError("BaseSet is an abstract class. "
"Use Set or ImmutableSet.")
# Standard protocols: __len__, __repr__, __str__, __iter__
def __len__(self):
"""Return the number of elements of a set."""
return len(self._data)
def __repr__(self):
"""Return string representation of a set.
This looks like 'Set([<list of elements>])'.
"""
return self._repr()
# __str__ is the same as __repr__
__str__ = __repr__
def _repr(self, sort_them=False):
elements = list(self._data.keys())
if sort_them:
elements.sort()
return '%s(%r)' % (self.__class__.__name__, elements)
def __iter__(self):
"""Return an iterator over the elements or a set.
This is the keys iterator for the underlying dict.
"""
# Wrapping name in () prevents fixer from "fixing" this
return (self._data.iterkeys)()
# Three-way comparison is not supported. However, because __eq__ is
# tried before __cmp__, if Set x == Set y, x.__eq__(y) returns True and
# then cmp(x, y) returns 0 (Python doesn't actually call __cmp__ in this
# case).
def __cmp__(self, other):
raise TypeError("can't compare sets using cmp()")
# Equality comparisons using the underlying dicts. Mixed-type comparisons
# are allowed here, where Set == z for non-Set z always returns False,
# and Set != z always True. This allows expressions like "x in y" to
# give the expected result when y is a sequence of mixed types, not
# raising a pointless TypeError just because y contains a Set, or x is
# a Set and y contain's a non-set ("in" invokes only __eq__).
# Subtle: it would be nicer if __eq__ and __ne__ could return
# NotImplemented instead of True or False. Then the other comparand
# would get a chance to determine the result, and if the other comparand
# also returned NotImplemented then it would fall back to object address
# comparison (which would always return False for __eq__ and always
# True for __ne__). However, that doesn't work, because this type
# *also* implements __cmp__: if, e.g., __eq__ returns NotImplemented,
# Python tries __cmp__ next, and the __cmp__ here then raises TypeError.
def __eq__(self, other):
if isinstance(other, BaseSet):
return self._data == other._data
else:
return False
def __ne__(self, other):
if isinstance(other, BaseSet):
return self._data != other._data
else:
return True
# Copying operations
def copy(self):
"""Return a shallow copy of a set."""
result = self.__class__()
result._data.update(self._data)
return result
__copy__ = copy # For the copy module
def __deepcopy__(self, memo):
"""Return a deep copy of a set; used by copy module."""
# This pre-creates the result and inserts it in the memo
# early, in case the deep copy recurses into another reference
# to this same set. A set can't be an element of itself, but
# it can certainly contain an object that has a reference to
# itself.
from copy import deepcopy
result = self.__class__()
memo[id(self)] = result
data = result._data
value = True
for elt in self:
data[deepcopy(elt, memo)] = value
return result
# Standard set operations: union, intersection, both differences.
# Each has an operator version (e.g. __or__, invoked with |) and a
# method version (e.g. union).
# Subtle: Each pair requires distinct code so that the outcome is
# correct when the type of other isn't suitable. For example, if
# we did "union = __or__" instead, then Set().union(3) would return
# NotImplemented instead of raising TypeError (albeit that *why* it
# raises TypeError as-is is also a bit subtle).
def __or__(self, other):
"""Return the union of two sets as a new set.
(I.e. all elements that are in either set.)
"""
if not isinstance(other, BaseSet):
return NotImplemented
return self.union(other)
def union(self, other):
"""Return the union of two sets as a new set.
(I.e. all elements that are in either set.)
"""
result = self.__class__(self)
result._update(other)
return result
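# Illustration of the subtlety noted above (hypothetical values):
#   Set([1]) | [2]          -> __or__ returns NotImplemented, so Python raises TypeError
#   Set([1]).union([2])     -> Set([1, 2]); union() accepts any iterable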
def __and__(self, other):
"""Return the intersection of two sets as a new set.
(I.e. all elements that are in both sets.)
"""
if not isinstance(other, BaseSet):
return NotImplemented
return self.intersection(other)
def intersection(self, other):
"""Return the intersection of two sets as a new set.
(I.e. all elements that are in both sets.)
"""
if not isinstance(other, BaseSet):
other = Set(other)
if len(self) <= len(other):
little, big = self, other
else:
little, big = other, self
common = iter(filter(big._data.has_key, little))
return self.__class__(common)
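# Sketch of the size check above (hypothetical values): only the smaller
# operand is iterated, so
#   Set([1, 2, 3]).intersection(Set(range(1000000)))
# walks the three-element side and filters it against the larger set's dict.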
def __xor__(self, other):
"""Return the symmetric difference of two sets as a new set.
(I.e. all elements that are in exactly one of the sets.)
"""
if not isinstance(other, BaseSet):
return NotImplemented
return self.symmetric_difference(other)
def symmetric_difference(self, other):
"""Return the symmetric difference of two sets as a new set.
(I.e. all elements that are in exactly one of the sets.)
"""
result = self.__class__()
data = result._data
value = True
selfdata = self._data
try:
otherdata = other._data
except AttributeError:
otherdata = Set(other)._data
for elt in filterfalse(otherdata.has_key, selfdata):
data[elt] = value
for elt in filterfalse(selfdata.has_key, otherdata):
data[elt] = value
return result
def __sub__(self, other):
"""Return the difference of two sets as a new Set.
(I.e. all elements that are in this set and not in the other.)
"""
if not isinstance(other, BaseSet):
return NotImplemented
return self.difference(other)
def difference(self, other):
"""Return the difference of two sets as a new Set.
(I.e. all elements that are in this set and not in the other.)
"""
result = self.__class__()
data = result._data
try:
otherdata = other._data
except AttributeError:
otherdata = Set(other)._data
value = True
for elt in filterfalse(otherdata.has_key, self):
data[elt] = value
return result
# Membership test
def __contains__(self, element):
"""Report whether an element is a member of a set.
(Called in response to the expression `element in self'.)
"""
try:
return element in self._data
except TypeError:
transform = getattr(element, "__as_temporarily_immutable__", None)
if transform is None:
raise # re-raise the TypeError exception we caught
return transform() in self._data
# Subset and superset test
def issubset(self, other):
"""Report whether another set contains this set."""
self._binary_sanity_check(other)
if len(self) > len(other): # Fast check for obvious cases
return False
for elt in filterfalse(other._data.has_key, self):
return False
return True
def issuperset(self, other):
"""Report whether this set contains another set."""
self._binary_sanity_check(other)
if len(self) < len(other): # Fast check for obvious cases
return False
for elt in filterfalse(self._data.has_key, other):
return False
return True
# Inequality comparisons using the is-subset relation.
__le__ = issubset
__ge__ = issuperset
def __lt__(self, other):
self._binary_sanity_check(other)
return len(self) < len(other) and self.issubset(other)
def __gt__(self, other):
self._binary_sanity_check(other)
return len(self) > len(other) and self.issuperset(other)
# Assorted helpers
def _binary_sanity_check(self, other):
# Check that the other argument to a binary operation is also
# a set, raising a TypeError otherwise.
if not isinstance(other, BaseSet):
raise TypeError("Binary operation only permitted between sets")
def _compute_hash(self):
# Calculate hash code for a set by xor'ing the hash codes of
# the elements. This ensures that the hash code does not depend
# on the order in which elements are added to the set. This is
# not called __hash__ because a BaseSet should not be hashable;
# only an ImmutableSet is hashable.
result = 0
for elt in self:
result ^= hash(elt)
return result
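# Illustration (hypothetical values): xor is commutative, so the hash does not
# depend on insertion order:
#   hash(ImmutableSet([1, 2])) == hash(ImmutableSet([2, 1]))
# both reduce to hash(1) ^ hash(2).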
def _update(self, iterable):
# The main loop for update() and the subclass __init__() methods.
data = self._data
# Use the fast update() method when a dictionary is available.
if isinstance(iterable, BaseSet):
data.update(iterable._data)
return
value = True
if type(iterable) in (list, tuple, xrange):
# Optimized: we know that __iter__() and next() can't
# raise TypeError, so we can move 'try:' out of the loop.
it = iter(iterable)
while True:
try:
for element in it:
data[element] = value
return
except TypeError:
transform = getattr(element, "__as_immutable__", None)
if transform is None:
raise # re-raise the TypeError exception we caught
data[transform()] = value
else:
# Safe: only catch TypeError where intended
for element in iterable:
try:
data[element] = value
except TypeError:
transform = getattr(element, "__as_immutable__", None)
if transform is None:
raise # re-raise the TypeError exception we caught
data[transform()] = value
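# Illustration (hypothetical values): _update takes the fast dict.update path
# when given another BaseSet, and inserts element by element otherwise:
#   Set(Set([1, 2]))      # fast path: dict.update on the other set's _data
#   Set([[1], [2]])       # element path: lists are unhashable and define no
#                         # __as_immutable__, so the TypeError propagates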
class ImmutableSet(BaseSet):
"""Immutable set class."""
__slots__ = ['_hashcode']
# BaseSet + hashing
def __init__(self, iterable=None):
"""Construct an immutable set from an optional iterable."""
self._hashcode = None
self._data = {}
if iterable is not None:
self._update(iterable)
def __hash__(self):
if self._hashcode is None:
self._hashcode = self._compute_hash()
return self._hashcode
def __getstate__(self):
return self._data, self._hashcode
def __setstate__(self, state):
self._data, self._hashcode = state
class Set(BaseSet):
""" Mutable set class."""
__slots__ = []
# BaseSet + operations requiring mutability; no hashing
def __init__(self, iterable=None):
"""Construct a set from an optional iterable."""
self._data = {}
if iterable is not None:
self._update(iterable)
def __getstate__(self):
# getstate's results are ignored if it is not
return self._data,
def __setstate__(self, data):
self._data, = data
def __hash__(self):
"""A Set cannot be hashed."""
# We inherit object.__hash__, so we must deny this explicitly
raise TypeError("Can't hash a Set, only an ImmutableSet.")
# In-place union, intersection, differences.
# Subtle: The xyz_update() functions deliberately return None,
# as do all mutating operations on built-in container types.
# The __xyz__ spellings have to return self, though.
def __ior__(self, other):
"""Update a set with the union of itself and another."""
self._binary_sanity_check(other)
self._data.update(other._data)
return self
def union_update(self, other):
"""Update a set with the union of itself and another."""
self._update(other)
def __iand__(self, other):
"""Update a set with the intersection of itself and another."""
self._binary_sanity_check(other)
self._data = (self & other)._data
return self
def intersection_update(self, other):
"""Update a set with the intersection of itself and another."""
if isinstance(other, BaseSet):
self &= other
else:
self._data = (self.intersection(other))._data
def __ixor__(self, other):
"""Update a set with the symmetric difference of itself and another."""
self._binary_sanity_check(other)
self.symmetric_difference_update(other)
return self
def symmetric_difference_update(self, other):
"""Update a set with the symmetric difference of itself and another."""
data = self._data
value = True
if not isinstance(other, BaseSet):
other = Set(other)
if self is other:
self.clear()
for elt in other:
if elt in data:
del data[elt]
else:
data[elt] = value
def __isub__(self, other):
"""Remove all elements of another set from this set."""
self._binary_sanity_check(other)
self.difference_update(other)
return self
def difference_update(self, other):
"""Remove all elements of another set from this set."""
data = self._data
if not isinstance(other, BaseSet):
other = Set(other)
if self is other:
self.clear()
for elt in filter(data.has_key, other):
del data[elt]
# Python dict-like mass mutations: update, clear
def update(self, iterable):
"""Add all values from an iterable (such as a list or file)."""
self._update(iterable)
def clear(self):
"""Remove all elements from this set."""
self._data.clear()
# Single-element mutations: add, remove, discard
def add(self, element):
"""Add an element to a set.
This has no effect if the element is already present.
"""
try:
self._data[element] = True
except TypeError:
transform = getattr(element, "__as_immutable__", None)
if transform is None:
raise # re-raise the TypeError exception we caught
self._data[transform()] = True
def remove(self, element):
"""Remove an element from a set; it must be a member.
If the element is not a member, raise a KeyError.
"""
try:
del self._data[element]
except TypeError:
transform = getattr(element, "__as_temporarily_immutable__", None)
if transform is None:
raise # re-raise the TypeError exception we caught
del self._data[transform()]
def discard(self, element):
"""Remove an element from a set if it is a member.
If the element is not a member, do nothing.
"""
try:
self.remove(element)
except KeyError:
pass
def pop(self):
"""Remove and return an arbitrary set element."""
return self._data.popitem()[0]
def __as_immutable__(self):
# Return a copy of self as an immutable set
return ImmutableSet(self)
def __as_temporarily_immutable__(self):
# Return self wrapped in a temporarily immutable set
return _TemporarilyImmutableSet(self)
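# Rough sketch of how the two hooks above are used (hypothetical values):
#   s = Set()
#   s.add(Set([1, 2]))    # unhashable, so stored via __as_immutable__ as ImmutableSet([1, 2])
#   Set([1, 2]) in s      # True; __contains__ retries via __as_temporarily_immutable__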
class _TemporarilyImmutableSet(BaseSet):
# Wrap a mutable set as if it was temporarily immutable.
# This only supplies hashing and equality comparisons.
def __init__(self, set):
self._set = set
self._data = set._data # Needed by ImmutableSet.__eq__()
def __hash__(self):
return self._set._compute_hash()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:

File diff suppressed because it is too large

View file

@ -76,7 +76,7 @@ way for wrapping up the functions.
"""
# Copyright (c) 2001 - 2015 The SCons Foundation
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -97,7 +97,7 @@ way for wrapping up the functions.
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Action.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
__revision__ = "src/engine/SCons/Action.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
import dis
import os
@ -235,11 +235,7 @@ def _code_contents(code):
# The code contents depends on the number of local variables
# but not their actual names.
contents.append("%s,%s" % (code.co_argcount, len(code.co_varnames)))
try:
contents.append(",%s,%s" % (len(code.co_cellvars), len(code.co_freevars)))
except AttributeError:
# Older versions of Python do not support closures.
contents.append(",0,0")
# The code contents depends on any constants accessed by the
# function. Note that we have to call _object_contents on each
@ -276,11 +272,7 @@ def _function_contents(func):
contents.append(',()')
# The function contents depends on the closure captured cell values.
try:
closure = func.func_closure or []
except AttributeError:
# Older versions of Python do not support closures.
closure = []
#xxx = [_object_contents(x.cell_contents) for x in closure]
try:
@ -946,7 +938,6 @@ class LazyAction(CommandGeneratorAction, CommandAction):
def __init__(self, var, kw):
if SCons.Debug.track_instances: logInstanceCreation(self, 'Action.LazyAction')
#FUTURE CommandAction.__init__(self, '${'+var+'}', **kw)
CommandAction.__init__(self, '${'+var+'}', **kw)
self.var = SCons.Util.to_String(var)
self.gen_kw = kw

View file

@ -76,7 +76,7 @@ There are the following methods for internal use within this module:
"""
#
# Copyright (c) 2001 - 2015 The SCons Foundation
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -97,7 +97,7 @@ There are the following methods for internal use within this module:
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Builder.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
__revision__ = "src/engine/SCons/Builder.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
import collections
@ -299,7 +299,7 @@ def _node_errors(builder, env, tlist, slist):
msg = "Two different environments were specified for target %s,\n\tbut they appear to have the same action: %s" % (t, action.genstring(tlist, slist, t.env))
SCons.Warnings.warn(SCons.Warnings.DuplicateEnvironmentWarning, msg)
else:
msg = "Two environments with different actions were specified for the same target: %s" % t
msg = "Two environments with different actions were specified for the same target: %s\n(action 1: %s)\n(action 2: %s)" % (t,t_contents,contents)
raise UserError(msg)
if builder.multi:
if t.builder != builder:

View file

@ -1,5 +1,5 @@
#
# Copyright (c) 2001 - 2015 The SCons Foundation
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -21,17 +21,19 @@
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/CacheDir.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
__revision__ = "src/engine/SCons/CacheDir.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
__doc__ = """
CacheDir support
"""
import os.path
import json
import os
import stat
import sys
import SCons.Action
import SCons.Warnings
cache_enabled = True
cache_debug = False
@ -71,7 +73,8 @@ CacheRetrieve = SCons.Action.Action(CacheRetrieveFunc, CacheRetrieveString)
CacheRetrieveSilent = SCons.Action.Action(CacheRetrieveFunc, None)
def CachePushFunc(target, source, env):
if cache_readonly: return
if cache_readonly:
return
t = target[0]
if t.nocache:
@ -124,6 +127,10 @@ def CachePushFunc(target, source, env):
CachePush = SCons.Action.Action(CachePushFunc, None)
# Nasty hack to cut down to one warning for each cachedir path that needs
# upgrading.
warned = dict()
class CacheDir(object):
def __init__(self, path):
@ -132,11 +139,63 @@ class CacheDir(object):
except ImportError:
msg = "No hashlib or MD5 module available, CacheDir() not supported"
SCons.Warnings.warn(SCons.Warnings.NoMD5ModuleWarning, msg)
self.path = None
else:
path = None
self.path = path
self.current_cache_debug = None
self.debugFP = None
self.config = dict()
if path is None:
return
# See if there's a config file in the cache directory. If there is,
# use it. If there isn't, and the directory exists and isn't empty,
# produce a warning. If the directory doesn't exist or is empty,
# write a config file.
config_file = os.path.join(path, 'config')
if not os.path.exists(config_file):
# A note: There is a race hazard here, if two processes start and
# attempt to create the cache directory at the same time. However,
# python doesn't really give you the option to do exclusive file
# creation (it doesn't even give you the option to error on opening
# an existing file for writing...). The ordering of events here
# as an attempt to alleviate this, on the basis that it's a pretty
# unlikely occurence (it'd require two builds with a brand new cache
# directory)
if os.path.isdir(path) and len(os.listdir(path)) != 0:
self.config['prefix_len'] = 1
# When building the project I was testing this on, the warning
# was output over 20 times. That seems excessive
global warned
if self.path not in warned:
msg = "Please upgrade your cache by running " +\
" scons-configure-cache.py " + self.path
SCons.Warnings.warn(SCons.Warnings.CacheVersionWarning, msg)
warned[self.path] = True
else:
if not os.path.isdir(path):
try:
os.makedirs(path)
except OSError:
# If someone else is trying to create the directory at
# the same time as me, bad things will happen
msg = "Failed to create cache directory " + path
raise SCons.Errors.EnvironmentError(msg)
self.config['prefix_len'] = 2
if not os.path.exists(config_file):
try:
with open(config_file, 'w') as config:
json.dump(self.config, config)
except:
msg = "Failed to write cache configuration for " + path
raise SCons.Errors.EnvironmentError(msg)
else:
try:
with open(config_file) as config:
self.config = json.load(config)
except ValueError:
msg = "Failed to read cache configuration for " + path
raise SCons.Errors.EnvironmentError(msg)
def CacheDebug(self, fmt, target, cachefile):
if cache_debug != self.current_cache_debug:
@ -151,7 +210,7 @@ class CacheDir(object):
self.debugFP.write(fmt % (target, os.path.split(cachefile)[1]))
def is_enabled(self):
return (cache_enabled and not self.path is None)
return cache_enabled and not self.path is None
def is_readonly(self):
return cache_readonly
@ -163,7 +222,7 @@ class CacheDir(object):
return None, None
sig = node.get_cachedir_bsig()
subdir = sig[0].upper()
subdir = sig[:self.config['prefix_len']].upper()
dir = os.path.join(self.path, subdir)
return dir, os.path.join(dir, sig)
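# Editor's note (illustration, not part of the diff above): the cache 'config'
# file written by CacheDir.__init__ is plain JSON, e.g. {"prefix_len": 2}, and
# the path-building code in the previous hunk uses that length, so an entry
# whose signature starts with "ab12" lands under <cachedir>/AB/ab12...; legacy
# one-character caches keep prefix_len 1 and trigger the upgrade warning.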

View file

@ -1,12 +1,15 @@
"""SCons.Debug
Code for debugging SCons internal things. Shouldn't be
needed by most users.
needed by most users. Quick shortcuts:
from SCons.Debug import caller_trace
caller_trace()
"""
#
# Copyright (c) 2001 - 2015 The SCons Foundation
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -28,7 +31,7 @@ needed by most users.
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Debug.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
__revision__ = "src/engine/SCons/Debug.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
import os
import sys
@ -137,8 +140,12 @@ def caller_stack():
caller_bases = {}
caller_dicts = {}
# trace a caller's stack
def caller_trace(back=0):
"""
Trace caller stack and save info into global dicts, which
are printed automatically at the end of SCons execution.
"""
global caller_bases, caller_dicts
import traceback
tb = traceback.extract_stack(limit=3+back)
tb.reverse()

View file

@ -10,7 +10,7 @@ from distutils.msvccompiler.
"""
#
# Copyright (c) 2001 - 2015 The SCons Foundation
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -33,7 +33,7 @@ from distutils.msvccompiler.
#
from __future__ import division
__revision__ = "src/engine/SCons/Defaults.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
__revision__ = "src/engine/SCons/Defaults.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
import os
@ -169,15 +169,73 @@ def get_paths_str(dest):
else:
return '"' + str(dest) + '"'
permission_dic = {
'u':{
'r':stat.S_IRUSR,
'w':stat.S_IWUSR,
'x':stat.S_IXUSR
},
'g':{
'r':stat.S_IRGRP,
'w':stat.S_IWGRP,
'x':stat.S_IXGRP
},
'o':{
'r':stat.S_IROTH,
'w':stat.S_IWOTH,
'x':stat.S_IXOTH
}
}
def chmod_func(dest, mode):
import SCons.Util
from string import digits
SCons.Node.FS.invalidate_node_memos(dest)
if not SCons.Util.is_List(dest):
dest = [dest]
if SCons.Util.is_String(mode) and not 0 in [i in digits for i in mode]:
mode = int(mode, 8)
if not SCons.Util.is_String(mode):
for element in dest:
os.chmod(str(element), mode)
else:
mode = str(mode)
for operation in mode.split(","):
if "=" in operation:
operator = "="
elif "+" in operation:
operator = "+"
elif "-" in operation:
operator = "-"
else:
raise SyntaxError("Could not find +, - or =")
operation_list = operation.split(operator)
if len(operation_list) is not 2:
raise SyntaxError("More than one operator found")
user = operation_list[0].strip().replace("a", "ugo")
permission = operation_list[1].strip()
new_perm = 0
for u in user:
for p in permission:
try:
new_perm = new_perm | permission_dic[u][p]
except KeyError:
raise SyntaxError("Unrecognized user or permission format")
for element in dest:
curr_perm = os.stat(str(element)).st_mode
if operator == "=":
os.chmod(str(element), new_perm)
elif operator == "+":
os.chmod(str(element), curr_perm | new_perm)
elif operator == "-":
os.chmod(str(element), curr_perm & ~new_perm)
def chmod_strfunc(dest, mode):
import SCons.Util
if not SCons.Util.is_String(mode):
return 'Chmod(%s, 0%o)' % (get_paths_str(dest), mode)
else:
return 'Chmod(%s, "%s")' % (get_paths_str(dest), str(mode))
Chmod = ActionFactory(chmod_func, chmod_strfunc)
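# Editor's illustration (hypothetical SConscript usage, not part of the diff):
#   Chmod('out/file', 0o755)         # numeric modes keep working as before
#   Chmod('out/file', "u+x,go-r")    # symbolic modes are parsed by chmod_func above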
@ -493,7 +551,7 @@ def __libversionflags(env, version_var, flags_var):
ConstructionEnvironment = {
'BUILDERS' : {},
'SCANNERS' : [],
'SCANNERS' : [ SCons.Tool.SourceFileScanner ],
'CONFIGUREDIR' : '#/.sconf_temp',
'CONFIGURELOG' : '#/config.log',
'CPPSUFFIXES' : SCons.Tool.CSuffixes,

View file

@ -10,7 +10,7 @@ Environment
"""
#
# Copyright (c) 2001 - 2015 The SCons Foundation
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -31,7 +31,7 @@ Environment
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Environment.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
__revision__ = "src/engine/SCons/Environment.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
import copy
@ -857,25 +857,6 @@ class SubstitutionEnvironment(object):
self[key] = t
return self
# def MergeShellPaths(self, args, prepend=1):
# """
# Merge the dict in args into the shell environment in env['ENV'].
# Shell path elements are appended or prepended according to prepend.
# Uses Pre/AppendENVPath, so it always appends or prepends uniquely.
# Example: env.MergeShellPaths({'LIBPATH': '/usr/local/lib'})
# prepends /usr/local/lib to env['ENV']['LIBPATH'].
# """
# for pathname, pathval in args.items():
# if not pathval:
# continue
# if prepend:
# self.PrependENVPath(pathname, pathval)
# else:
# self.AppendENVPath(pathname, pathval)
def default_decide_source(dependency, target, prev_ni):
f = SCons.Defaults.DefaultEnvironment().decide_source

View file

@ -1,5 +1,5 @@
#
# Copyright (c) 2001 - 2015 The SCons Foundation
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -28,7 +28,7 @@ and user errors in SCons.
"""
__revision__ = "src/engine/SCons/Errors.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
__revision__ = "src/engine/SCons/Errors.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
import SCons.Util
@ -73,7 +73,7 @@ class BuildError(Exception):
Information about the cause of the location of the error:
---------------------------------------------------------
node : the error occured while building this target node(s)
node : the error occurred while building this target node(s)
executor : the executor that caused the build to fail (might
be None if the build failures is not due to the

View file

@ -6,7 +6,7 @@ Nodes.
"""
#
# Copyright (c) 2001 - 2015 The SCons Foundation
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -27,7 +27,7 @@ Nodes.
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Executor.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
__revision__ = "src/engine/SCons/Executor.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
import collections
@ -122,7 +122,6 @@ def execute_action_list(obj, target, kw):
kw = obj.get_kw(kw)
status = 0
for act in obj.get_action_list():
#args = (self.get_all_targets(), self.get_all_sources(), env)
args = ([], [], env)
status = act(*args, **kw)
if isinstance(status, SCons.Errors.BuildError):
@ -218,7 +217,9 @@ class Executor(object):
us = []
ut = []
for b in self.batches:
if b.targets[0].is_up_to_date():
# don't add targets marked always build to unchanged lists
# add to changed list as they always need to build
if not b.targets[0].always_build and b.targets[0].is_up_to_date():
us.extend(list(map(rfile, b.sources)))
ut.extend(b.targets)
else:
@ -244,14 +245,12 @@ class Executor(object):
return self._changed_targets_list
def _get_source(self, *args, **kw):
#return SCons.Util.NodeList([rfile(self.batches[0].sources[0]).get_subst_proxy()])
return rfile(self.batches[0].sources[0]).get_subst_proxy()
def _get_sources(self, *args, **kw):
return SCons.Util.NodeList([rfile(n).get_subst_proxy() for n in self.get_all_sources()])
def _get_target(self, *args, **kw):
#return SCons.Util.NodeList([self.batches[0].targets[0].get_subst_proxy()])
return self.batches[0].targets[0].get_subst_proxy()
def _get_targets(self, *args, **kw):
@ -486,29 +485,15 @@ class Executor(object):
each individual target, which is a hell of a lot more efficient.
"""
env = self.get_build_env()
path = self.get_build_scanner_path
kw = self.get_kw()
# TODO(batch): scan by batches)
deps = []
if scanner:
for node in node_list:
node.disambiguate()
s = scanner.select(node)
if not s:
continue
path = self.get_build_scanner_path(s)
deps.extend(node.get_implicit_deps(env, s, path))
else:
kw = self.get_kw()
for node in node_list:
node.disambiguate()
scanner = node.get_env_scanner(env, kw)
if not scanner:
continue
scanner = scanner.select(node)
if not scanner:
continue
path = self.get_build_scanner_path(scanner)
deps.extend(node.get_implicit_deps(env, scanner, path))
deps.extend(node.get_implicit_deps(env, scanner, path, kw))
deps.extend(self.get_implicit_deps())

View file

@ -7,7 +7,7 @@ stop, and wait on jobs.
"""
#
# Copyright (c) 2001 - 2015 The SCons Foundation
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -29,7 +29,7 @@ stop, and wait on jobs.
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Job.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
__revision__ = "src/engine/SCons/Job.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
import SCons.compat
@ -70,7 +70,7 @@ class Jobs(object):
def __init__(self, num, taskmaster):
"""
create 'num' jobs using the given taskmaster.
Create 'num' jobs using the given taskmaster.
If 'num' is 1 or less, then a serial job will be used,
otherwise a parallel job with 'num' worker threads will
@ -126,10 +126,10 @@ class Jobs(object):
c) SIGHUP: Controlling shell exiting
We handle all of these cases by stopping the taskmaster. It
turns out that it very difficult to stop the build process
turns out that it's very difficult to stop the build process
by throwing asynchronously an exception such as
KeyboardInterrupt. For example, the python Condition
variables (threading.Condition) and queue's do not seem to
variables (threading.Condition) and queues do not seem to be
asynchronous-exception-safe. It would require adding a whole
bunch of try/finally block and except KeyboardInterrupt all
over the place.
@ -177,7 +177,7 @@ class Serial(object):
The taskmaster's next_task() method should return the next task
that needs to be executed, or None if there are no more tasks. The
taskmaster's executed() method will be called for each task when it
is successfully executed or failed() will be called if it failed to
is successfully executed, or failed() will be called if it failed to
execute (e.g. execute() raised an exception)."""
self.taskmaster = taskmaster
@ -351,7 +351,7 @@ else:
The taskmaster's next_task() method should return the next
task that needs to be executed, or None if there are no more
tasks. The taskmaster's executed() method will be called
for each task when it is successfully executed or failed()
for each task when it is successfully executed, or failed()
will be called if the task failed to execute (i.e. execute()
raised an exception).

View file

@ -1,5 +1,5 @@
#
# Copyright (c) 2001 - 2015 The SCons Foundation
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -21,7 +21,7 @@
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Memoize.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
__revision__ = "src/engine/SCons/Memoize.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
__doc__ = """Memoizer

View file

@ -8,7 +8,7 @@ This creates a hash of global Aliases (dummy targets).
"""
#
# Copyright (c) 2001 - 2015 The SCons Foundation
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -30,7 +30,7 @@ This creates a hash of global Aliases (dummy targets).
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Node/Alias.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
__revision__ = "src/engine/SCons/Node/Alias.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
import collections

View file

@ -11,7 +11,7 @@ that can be used by scripts or modules looking for the canonical default.
"""
#
# Copyright (c) 2001 - 2015 The SCons Foundation
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -32,7 +32,7 @@ that can be used by scripts or modules looking for the canonical default.
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Node/FS.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
__revision__ = "src/engine/SCons/Node/FS.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
import fnmatch
import os
@ -186,7 +186,7 @@ needs_normpath_check = re.compile(
# We need to renormalize the path if it contains a '.'
# directory, but NOT if it is a single '.' '/' characters. We
# do not want to match a single '.' because this case is checked
# for explicitely since this is common enough case.
# for explicitly since this is common enough case.
#
# Note that we check for all the following cases:
#
@ -1165,15 +1165,6 @@ class LocalFS(object):
return ''
#class RemoteFS:
# # Skeleton for the obvious methods we might need from the
# # abstraction layer for a remote filesystem.
# def upload(self, local_src, remote_dst):
# pass
# def download(self, remote_src, local_dst):
# pass
class FS(LocalFS):
def __init__(self, path = None):
@ -2234,7 +2225,6 @@ class Dir(Base):
# the overall list will also be filtered later,
# after we exit this loop.
if pattern[0] != '.':
#disk_names = [ d for d in disk_names if d[0] != '.' ]
disk_names = [x for x in disk_names if x[0] != '.']
disk_names = fnmatch.filter(disk_names, pattern)
dirEntry = dir.Entry
@ -2627,13 +2617,6 @@ class File(Base):
the directory of this file."""
return self.dir.File(name)
#def generate_build_dict(self):
# """Return an appropriate dictionary of values for building
# this File."""
# return {'Dir' : self.Dir,
# 'File' : self.File,
# 'RDirs' : self.RDirs}
def _morph(self):
"""Turn a file system node into a File object."""
self.scanner_paths = {}
@ -2907,9 +2890,7 @@ class File(Base):
pass
if scanner:
# result = [n.disambiguate() for n in scanner(self, env, path)]
result = scanner(self, env, path)
result = [N.disambiguate() for N in result]
result = [n.disambiguate() for n in scanner(self, env, path)]
else:
result = []
@ -3519,36 +3500,6 @@ class FileFinder(object):
filedir, filename = os.path.split(filename)
if filedir:
# More compact code that we can't use until we drop
# support for Python 1.5.2:
#
#def filedir_lookup(p, fd=filedir):
# """
# A helper function that looks up a directory for a file
# we're trying to find. This only creates the Dir Node
# if it exists on-disk, since if the directory doesn't
# exist we know we won't find any files in it... :-)
# """
# dir, name = os.path.split(fd)
# if dir:
# p = filedir_lookup(p, dir)
# if not p:
# return None
# norm_name = _my_normcase(name)
# try:
# node = p.entries[norm_name]
# except KeyError:
# return p.dir_on_disk(name)
# if isinstance(node, Dir):
# return node
# if isinstance(node, Entry):
# node.must_be_same(Dir)
# return node
# if isinstance(node, Dir) or isinstance(node, Entry):
# return node
# return None
#paths = [_f for _f in map(filedir_lookup, paths) if _f]
self.default_filedir = filedir
paths = [_f for _f in map(self.filedir_lookup, paths) if _f]

View file

@ -5,7 +5,7 @@ Python nodes.
"""
#
# Copyright (c) 2001 - 2015 The SCons Foundation
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -27,7 +27,7 @@ Python nodes.
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Node/Python.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
__revision__ = "src/engine/SCons/Node/Python.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
import SCons.Node

View file

@ -20,7 +20,7 @@ be able to depend on any other type of "thing."
"""
#
# Copyright (c) 2001 - 2015 The SCons Foundation
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -41,7 +41,7 @@ be able to depend on any other type of "thing."
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Node/__init__.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
__revision__ = "src/engine/SCons/Node/__init__.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
import collections
import copy
@ -916,34 +916,55 @@ class Node(object):
"""
return []
def get_implicit_deps(self, env, scanner, path):
def get_implicit_deps(self, env, initial_scanner, path_func, kw = {}):
"""Return a list of implicit dependencies for this node.
This method exists to handle recursive invocation of the scanner
on the implicit dependencies returned by the scanner, if the
scanner's recursive flag says that we should.
"""
if not scanner:
return []
# Give the scanner a chance to select a more specific scanner
# for this Node.
#scanner = scanner.select(self)
nodes = [self]
seen = {}
seen[self] = 1
deps = []
while nodes:
n = nodes.pop(0)
d = [x for x in n.get_found_includes(env, scanner, path) if x not in seen]
if d:
deps.extend(d)
for n in d:
seen[n] = 1
nodes.extend(scanner.recurse_nodes(d))
dependencies = []
return deps
root_node_scanner = self._get_scanner(env, initial_scanner, None, kw)
while nodes:
node = nodes.pop(0)
scanner = node._get_scanner(env, initial_scanner, root_node_scanner, kw)
if not scanner:
continue
path = path_func(scanner)
included_deps = [x for x in node.get_found_includes(env, scanner, path) if x not in seen]
if included_deps:
dependencies.extend(included_deps)
for dep in included_deps:
seen[dep] = 1
nodes.extend(scanner.recurse_nodes(included_deps))
return dependencies
def _get_scanner(self, env, initial_scanner, root_node_scanner, kw):
if not initial_scanner:
# handle implicit scanner case
scanner = self.get_env_scanner(env, kw)
if scanner:
scanner = scanner.select(self)
else:
# handle explicit scanner case
scanner = initial_scanner.select(self)
if not scanner:
# no scanner could be found for the given node's scanner key;
# thus, make an attempt at using a default.
scanner = root_node_scanner
return scanner
def get_env_scanner(self, env, kw={}):
return env.get_scanner(self.scanner_key())
@ -1260,11 +1281,6 @@ class Node(object):
def _add_child(self, collection, set, child):
"""Adds 'child' to 'collection', first checking 'set' to see if it's
already present."""
#if type(child) is not type([]):
# child = [child]
#for c in child:
# if not isinstance(c, Node):
# raise TypeError, c
added = None
for c in child:
if c not in set:

View file

@ -1,5 +1,5 @@
#
# Copyright (c) 2001 - 2015 The SCons Foundation
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -21,7 +21,7 @@
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Options/BoolOption.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
__revision__ = "src/engine/SCons/Options/BoolOption.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
__doc__ = """Place-holder for the old SCons.Options module hierarchy

View file

@ -1,5 +1,5 @@
#
# Copyright (c) 2001 - 2015 The SCons Foundation
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -21,7 +21,7 @@
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Options/EnumOption.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
__revision__ = "src/engine/SCons/Options/EnumOption.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
__doc__ = """Place-holder for the old SCons.Options module hierarchy

View file

@ -1,5 +1,5 @@
#
# Copyright (c) 2001 - 2015 The SCons Foundation
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -21,7 +21,7 @@
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Options/ListOption.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
__revision__ = "src/engine/SCons/Options/ListOption.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
__doc__ = """Place-holder for the old SCons.Options module hierarchy

View file

@ -1,5 +1,5 @@
#
# Copyright (c) 2001 - 2015 The SCons Foundation
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -21,7 +21,7 @@
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Options/PackageOption.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
__revision__ = "src/engine/SCons/Options/PackageOption.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
__doc__ = """Place-holder for the old SCons.Options module hierarchy

View file

@ -1,5 +1,5 @@
#
# Copyright (c) 2001 - 2015 The SCons Foundation
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -21,7 +21,7 @@
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Options/PathOption.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
__revision__ = "src/engine/SCons/Options/PathOption.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
__doc__ = """Place-holder for the old SCons.Options module hierarchy

View file

@ -1,5 +1,5 @@
#
# Copyright (c) 2001 - 2015 The SCons Foundation
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -21,7 +21,7 @@
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Options/__init__.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
__revision__ = "src/engine/SCons/Options/__init__.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
__doc__ = """Place-holder for the old SCons.Options module hierarchy

View file

@ -1,5 +1,5 @@
#
# Copyright (c) 2001 - 2015 The SCons Foundation
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -21,13 +21,13 @@
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/PathList.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
__revision__ = "src/engine/SCons/PathList.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
__doc__ = """SCons.PathList
A module for handling lists of directory paths (the sort of things
that get set as CPPPATH, LIBPATH, etc.) with as much caching of data and
efficiency as we can while still keeping the evaluation delayed so that we
efficiency as we can, while still keeping the evaluation delayed so that we
Do the Right Thing (almost) regardless of how the variable is specified.
"""

View file

@ -12,7 +12,7 @@ environment. Consequently, we'll examine both sys.platform and os.name
(and anything else that might come in to play) in order to return some
specification which is unique enough for our purposes.
Note that because this subsysem just *selects* a callable that can
Note that because this subsystem just *selects* a callable that can
modify a construction environment, it's possible for people to define
their own "platform specification" in an arbitrary callable function.
No one needs to use or tie in to this subsystem in order to roll
@ -20,7 +20,7 @@ their own platform definition.
"""
#
# Copyright (c) 2001 - 2015 The SCons Foundation
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -42,7 +42,7 @@ their own platform definition.
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Platform/__init__.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
__revision__ = "src/engine/SCons/Platform/__init__.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
import SCons.compat

View file

@ -8,7 +8,7 @@ selection method.
"""
#
# Copyright (c) 2001 - 2015 The SCons Foundation
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -30,7 +30,7 @@ selection method.
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Platform/aix.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
__revision__ = "src/engine/SCons/Platform/aix.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
import os
import subprocess

View file

@ -8,7 +8,7 @@ selection method.
"""
#
# Copyright (c) 2001 - 2015 The SCons Foundation
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@ -30,7 +30,7 @@ selection method.
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Platform/cygwin.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
__revision__ = "src/engine/SCons/Platform/cygwin.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
import posix
from SCons.Platform import TempFileMunge

Some files were not shown because too many files have changed in this diff