scons: avoid skipping configure if --clean is requested since it will bork scons given the need for the configured/pickled dictionary to populate env variables in Sconscript files - thanks rcoup for noticing this
This commit is contained in:
parent fd50ef8400
commit ed695f6aae
1 changed file with 388 additions and 389 deletions
SConstruct (file mode 777)
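The rationale above can be illustrated with a small, self-contained sketch (plain Python, not the actual SConstruct; CONFIG_CACHE, run_configure_checks and pickle_env_to_cache are hypothetical stand-ins for SCONS_CONFIGURE_CACHE and the code in the hunk below):

import pickle

CONFIG_CACHE = 'config.cache'                 # stand-in for SCONS_CONFIGURE_CACHE
pickle_store = ['PLATFORM', 'LIBDIR_SCHEMA']  # stand-in for the keys the build pickles

def run_configure_checks(env):
    # pretend configure stage: discover the values SConscript files will read later
    env['PLATFORM'] = 'Linux'
    env['LIBDIR_SCHEMA'] = 'lib'

def pickle_env_to_cache(env):
    # dump only the whitelisted keys, mirroring the pickle_store loop in the hunk below
    cache = open(CONFIG_CACHE, 'wb')
    pickle.dump(dict((k, env.get(k)) for k in pickle_store), cache)
    cache.close()

def configure_stage(env, preconfigured, cleaning):
    # Old behaviour (removed by this commit): `if not cleaning:` wrapped the whole
    # stage, so `scons -c` skipped configure and the SConscript files had no pickled
    # dictionary to populate env variables from.
    # New behaviour: configure runs whenever the build is not already preconfigured,
    # even when --clean is requested (the `cleaning` flag is deliberately unused).
    if not preconfigured:
        run_configure_checks(env)
        pickle_env_to_cache(env)

configure_stage(env={}, preconfigured=False, cleaning=True)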
@@ -397,410 +397,409 @@ conf_tests = { 'CheckPKGConfig' : CheckPKGConfig,
 }
 
-if not env.GetOption('clean'):
-    if not preconfigured:
+if not preconfigured:
 
+    color_print(4,'Configuring build environment...')
 
+    if env['USE_CONFIG']:
+        if not env['CONFIG'].endswith('.py'):
+            color_print(1,'SCons CONFIG file specified is not a python file, will not be read...')
+        else:
+            # Accept more than one file as comma-delimited list
+            user_confs = env['CONFIG'].split(',')
+            # If they exist add the files to the existing `opts`
+            for conf in user_confs:
+                if os.path.exists(conf):
+                    opts.files.append(conf)
+                    color_print(4,"SCons CONFIG found: '%s', variables will be inherited..." % conf)
+                    optfile = file(conf)
+                    print optfile.read().replace("\n", " ").replace("'","").replace(" = ","=")
+                    optfile.close()
 
+                elif not conf == SCONS_LOCAL_CONFIG:
+                    # if default missing, no worries
+                    # but if the default is overridden and the file is not found, give warning
+                    color_print(1,"SCons CONFIG not found: '%s'" % conf)
+            # Recreate the base environment using modified `opts`
+            env = Environment(ENV=os.environ,options=opts)
+            env['USE_CONFIG'] = True
+    else:
+        color_print(4,'SCons USE_CONFIG specified as false, will not inherit variables python config file...')
 
+    conf = Configure(env, custom_tests = conf_tests)
 
+    if env['DEBUG']:
+        mode = 'debug mode'
+    else:
+        mode = 'release mode'
 
+    env['PLATFORM'] = platform.uname()[0]
+    color_print (4,"Configuring on %s in *%s*..." % (env['PLATFORM'],mode))
 
+    env['MISSING_DEPS'] = []
+    env['SKIPPED_DEPS'] = []
 
+    env['LIBDIR_SCHEMA'] = LIBDIR_SCHEMA
+    env['PLUGINS'] = PLUGINS
 
+    if env['FAST']:
+        # caching is 'auto' by default in SCons
+        # But let's also cache implicit deps...
+        EnsureSConsVersion(0,98)
+        SetOption('implicit_cache', 1)
+        env.Decider('MD5-timestamp')
+        SetOption('max_drift', 1)
 
+    else:
+        # Set the cache mode to 'force' unless requested, avoiding hidden caching of Scons 'opts' in '.sconsign.dblite'
+        # This allows for a SCONS_LOCAL_CONFIG, if present, to be used as the primary means of storing paths to successful build dependencies
+        SetCacheMode('force')
 
+    if env['JOBS'] > 1:
+        SetOption("num_jobs", env['JOBS'])
 
+    thread_suffix = 'mt'
+    if env['PLATFORM'] == 'FreeBSD':
+        thread_suffix = ''
+        env.Append(LIBS = 'pthread')
 
+    if env['SYSTEM_FONTS']:
+        if not os.path.isdir(env['SYSTEM_FONTS']):
+            color_print(1,'Warning: Directory specified for SYSTEM_FONTS does not exist!')
+    #### Libraries and headers dependency checks ####
 
+    # Set up for libraries and headers dependency checks
+    env['CPPPATH'] = ['#include', '#']
+    env['LIBPATH'] = ['#src']
 
+    # Solaris & Sun Studio settings (the `SUNCC` flag will only be
+    # set if the `CXX` option begins with `CC`)
+    SOLARIS = env['PLATFORM'] == 'SunOS'
+    env['SUNCC'] = SOLARIS and env['CXX'].startswith('CC')
 
+    # For Solaris include paths (e.g., for freetype2, ltdl, etc.).
+    if SOLARIS:
+        blastwave_dir = '/opt/csw/%s'
+        env.AppendUnique(CPPPATH = blastwave_dir % 'include')
+        env.AppendUnique(LIBPATH = blastwave_dir % LIBDIR_SCHEMA)
 
+    # If the Sun Studio C++ compiler (`CC`) is used instead of GCC.
+    if env['SUNCC']:
+        env['CC'] = 'cc'
+        # To be compatible w/Boost everything needs to be compiled
+        # with the `-library=stlport4` flag (which needs to come
+        # before the `-o` flag).
+        env['CXX'] = 'CC -library=stlport4'
+        if env['THREADING'] == 'multi':
+            env['CXXFLAGS'] = ['-mt']
 
+    # Adding the required prerequisite library directories to the include path for
+    # compiling and the library path for linking, respectively.
+    for required in ('BOOST', 'PNG', 'JPEG', 'TIFF','PROJ','ICU'):
+        inc_path = env['%s_INCLUDES' % required]
+        lib_path = env['%s_LIBS' % required]
+        env.AppendUnique(CPPPATH = inc_path)
+        env.AppendUnique(LIBPATH = lib_path)
 
+    conf.parse_config('FREETYPE_CONFIG')
 
+    if env['XMLPARSER'] == 'tinyxml':
+        env['CPPPATH'].append('#tinyxml')
+        env.Append(CXXFLAGS = '-DBOOST_PROPERTY_TREE_XML_PARSER_TINYXML -DTIXML_USE_STL')
+    elif env['XMLPARSER'] == 'libxml2':
+        if conf.parse_config('XML2_CONFIG'):
+            env.Append(CXXFLAGS = '-DHAVE_LIBXML2')
 
+    if env['CAIRO'] and conf.CheckPKGConfig('0.15.0') and conf.CheckPKG('cairomm-1.0'):
+        env.ParseConfig('pkg-config --libs --cflags cairomm-1.0')
+        env.Append(CXXFLAGS = '-DHAVE_CAIRO')
+    else:
+        env['SKIPPED_DEPS'].extend(['cairo','cairomm'])
 
+    LIBSHEADERS = [
+        ['m', 'math.h', True,'C'],
+        ['ltdl', 'ltdl.h', True,'C'],
+        ['png', 'png.h', True,'C'],
+        ['tiff', 'tiff.h', True,'C'],
+        ['z', 'zlib.h', True,'C'],
+        ['jpeg', ['stdio.h', 'jpeglib.h'], True,'C'],
+        ['proj', 'proj_api.h', True,'C'],
+        ['icuuc','unicode/unistr.h',True,'C++'],
+        ['icudata','unicode/utypes.h' , True,'C++'],
+    ]
 
+    # get boost version from boost headers rather than previous approach
+    # of fetching from the user provided INCLUDE path
+    boost_system_required = False
+    boost_lib_version_from_header = conf.GetBoostLibVersion()
+    if boost_lib_version_from_header:
+        boost_version_from_header = int(boost_lib_version_from_header.split('_')[1])
+        if boost_version_from_header >= 35 and env['PLATFORM'] == 'Darwin':
+            boost_system_required = True
+        else:
+            boost_system_required = False
 
+    # The other required boost headers.
+    BOOST_LIBSHEADERS = [
+        ['system', 'boost/system/system_error.hpp', boost_system_required],
+        ['filesystem', 'boost/filesystem/operations.hpp', True],
+        ['regex', 'boost/regex.hpp', True],
+        ['iostreams','boost/iostreams/device/mapped_file.hpp',True],
+        ['program_options', 'boost/program_options.hpp', False]
+    ]
 
+    if env['THREADING'] == 'multi':
+        BOOST_LIBSHEADERS.append(['thread', 'boost/thread/mutex.hpp', True])
+        thread_flag = thread_suffix
+    else:
+        thread_flag = ''
 
+    for libinfo in LIBSHEADERS:
+        if not conf.CheckLibWithHeader(libinfo[0], libinfo[1], libinfo[3]):
+            if libinfo[2]:
+                color_print (1,'Could not find required header or shared library for %s' % libinfo[0])
+                env['MISSING_DEPS'].append(libinfo[0])
+            else:
+                color_print(4,'Could not find optional header or shared library for %s' % libinfo[0])
+                env['SKIPPED_DEPS'].append(libinfo[0])
 
+    # Creating BOOST_APPEND according to the Boost library naming order,
+    # which goes <toolset>-<threading>-<abi>-<version>. See:
+    # http://www.boost.org/doc/libs/1_35_0/more/getting_started/unix-variants.html#library-naming
+    append_params = ['']
+    if env['BOOST_TOOLKIT']: append_params.append(env['BOOST_TOOLKIT'])
+    if thread_flag: append_params.append(thread_flag)
+    if env['BOOST_ABI']: append_params.append(env['BOOST_ABI'])
+    if env['BOOST_VERSION']: append_params.append(env['BOOST_VERSION'])
 
+    # if the user is not setting custom boost configuration
+    # enforce boost version greater than or equal to 1.33
+    if not conf.CheckBoost('1.33'):
+        color_print (1,'Boost version 1.33 or greater is requred')
+        if not env['BOOST_VERSION']:
+            env['MISSING_DEPS'].append('boost version >=1.33')
+    else:
+        color_print (4,'Found boost lib version... %s' % boost_lib_version_from_header )
 
+    # Constructing the BOOST_APPEND setting that will be used to find the
+    # Boost libraries.
+    if len(append_params) > 1:
+        env['BOOST_APPEND'] = '-'.join(append_params)
+    else:
+        env['BOOST_APPEND'] = ''
 
+    for count, libinfo in enumerate(BOOST_LIBSHEADERS):
+        if not conf.CheckLibWithHeader('boost_%s%s' % (libinfo[0],env['BOOST_APPEND']), libinfo[1], 'C++'):
+            if libinfo[2]:
+                color_print(1,'Could not find required header or shared library for boost %s' % libinfo[0])
+                env['MISSING_DEPS'].append('boost ' + libinfo[0])
+            else:
+                color_print(4,'Could not find optional header or shared library for boost %s' % libinfo[0])
+                env['SKIPPED_DEPS'].append('boost ' + libinfo[0])
 
+    env['REQUESTED_PLUGINS'] = [ driver.strip() for driver in Split(env['INPUT_PLUGINS'])]
 
+    color_print(4,'Checking for requested plugins dependencies...')
+    for plugin in env['REQUESTED_PLUGINS']:
+        details = env['PLUGINS'][plugin]
+        if plugin == 'gdal':
+            if conf.parse_config('GDAL_CONFIG',checks='--libs'):
+                conf.parse_config('GDAL_CONFIG',checks='--cflags')
+                libname = conf.get_pkg_lib('GDAL_CONFIG','gdal')
+                if libname:
+                    details['lib'] = libname
+        elif plugin == 'postgis':
+            conf.parse_pg_config('PG_CONFIG')
+        elif plugin == 'ogr':
+            if conf.ogr_enabled():
+                if not 'gdal' in env['REQUESTED_PLUGINS']:
+                    conf.parse_config('GDAL_CONFIG',checks='--libs')
+                    conf.parse_config('GDAL_CONFIG',checks='--cflags')
+                libname = conf.get_pkg_lib('GDAL_CONFIG','ogr')
+                if libname:
+                    details['lib'] = libname
 
+        elif details['path'] and details['lib'] and details['inc']:
+            backup = env.Clone().Dictionary()
+            # Note, the 'delete_existing' keyword makes sure that these paths are prepended
+            # to the beginning of the path list even if they already exist
+            env.PrependUnique(CPPPATH = env['%s_INCLUDES' % details['path']],delete_existing=True)
+            env.PrependUnique(LIBPATH = env['%s_LIBS' % details['path']],delete_existing=True)
+            if not conf.CheckLibWithHeader(details['lib'], details['inc'], details['lang']):
+                env.Replace(**backup)
+                env['SKIPPED_DEPS'].append(details['lib'])
 
+    # re-append the local paths for mapnik sources to the beginning of the list
+    # to make sure they come before any plugins that were 'prepended'
+    env.PrependUnique(CPPPATH = ['#include', '#'], delete_existing=True)
+    env.PrependUnique(LIBPATH = '#src', delete_existing=True)
 
+    # Decide which libagg to use
+    # if we are using internal agg, then prepend to make sure
+    # we link locally
+    if env['INTERNAL_LIBAGG']:
+        env.Prepend(CPPPATH = '#agg/include')
+        env.Prepend(LIBPATH = '#agg')
+    else:
+        env.ParseConfig('pkg-config --libs --cflags libagg')
 
+    if 'python' in env['BINDINGS']:
+        # checklibwithheader does not work for boost_python since we can't feed it
+        # multiple header files, so we fall back on a simple check for boost_python headers
+        if not conf.CheckHeader(header='boost/python/detail/config.hpp',language='C++'):
+            color_print(1,'Could not find required header files for boost python')
+            env['MISSING_DEPS'].append('boost python')
 
+        if env['CAIRO'] and conf.CheckPKGConfig('0.15.0') and conf.CheckPKG('pycairo'):
+            env.ParseConfig('pkg-config --cflags pycairo')
+            env.Append(CXXFLAGS = '-DHAVE_PYCAIRO')
+        else:
+            env['SKIPPED_DEPS'].extend(['pycairo'])
 
+    #### End Config Stage for Required Dependencies ####
 
+    if env['MISSING_DEPS']:
+        # if required dependencies are missing, print warnings and then let SCons finish without building or saving local config
+        color_print(1,'\nExiting... the following required dependencies were not found:\n - %s' % '\n - '.join(env['MISSING_DEPS']))
+        color_print(1,"\nSee the 'config.log' for details on possible problems.")
+        if env['SKIPPED_DEPS']:
+            color_print(4,'\nAlso, these optional dependencies were not found:\n - %s' % '\n - '.join(env['SKIPPED_DEPS']))
+        color_print(4,"\nSet custom paths to these libraries and header files on the command-line or in a file called '%s'" % SCONS_LOCAL_CONFIG)
+        color_print(4," ie. $ python scons/scons.py BOOST_INCLUDES=/usr/local/include/boost-1_37 BOOST_LIBS=/usr/local/lib")
+        color_print(4, "\nOnce all required dependencies are found a local '%s' will be saved and then install:" % SCONS_LOCAL_CONFIG)
+        color_print(4," $ sudo python scons/scons.py install")
+        color_print(4,"\nTo view available path variables:\n $ python scons/scons.py --help or -h")
+        color_print(4,'\nTo view overall SCons help options:\n $ python scons/scons.py --help-options or -H\n')
+        color_print(4,'More info: http://trac.mapnik.org/wiki/MapnikInstallation')
+        Exit(0)
+    else:
+        # Save the custom variables in a SCONS_LOCAL_CONFIG
+        # that will be reloaded to allow for `install` without re-specifying custom variables
+        color_print(4,"\nAll Required dependencies found!\n")
+        if env['USE_CONFIG']:
+            if os.path.exists(SCONS_LOCAL_CONFIG):
+                action = 'Overwriting and re-saving'
+                os.unlink(SCONS_LOCAL_CONFIG)
+            else:
+                action = 'Saving new'
+            color_print(4,"%s file '%s'..." % (action,SCONS_LOCAL_CONFIG))
+            color_print(4,"Will hold custom path variables from commandline and python config file(s)...")
+            opts.Save(SCONS_LOCAL_CONFIG,env)
+        else:
+            color_print(4,"Did not use user config file, no custom path variables will be saved...")
 
+    if env['SKIPPED_DEPS']:
+        color_print(1,'\nHowever, these optional dependencies were not found:\n - %s' % '\n - '.join(env['SKIPPED_DEPS']))
+        print
 
+    # fetch the mapnik version header in order to set the
+    # ABI version used to build libmapnik.so on linux in src/SConscript
+    abi = conf.GetMapnikLibVersion()
+    abi_fallback = [0,6,0]
+    if not abi:
+        color_print(1,'Problem encountered parsing mapnik version, falling back to %s' % abi_fallback)
+        env['ABI_VERSION'] = abi_fallback
+    else:
+        env['ABI_VERSION'] = abi
 
+    # Common C++ flags.
+    if env['THREADING'] == 'multi' :
+        common_cxx_flags = '-D%s -DBOOST_SPIRIT_THREADSAFE -DMAPNIK_THREADSAFE ' % env['PLATFORM'].upper()
+    else :
+        common_cxx_flags = '-D%s ' % env['PLATFORM'].upper()
 
+    # Mac OSX (Darwin) special settings
+    if env['PLATFORM'] == 'Darwin':
+        pthread = ''
+        # Getting the macintosh version number, sticking as a compiler macro
+        # for Leopard -- needed because different workarounds are needed than
+        # for Tiger.
+        if platform.mac_ver()[0].startswith('10.5'):
+            common_cxx_flags += '-DOSX_LEOPARD '
+    else:
+        pthread = '-pthread'
 
+    # Common debugging flags.
+    debug_flags = '-g -DDEBUG -DMAPNIK_DEBUG'
+    ndebug_flags = '-DNDEBUG'
 
+    # Customizing the C++ compiler flags depending on:
+    # (1) the C++ compiler used; and
+    # (2) whether debug binaries are requested.
+    if env['SUNCC']:
+        if env['DEBUG']:
+            env.Append(CXXFLAGS = common_cxx_flags + debug_flags)
+        else:
+            env.Append(CXXFLAGS = common_cxx_flags + '-O %s' % ndebug_flags)
+    else:
+        # Common flags for GCC.
+        gcc_cxx_flags = '-ansi -Wall %s -ftemplate-depth-100 %s' % (pthread, common_cxx_flags)
 
+        if env['DEBUG']:
+            env.Append(CXXFLAGS = gcc_cxx_flags + '-O0 -fno-inline %s' % debug_flags)
+        else:
+            env.Append(CXXFLAGS = gcc_cxx_flags + '-O%s -finline-functions -Wno-inline %s' % (env['OPTIMIZATION'],ndebug_flags))
 
+    if 'python' in env['BINDINGS']:
+        if not os.access(env['PYTHON'], os.X_OK):
+            color_print(1,"Cannot run python interpreter at '%s', make sure that you have the permissions to execute it." % env['PYTHON'])
+            Exit(1)
 
+        sys_prefix = "%s -c 'import sys; print sys.prefix'" % env['PYTHON']
+        env['PYTHON_SYS_PREFIX'] = call(sys_prefix)
 
+        if HAS_DISTUTILS:
+            sys_version = "%s -c 'from distutils.sysconfig import get_python_version; print get_python_version()'" % env['PYTHON']
+            env['PYTHON_VERSION'] = call(sys_version)
+            py_includes = "%s -c 'from distutils.sysconfig import get_python_inc; print get_python_inc()'" % env['PYTHON']
+            env['PYTHON_INCLUDES'] = call(py_includes)
+            # Note: we use the plat_specific argument here to make sure to respect the arch-specific site-packages location
+            site_packages = "%s -c 'from distutils.sysconfig import get_python_lib; print get_python_lib(plat_specific=True)'" % env['PYTHON']
+            env['PYTHON_SITE_PACKAGES'] = call(site_packages)
+        else:
+            env['PYTHON_SYS_PREFIX'] = os.popen("%s -c 'import sys; print sys.prefix'" % env['PYTHON']).read().strip()
+            env['PYTHON_VERSION'] = os.popen("%s -c 'import sys; print sys.version'" % env['PYTHON']).read()[0:3]
+            env['PYTHON_INCLUDES'] = env['PYTHON_SYS_PREFIX'] + '/include/python' + env['PYTHON_VERSION']
+            env['PYTHON_SITE_PACKAGES'] = env['DESTDIR'] + '/' + env['PYTHON_SYS_PREFIX'] + '/' + env['LIBDIR_SCHEMA'] + '/python' + env['PYTHON_VERSION'] + '/site-packages/'
 
+        # if user-requested custom prefix fall back to manual concatenation for building subdirectories
+        if env['PYTHON_PREFIX']:
+            py_relative_install = env['LIBDIR_SCHEMA'] + '/python' + env['PYTHON_VERSION'] + '/site-packages/'
+            env['PYTHON_INSTALL_LOCATION'] = env['DESTDIR'] + '/' + env['PYTHON_PREFIX'] + '/' + py_relative_install
+        else:
+            env['PYTHON_INSTALL_LOCATION'] = env['DESTDIR'] + '/' + env['PYTHON_SITE_PACKAGES']
 
+        majver, minver = env['PYTHON_VERSION'].split('.')
 
+        if (int(majver), int(minver)) < (2, 2):
+            color_print(1,"Python version 2.2 or greater required")
+            Exit(1)
 
+        color_print(4,'Bindings Python version... %s' % env['PYTHON_VERSION'])
+        color_print(4,'Python %s prefix... %s' % (env['PYTHON_VERSION'], env['PYTHON_SYS_PREFIX']))
+        color_print(4,'Python bindings will install in... %s' % os.path.normpath(env['PYTHON_INSTALL_LOCATION']))
 
+    # finish config stage and pickle results
+    env = conf.Finish()
+    env_cache = open(SCONS_CONFIGURE_CACHE, 'w')
+    pickle_dict = {}
+    for i in pickle_store:
+        pickle_dict[i] = env.get(i)
+    pickle.dump(pickle_dict,env_cache)
+    env_cache.close()
+    # fix up permissions on pickled options
+    try:
+        os.chmod(SCONS_CONFIGURE_CACHE,0666)
+    except: pass
+    # clean up test build targets
+    if not env['FAST']:
+        try:
+            for test in glob('%s/*' % SCONF_TEMP_DIR):
+                os.unlink(test)
+        except: pass
+    if 'configure' in command_line_args:
+        color_print(4,'\n*Configure complete*\nNow run "python scons/scons.py" to build or "python scons/scons.py install" to install')
+        Exit(0)
 
 
 #### Builds ####
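The loading side the commit message refers to (SConscript files needing the pickled dictionary) is not part of this hunk. A minimal sketch of how such a cache might be read back, assuming the same SCONS_CONFIGURE_CACHE / pickle_store convention as the dump above (the real read-back code lives elsewhere in SConstruct):

import os
import pickle

SCONS_CONFIGURE_CACHE = 'config.cache'        # assumption: same cache file name as the dump above
pickle_store = ['PLATFORM', 'LIBDIR_SCHEMA']  # assumption: same whitelist of pickled keys

def load_pickled_env(env):
    # Repopulate env from the pickled configure results, if a cache exists.
    # Returns True when values were loaded, False otherwise.
    if not os.path.exists(SCONS_CONFIGURE_CACHE):
        return False
    cache = open(SCONS_CONFIGURE_CACHE, 'rb')
    saved = pickle.load(cache)
    cache.close()
    for key in pickle_store:
        if key in saved:
            env[key] = saved[key]
    return True

# Because this read-back is what populates env for the SConscript files, the
# configure/pickle stage above must run even when `scons -c` is requested.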