Upgrade SCons to latest production release (v4.7.0) [skip ci]

Artem Pavlenko 2024-04-15 14:35:35 +01:00
parent 5cc57566cc
commit d6fc3b1ddc
1541 changed files with 5263 additions and 3864 deletions

scons/scons-LICENSE vendored

@@ -5,7 +5,7 @@
MIT License
-Copyright (c) 2001 - 2023 The SCons Foundation
+Copyright (c) 2001 - 2024 The SCons Foundation
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the

scons/scons-README vendored

@@ -43,11 +43,9 @@ scons-local package, or any SCons package, at the SCons download page:
EXECUTION REQUIREMENTS
======================
-Running SCons requires Python 3.5 or higher.
-There should be no other dependencies or requirements to run SCons.
-As of SCons 4.2.0 support for Python 3.5 is deprecated and will be removed
-with the next major release.
+Running SCons requires Python 3.6 or higher. There should be no other
+dependencies or requirements to run standard SCons.
+The last release to support Python 3.5 was 4.2.0.
The default SCons configuration assumes use of the Microsoft Visual C++
compiler suite on WIN32 systems (either through the Visual Studio
@@ -84,6 +82,11 @@ Or (if, for example, you installed this package in a subdirectory named
That should be all you have to do. (If it isn't that simple, please let
us know!)
+Since 4.5, there is also an alternate form of scons-local available:
+a zipapp. This is a single file with a .pyz suffix, which can be
+downloaded and executed directly (e.g.: python scons-local-4.5.2.pyz)
+without unpacking. This may be more convenient in a few cases.
CONTENTS OF THIS PACKAGE
========================
@@ -191,8 +194,9 @@ You may subscribe to the scons-users mailing list at:
http://two.pairlist.net/mailman/listinfo/scons-users
-An active mailing list for developers of SCons is available. You may
-send questions or comments to the list at:
+In addition to the scons-users list which is appropriate for almost any
+question, there is a mailing list specifically for developers of SCons.
+You may send questions or comments to the list at:
scons-dev@scons.org
@@ -204,10 +208,11 @@ Subscription to the developer's mailing list is by approval. In practice, no
one is refused list membership, but we reserve the right to limit membership
in the future and/or weed out lurkers.
-There are other mailing lists available for SCons users, for notification of
-SCons code changes, and for notification of updated bug reports and project
-documents. Please see our mailing lists page for details.
+Note that while this list still exists, the number of different places you
+can talk about SCons means it is no longer very active. GitHub has
+support for discussions as well as for issues, and there is usually more
+immediacy on the Discord chat, so these are probably now considered the
+preferred places for "development" topics.
FOR MORE INFORMATION


@@ -32,15 +32,15 @@ The files are split into directories named by the first few
digits of the signature. The prefix length used for directory
names can be changed by this script.
"""
-__revision__ = "scripts/scons-configure-cache.py 120fd4f633e9ef3cafbc0fec35306d7555ffd1db Tue, 21 Mar 2023 12:11:27 -0400 bdbaddog"
-__version__ = "4.5.2"
-__build__ = "120fd4f633e9ef3cafbc0fec35306d7555ffd1db"
-__buildsys__ = "M1DOG2021"
-__date__ = "Tue, 21 Mar 2023 12:11:27 -0400"
+__revision__ = "scripts/scons-configure-cache.py 265be6883fadbb5a545612265acc919595158366 Sun, 17 Mar 2024 17:33:54 -0700 bdbaddog"
+__version__ = "4.7.0"
+__build__ = "265be6883fadbb5a545612265acc919595158366"
+__buildsys__ = "M1Dog2021"
+__date__ = "Sun, 17 Mar 2024 17:33:54 -0700"
__developer__ = "bdbaddog"


@ -1,746 +0,0 @@
# MIT License
#
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Serial and Parallel classes to execute build tasks.
The Jobs class provides a higher level interface to start,
stop, and wait on jobs.
"""
import SCons.compat
import logging
import os
import signal
import sys
import threading
from enum import Enum
import SCons.Errors
import SCons.Warnings
# The default stack size (in kilobytes) of the threads used to execute
# jobs in parallel.
#
# We use a stack size of 256 kilobytes. The default on some platforms
# is too large and prevents us from creating enough threads to fully
# parallelize the build. For example, the default stack size on linux
# is 8 MBytes.
explicit_stack_size = None
default_stack_size = 256
interrupt_msg = 'Build interrupted.'
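# For illustration only: the two module-level knobs above are meant to be set
# by the calling code (e.g. an option handler) before any Jobs object is
# created; this helper is a sketch, not part of the module.
def _example_set_worker_stack_size(kilobytes):
    """Request a per-worker-thread stack size, overriding default_stack_size."""
    global explicit_stack_size
    explicit_stack_size = kilobytes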
class InterruptState:
def __init__(self):
self.interrupted = False
def set(self):
self.interrupted = True
def __call__(self):
return self.interrupted
class Jobs:
"""An instance of this class initializes N jobs, and provides
methods for starting, stopping, and waiting on all N jobs.
"""
def __init__(self, num, taskmaster):
"""
Create 'num' jobs using the given taskmaster.
If 'num' is 1 or less, then a serial job will be used,
otherwise a parallel job with 'num' worker threads will
be used.
The 'num_jobs' attribute will be set to the actual number of jobs
allocated. If more than one job is requested but the Parallel
class can't do it, it gets reset to 1. Wrapping interfaces that
care should check the value of 'num_jobs' after initialization.
"""
# Importing GetOption here instead of at top of file to avoid
# circular imports
# pylint: disable=import-outside-toplevel
from SCons.Script import GetOption
self.job = None
if num > 1:
stack_size = explicit_stack_size
if stack_size is None:
stack_size = default_stack_size
try:
experimental_option = GetOption('experimental')
if 'tm_v2' in experimental_option:
self.job = NewParallel(taskmaster, num, stack_size)
else:
self.job = LegacyParallel(taskmaster, num, stack_size)
self.num_jobs = num
except NameError:
pass
if self.job is None:
self.job = Serial(taskmaster)
self.num_jobs = 1
def run(self, postfunc=lambda: None):
"""Run the jobs.
postfunc() will be invoked after the jobs has run. It will be
invoked even if the jobs are interrupted by a keyboard
interrupt (well, in fact by a signal such as either SIGINT,
SIGTERM or SIGHUP). The execution of postfunc() is protected
against keyboard interrupts and is guaranteed to run to
completion."""
self._setup_sig_handler()
try:
self.job.start()
finally:
postfunc()
self._reset_sig_handler()
def were_interrupted(self):
"""Returns whether the jobs were interrupted by a signal."""
return self.job.interrupted()
def _setup_sig_handler(self):
"""Setup an interrupt handler so that SCons can shutdown cleanly in
various conditions:
a) SIGINT: Keyboard interrupt
b) SIGTERM: kill or system shutdown
c) SIGHUP: Controlling shell exiting
We handle all of these cases by stopping the taskmaster. It
turns out that it's very difficult to stop the build process
by throwing asynchronously an exception such as
KeyboardInterrupt. For example, the python Condition
variables (threading.Condition) and queues do not seem to be
asynchronous-exception-safe. It would require adding a whole
bunch of try/finally block and except KeyboardInterrupt all
over the place.
Note also that we have to be careful to handle the case when
SCons forks before executing another process. In that case, we
want the child to exit immediately.
"""
def handler(signum, stack, self=self, parentpid=os.getpid()):
if os.getpid() == parentpid:
self.job.taskmaster.stop()
self.job.interrupted.set()
else:
os._exit(2) # pylint: disable=protected-access
self.old_sigint = signal.signal(signal.SIGINT, handler)
self.old_sigterm = signal.signal(signal.SIGTERM, handler)
try:
self.old_sighup = signal.signal(signal.SIGHUP, handler)
except AttributeError:
pass
if (self.old_sigint is None) or (self.old_sigterm is None) or \
(hasattr(self, "old_sighup") and self.old_sighup is None):
msg = "Overwritting previous signal handler which was not installed from Python. " + \
"Will not be able to reinstate and so will return to default handler."
SCons.Warnings.warn(SCons.Warnings.SConsWarning, msg)
def _reset_sig_handler(self):
"""Restore the signal handlers to their previous state (before the
call to _setup_sig_handler()."""
sigint_to_use = self.old_sigint if self.old_sigint is not None else signal.SIG_DFL
sigterm_to_use = self.old_sigterm if self.old_sigterm is not None else signal.SIG_DFL
signal.signal(signal.SIGINT, sigint_to_use)
signal.signal(signal.SIGTERM, sigterm_to_use)
try:
sigterm_to_use = self.old_sighup if self.old_sighup is not None else signal.SIG_DFL
signal.signal(signal.SIGHUP, sigterm_to_use)
except AttributeError:
pass
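# For illustration only: a minimal sketch of driving this module from a
# wrapper, assuming `build_taskmaster` is an object implementing the
# taskmaster protocol described above (it is hypothetical, not defined here).
def _example_run_build(build_taskmaster):
    jobs = Jobs(4, build_taskmaster)   # asks for 4 worker threads; falls back to Serial if needed
    jobs.run()                         # executes tasks; signal handlers installed for the duration
    return jobs.were_interrupted()     # True if SIGINT/SIGTERM/SIGHUP stopped the build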
class Serial:
"""This class is used to execute tasks in series, and is more efficient
than Parallel, but is only appropriate for non-parallel builds. Only
one instance of this class should be in existence at a time.
This class is not thread safe.
"""
def __init__(self, taskmaster):
"""Create a new serial job given a taskmaster.
The taskmaster's next_task() method should return the next task
that needs to be executed, or None if there are no more tasks. The
taskmaster's executed() method will be called for each task when it
is successfully executed, or failed() will be called if it failed to
execute (e.g. execute() raised an exception)."""
self.taskmaster = taskmaster
self.interrupted = InterruptState()
def start(self):
"""Start the job. This will begin pulling tasks from the taskmaster
and executing them, and return when there are no more tasks. If a task
fails to execute (i.e. execute() raises an exception), then the job will
stop."""
while True:
task = self.taskmaster.next_task()
if task is None:
break
try:
task.prepare()
if task.needs_execute():
task.execute()
except Exception:
if self.interrupted():
try:
raise SCons.Errors.BuildError(
task.targets[0], errstr=interrupt_msg)
except Exception:
task.exception_set()
else:
task.exception_set()
# Let the failed() callback function arrange for the
# build to stop if that's appropriate.
task.failed()
else:
task.executed()
task.postprocess()
self.taskmaster.cleanup()
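# For illustration only: the minimal taskmaster-side protocol that Serial (and
# the parallel schedulers below) rely on, as a sketch with a hypothetical task
# type; the real implementation lives in SCons.Taskmaster.
class _ExampleTaskmaster:
    def __init__(self, tasks):
        self._tasks = list(tasks)
    def next_task(self):
        # Hand out the next task, or None when the walk is finished.
        return self._tasks.pop(0) if self._tasks else None
    def stop(self):
        # Called from the signal handler: stop handing out new tasks.
        self._tasks = []
    def cleanup(self):
        # Called once by the job loop after the last task is retired.
        pass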
# Trap import failure so that everything in the Job module but the
# Parallel class (and its dependent classes) will work if the interpreter
# doesn't support threads.
try:
import queue
import threading
except ImportError:
pass
else:
class Worker(threading.Thread):
"""A worker thread waits on a task to be posted to its request queue,
dequeues the task, executes it, and posts a tuple including the task
and a boolean indicating whether the task executed successfully. """
def __init__(self, requestQueue, resultsQueue, interrupted):
super().__init__()
self.daemon = True
self.requestQueue = requestQueue
self.resultsQueue = resultsQueue
self.interrupted = interrupted
self.start()
def run(self):
while True:
task = self.requestQueue.get()
if task is None:
# The "None" value is used as a sentinel by
# ThreadPool.cleanup(). This indicates that there
# are no more tasks, so we should quit.
break
try:
if self.interrupted():
raise SCons.Errors.BuildError(
task.targets[0], errstr=interrupt_msg)
task.execute()
except Exception:
task.exception_set()
ok = False
else:
ok = True
self.resultsQueue.put((task, ok))
class ThreadPool:
"""This class is responsible for spawning and managing worker threads."""
def __init__(self, num, stack_size, interrupted):
"""Create the request and reply queues, and 'num' worker threads.
One must specify the stack size of the worker threads. The
stack size is specified in kilobytes.
"""
self.requestQueue = queue.Queue(0)
self.resultsQueue = queue.Queue(0)
try:
prev_size = threading.stack_size(stack_size * 1024)
except AttributeError as e:
# Only print a warning if the stack size has been
# explicitly set.
if explicit_stack_size is not None:
msg = "Setting stack size is unsupported by this version of Python:\n " + \
e.args[0]
SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
except ValueError as e:
msg = "Setting stack size failed:\n " + str(e)
SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
# Create worker threads
self.workers = []
for _ in range(num):
worker = Worker(self.requestQueue, self.resultsQueue, interrupted)
self.workers.append(worker)
if 'prev_size' in locals():
threading.stack_size(prev_size)
def put(self, task):
"""Put task into request queue."""
self.requestQueue.put(task)
def get(self):
"""Remove and return a result tuple from the results queue."""
return self.resultsQueue.get()
def preparation_failed(self, task):
self.resultsQueue.put((task, False))
def cleanup(self):
"""
Shuts down the thread pool, giving each worker thread a
chance to shut down gracefully.
"""
# For each worker thread, put a sentinel "None" value
# on the requestQueue (indicating that there's no work
# to be done) so that each worker thread will get one and
# terminate gracefully.
for _ in self.workers:
self.requestQueue.put(None)
# Wait for all of the workers to terminate.
#
# If we don't do this, later Python versions (2.4, 2.5) often
# seem to raise exceptions during shutdown. This happens
# in requestQueue.get(), as an assertion failure that
# requestQueue.not_full is notified while not acquired,
# seemingly because the main thread has shut down (or is
# in the process of doing so) while the workers are still
# trying to pull sentinels off the requestQueue.
#
# Normally these terminations should happen fairly quickly,
# but we'll stick a one-second timeout on here just in case
# someone gets hung.
for worker in self.workers:
worker.join(1.0)
self.workers = []
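# For illustration only: the request/results round trip that ThreadPool
# implements, as a commented sketch (`runnable_tasks` is hypothetical):
#
#     pool = ThreadPool(num=4, stack_size=256, interrupted=InterruptState())
#     for task in runnable_tasks:
#         pool.put(task)              # a Worker dequeues it and calls task.execute()
#     for _ in runnable_tasks:
#         task, ok = pool.get()       # blocks until a worker posts (task, ok)
#     pool.cleanup()                  # one None sentinel per worker, then join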
class LegacyParallel:
"""This class is used to execute tasks in parallel, and is somewhat
less efficient than Serial, but is appropriate for parallel builds.
This class is thread safe.
"""
def __init__(self, taskmaster, num, stack_size):
"""Create a new parallel job given a taskmaster.
The taskmaster's next_task() method should return the next
task that needs to be executed, or None if there are no more
tasks. The taskmaster's executed() method will be called
for each task when it is successfully executed, or failed()
will be called if the task failed to execute (i.e. execute()
raised an exception).
Note: calls to taskmaster are serialized, but calls to
execute() on distinct tasks are not serialized, because
that is the whole point of parallel jobs: they can execute
multiple tasks simultaneously. """
self.taskmaster = taskmaster
self.interrupted = InterruptState()
self.tp = ThreadPool(num, stack_size, self.interrupted)
self.maxjobs = num
def start(self):
"""Start the job. This will begin pulling tasks from the
taskmaster and executing them, and return when there are no
more tasks. If a task fails to execute (i.e. execute() raises
an exception), then the job will stop."""
jobs = 0
while True:
# Start up as many available tasks as we're
# allowed to.
while jobs < self.maxjobs:
task = self.taskmaster.next_task()
if task is None:
break
try:
# prepare task for execution
task.prepare()
except Exception:
task.exception_set()
task.failed()
task.postprocess()
else:
if task.needs_execute():
# dispatch task
self.tp.put(task)
jobs += 1
else:
task.executed()
task.postprocess()
if not task and not jobs:
break
# Let any/all completed tasks finish up before we go
# back and put the next batch of tasks on the queue.
while True:
task, ok = self.tp.get()
jobs -= 1
if ok:
task.executed()
else:
if self.interrupted():
try:
raise SCons.Errors.BuildError(
task.targets[0], errstr=interrupt_msg)
except Exception:
task.exception_set()
# Let the failed() callback function arrange
# for the build to stop if that's appropriate.
task.failed()
task.postprocess()
if self.tp.resultsQueue.empty():
break
self.tp.cleanup()
self.taskmaster.cleanup()
# An experimental new parallel scheduler that uses a leaders/followers pattern.
class NewParallel:
class State(Enum):
READY = 0
SEARCHING = 1
STALLED = 2
COMPLETED = 3
class Worker(threading.Thread):
def __init__(self, owner):
super().__init__()
self.daemon = True
self.owner = owner
self.start()
def run(self):
self.owner._work()
def __init__(self, taskmaster, num, stack_size):
self.taskmaster = taskmaster
self.num_workers = num
self.stack_size = stack_size
self.interrupted = InterruptState()
self.workers = []
# The `tm_lock` is what ensures that we only have one
# thread interacting with the taskmaster at a time. It
# also protects access to our state that gets updated
# concurrently. The `can_search_cv` is associated with
# this mutex.
self.tm_lock = threading.Lock()
# Guarded under `tm_lock`.
self.jobs = 0
self.state = NewParallel.State.READY
# The `can_search_cv` is used to manage a leader /
# follower pattern for access to the taskmaster, and to
# awaken from stalls.
self.can_search_cv = threading.Condition(self.tm_lock)
# The queue of tasks that have completed execution. The
# next thread to obtain `tm_lock`` will retire them.
self.results_queue_lock = threading.Lock()
self.results_queue = []
if self.taskmaster.trace:
self.trace = self._setup_logging()
else:
self.trace = False
def _setup_logging(self):
jl = logging.getLogger("Job")
jl.setLevel(level=logging.DEBUG)
jl.addHandler(self.taskmaster.trace.log_handler)
return jl
def trace_message(self, message):
# This grabs the name of the function which calls trace_message()
method_name = sys._getframe(1).f_code.co_name + "():"
thread_id=threading.get_ident()
self.trace.debug('%s.%s [Thread:%s] %s' % (type(self).__name__, method_name, thread_id, message))
# print('%-15s %s' % (method_name, message))
def start(self):
self._start_workers()
for worker in self.workers:
worker.join()
self.workers = []
self.taskmaster.cleanup()
def _start_workers(self):
prev_size = self._adjust_stack_size()
for _ in range(self.num_workers):
self.workers.append(NewParallel.Worker(self))
self._restore_stack_size(prev_size)
def _adjust_stack_size(self):
try:
prev_size = threading.stack_size(self.stack_size * 1024)
return prev_size
except AttributeError as e:
# Only print a warning if the stack size has been
# explicitly set.
if explicit_stack_size is not None:
msg = "Setting stack size is unsupported by this version of Python:\n " + \
e.args[0]
SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
except ValueError as e:
msg = "Setting stack size failed:\n " + str(e)
SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
return None
def _restore_stack_size(self, prev_size):
if prev_size is not None:
threading.stack_size(prev_size)
def _work(self):
task = None
while True:
# Obtain `tm_lock`, granting exclusive access to the taskmaster.
with self.can_search_cv:
if self.trace:
self.trace_message("Gained exclusive access")
# Capture whether we got here with `task` set,
# then drop our reference to the task as we are no
# longer interested in the actual object.
completed_task = (task is not None)
task = None
# We will only have `completed_task` set here if
# we have looped back after executing a task. If
# we have completed a task and find that we are
# stalled, we should speculatively indicate that
# we are no longer stalled by transitioning to the
# 'ready' state which will bypass the condition
# wait so that we immediately process the results
# queue and hopefully light up new
# work. Otherwise, stay stalled, and we will wait
# in the condvar. Some other thread will come back
# here with a completed task.
if self.state == NewParallel.State.STALLED and completed_task:
if self.trace:
self.trace_message("Detected stall with completed task, bypassing wait")
self.state = NewParallel.State.READY
# Wait until we are neither searching nor stalled.
while self.state == NewParallel.State.SEARCHING or self.state == NewParallel.State.STALLED:
if self.trace:
self.trace_message("Search already in progress, waiting")
self.can_search_cv.wait()
# If someone set the completed flag, bail.
if self.state == NewParallel.State.COMPLETED:
if self.trace:
self.trace_message("Completion detected, breaking from main loop")
break
# Set the searching flag to indicate that a thread
# is currently in the critical section for
# taskmaster work.
#
if self.trace:
self.trace_message("Starting search")
self.state = NewParallel.State.SEARCHING
# Bulk acquire the tasks in the results queue
# under the result queue lock, then process them
# all outside that lock. We need to process the
# tasks in the results queue before looking for
# new work because we might be unable to find new
# work if we don't.
results_queue = []
with self.results_queue_lock:
results_queue, self.results_queue = self.results_queue, results_queue
if self.trace:
self.trace_message("Found {len(results_queue)} completed tasks to process")
for (rtask, rresult) in results_queue:
if rresult:
rtask.executed()
else:
if self.interrupted():
try:
raise SCons.Errors.BuildError(
rtask.targets[0], errstr=interrupt_msg)
except Exception:
rtask.exception_set()
# Let the failed() callback function arrange
# for the build to stop if that's appropriate.
rtask.failed()
rtask.postprocess()
self.jobs -= 1
# We are done with any task objects that were in
# the results queue.
results_queue.clear()
# Now, turn the crank on the taskmaster until we
# either run out of tasks, or find a task that
# needs execution. If we run out of tasks, go idle
# until results arrive if jobs are pending, or
# mark the walk as complete if not.
while self.state == NewParallel.State.SEARCHING:
if self.trace:
self.trace_message("Searching for new tasks")
task = self.taskmaster.next_task()
if task:
# We found a task. Walk it through the
# task lifecycle. If it does not need
# execution, just complete the task and
# look for the next one. Otherwise,
# indicate that we are no longer searching
# so we can drop out of this loop, execute
# the task outside the lock, and allow
# another thread in to search.
try:
task.prepare()
except Exception:
task.exception_set()
task.failed()
task.postprocess()
else:
if not task.needs_execute():
if self.trace:
self.trace_message("Found internal task")
task.executed()
task.postprocess()
else:
self.jobs += 1
if self.trace:
self.trace_message("Found task requiring execution")
self.state = NewParallel.State.READY
self.can_search_cv.notify()
else:
# We failed to find a task, so this thread
# cannot continue turning the taskmaster
# crank. We must exit the loop.
if self.jobs:
# No task was found, but there are
# outstanding jobs executing that
# might unblock new tasks when they
# complete. Transition to the stalled
# state. We do not need a notify,
# because we know there are threads
# outstanding that will re-enter the
# loop.
#
if self.trace:
self.trace_message("Found no task requiring execution, but have jobs: marking stalled")
self.state = NewParallel.State.STALLED
else:
# We didn't find a task and there are
# no jobs outstanding, so there is
# nothing that will ever return
# results which might unblock new
# tasks. We can conclude that the walk
# is complete. Update our state to
# note completion and awaken anyone
# sleeping on the condvar.
#
if self.trace:
self.trace_message("Found no task requiring execution, and have no jobs: marking complete")
self.state = NewParallel.State.COMPLETED
self.can_search_cv.notify_all()
# We no longer hold `tm_lock` here. If we have a task,
# we can now execute it. If there are threads waiting
# to search, one of them can now begin turning the
# taskmaster crank in NewParallel.
if task:
if self.trace:
self.trace_message("Executing task")
ok = True
try:
if self.interrupted():
raise SCons.Errors.BuildError(
task.targets[0], errstr=interrupt_msg)
task.execute()
except Exception:
ok = False
task.exception_set()
# Grab the results queue lock and enqueue the
# executed task and state. The next thread into
# the searching loop will complete the
# postprocessing work under the taskmaster lock.
#
if self.trace:
self.trace_message("Enqueueing executed task results")
with self.results_queue_lock:
self.results_queue.append((task, ok))
# Tricky state "fallthrough" here. We are going back
# to the top of the loop, which behaves differently
# depending on whether `task` is set. Do not perturb
# the value of the `task` variable if you add new code
# after this comment.
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:


@ -1,55 +0,0 @@
<?xml version='1.0'?>
<!--
Copyright (c) 2001-2010 The SCons Foundation
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-->
<xsl:stylesheet
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
xmlns:fo="http://www.w3.org/1999/XSL/Format"
version="1.0">
<xsl:import href="file:///usr/share/xml/docbook/stylesheet/docbook-xsl/html/docbook.xsl"/>
<xsl:param name="l10n.gentext.default.language" select="'en'"/>
<xsl:param name="section.autolabel" select="1"/>
<xsl:param name="html.stylesheet" select="'scons.css'"/>
<xsl:param name="generate.toc">
/appendix toc,title
article/appendix nop
/article toc,title
book toc,title,figure,table,example,equation
/chapter toc,title
part toc,title
/preface toc,title
reference toc,title
/sect1 toc
/sect2 toc
/sect3 toc
/sect4 toc
/sect5 toc
/section toc
set toc,title
</xsl:param>
</xsl:stylesheet>


@ -1,62 +0,0 @@
<?xml version='1.0'?>
<!--
Copyright (c) 2001-2010 The SCons Foundation
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-->
<xsl:stylesheet
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
xmlns:fo="http://www.w3.org/1999/XSL/Format"
version="1.0">
<xsl:import href="file:///usr/share/xml/docbook/stylesheet/docbook-xsl/fo/docbook.xsl"/>
<xsl:param name="l10n.gentext.default.language" select="'en'"/>
<xsl:param name="section.autolabel" select="1"></xsl:param>
<xsl:param name="toc.indent.width" select="0"></xsl:param>
<xsl:param name="body.start.indent">0pt</xsl:param>
<xsl:param name="shade.verbatim" select="1"></xsl:param>
<xsl:param name="generate.toc">
/appendix toc,title
article/appendix nop
/article toc,title
book toc,title,figure,table,example,equation
/chapter toc,title
part toc,title
/preface toc,title
reference toc,title
/sect1 toc
/sect2 toc
/sect3 toc
/sect4 toc
/sect5 toc
/section toc
set toc,title
</xsl:param>
<xsl:template match="varlistentry/term">
<xsl:call-template name="inline.boldseq"/>
</xsl:template>
</xsl:stylesheet>


@ -1,241 +0,0 @@
# MIT License
#
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""The SCons warnings framework."""
import sys
import SCons.Errors
class SConsWarning(SCons.Errors.UserError):
pass
class WarningOnByDefault(SConsWarning):
pass
# NOTE: If you add a new warning class, add it to the man page, too!
# Not all warnings are defined here, some are defined in the location of use
class TargetNotBuiltWarning(SConsWarning): # Should go to OnByDefault
pass
class CacheVersionWarning(WarningOnByDefault):
pass
class CacheWriteErrorWarning(SConsWarning):
pass
class CacheCleanupErrorWarning(SConsWarning):
pass
class CorruptSConsignWarning(WarningOnByDefault):
pass
class DependencyWarning(SConsWarning):
pass
class DevelopmentVersionWarning(WarningOnByDefault):
pass
class DuplicateEnvironmentWarning(WarningOnByDefault):
pass
class FutureReservedVariableWarning(WarningOnByDefault):
pass
class LinkWarning(WarningOnByDefault):
pass
class MisleadingKeywordsWarning(WarningOnByDefault):
pass
class MissingSConscriptWarning(WarningOnByDefault):
pass
class NoObjectCountWarning(WarningOnByDefault):
pass
class NoParallelSupportWarning(WarningOnByDefault):
pass
class ReservedVariableWarning(WarningOnByDefault):
pass
class StackSizeWarning(WarningOnByDefault):
pass
class VisualCMissingWarning(WarningOnByDefault):
pass
# Used when MSVC_VERSION and MSVS_VERSION do not point to the
# same version (MSVS_VERSION is deprecated)
class VisualVersionMismatch(WarningOnByDefault):
pass
class VisualStudioMissingWarning(SConsWarning):
pass
class FortranCxxMixWarning(LinkWarning):
pass
# Deprecation warnings
class FutureDeprecatedWarning(SConsWarning):
pass
class DeprecatedWarning(SConsWarning):
pass
class MandatoryDeprecatedWarning(DeprecatedWarning):
pass
# Special case; base always stays DeprecatedWarning
class PythonVersionWarning(DeprecatedWarning):
pass
class DeprecatedSourceCodeWarning(FutureDeprecatedWarning):
pass
class TaskmasterNeedsExecuteWarning(DeprecatedWarning):
pass
class DeprecatedOptionsWarning(MandatoryDeprecatedWarning):
pass
class DeprecatedDebugOptionsWarning(MandatoryDeprecatedWarning):
pass
class DeprecatedMissingSConscriptWarning(DeprecatedWarning):
pass
class ToolQtDeprecatedWarning(DeprecatedWarning):
pass
# The below is a list of 2-tuples. The first element is a class object.
# The second element is true if that class is enabled, false if it is disabled.
_enabled = []
# If set, raise the warning as an exception
_warningAsException = False
# If not None, a function to call with the warning
_warningOut = None
def suppressWarningClass(clazz):
"""Suppresses all warnings of type clazz or derived from clazz."""
_enabled.insert(0, (clazz, False))
def enableWarningClass(clazz):
"""Enables all warnings of type clazz or derived from clazz."""
_enabled.insert(0, (clazz, True))
def warningAsException(flag=True):
"""Set global _warningAsExeption flag.
Args:
flag: value to set warnings-as-exceptions to [default: True]
Returns:
The previous value.
"""
global _warningAsException
old = _warningAsException
_warningAsException = flag
return old
def warn(clazz, *args):
"""Issue a warning, accounting for SCons rules.
Check if warnings for this class are enabled.
If warnings are treated as exceptions, raise exception.
Use the global warning-emitter _warningOut, which allows selecting
different ways of presenting a traceback (see Script/Main.py)
"""
warning = clazz(args)
for cls, flag in _enabled:
if isinstance(warning, cls):
if flag:
if _warningAsException:
raise warning
if _warningOut:
_warningOut(warning)
break
def process_warn_strings(arguments):
"""Process requests to enable/disable warnings.
The requests are strings passed to the --warn option or the
SetOption('warn') function.
An argument to this option should be of the form "warning-class"
or "no-warning-class". The warning class is munged and has
the suffix "Warning" added in order to get an actual class name
from the classes above, which we need to pass to the
{enable,disable}WarningClass() functions.
For example, "deprecated" will enable the DeprecatedWarning class.
"no-dependency" will disable the DependencyWarning class.
As a special case, --warn=all and --warn=no-all will enable or
disable (respectively) the base class of all SCons warnings.
"""
def _classmunge(s):
"""Convert a warning argument to SConsCase.
The result is CamelCase, except "Scons" is changed to "SCons"
"""
s = s.replace("-", " ").title().replace(" ", "")
return s.replace("Scons", "SCons")
for arg in arguments:
enable = True
if arg.startswith("no-"):
enable = False
arg = arg[len("no-") :]
if arg == 'all':
class_name = "SConsWarning"
else:
class_name = _classmunge(arg) + 'Warning'
try:
clazz = globals()[class_name]
except KeyError:
sys.stderr.write("No warning type: '%s'\n" % arg)
else:
if enable:
enableWarningClass(clazz)
elif issubclass(clazz, MandatoryDeprecatedWarning):
fmt = "Can not disable mandataory warning: '%s'\n"
sys.stderr.write(fmt % arg)
else:
suppressWarningClass(clazz)
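# For illustration only: how the pieces above combine, as a sketch (not part
# of the module). Enable/suppress entries are consulted most-recent-first.
def _example_warning_setup():
    process_warn_strings(["all", "no-dependency"])  # like --warn=all --warn=no-dependency
    warningAsException(False)                       # report through _warningOut, don't raise
    warn(DependencyWarning, "suppressed: matches the no-dependency entry first")
    warn(TargetNotBuiltWarning, "emitted: falls through to the enabled 'all' entry")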
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:


@ -1,9 +0,0 @@
__version__="4.5.2"
__copyright__="Copyright (c) 2001 - 2023 The SCons Foundation"
__developer__="bdbaddog"
__date__="Tue, 21 Mar 2023 12:11:27 -0400"
__buildsys__="M1DOG2021"
__revision__="120fd4f633e9ef3cafbc0fec35306d7555ffd1db"
__build__="120fd4f633e9ef3cafbc0fec35306d7555ffd1db"
# make sure compatibility is always in place
import SCons.compat # noqa


@@ -100,24 +100,27 @@ way for wrapping up the functions.
"""
+import inspect
import os
import pickle
import re
-import sys
import subprocess
-from subprocess import DEVNULL
-import inspect
+import sys
+from abc import ABC, abstractmethod
from collections import OrderedDict
+from subprocess import DEVNULL, PIPE
+from typing import List, Optional, Tuple
import SCons.Debug
-from SCons.Debug import logInstanceCreation
import SCons.Errors
-import SCons.Util
import SCons.Subst
+import SCons.Util
# we use these a lot, so try to optimize them
+from SCons.Debug import logInstanceCreation
+from SCons.Subst import SUBST_CMD, SUBST_RAW, SUBST_SIG
from SCons.Util import is_String, is_List
+from SCons.Util.sctyping import ExecutorType
class _null:
pass
@@ -126,13 +129,10 @@ print_actions = True
execute_actions = True
print_actions_presub = False
-# Use pickle protocol 1 when pickling functions for signature
-# otherwise python3 and python2 will yield different pickles
-# for the same object.
-# This is due to default being 1 for python 2.7, and 3 for 3.x
-# TODO: We can roll this forward to 2 (if it has value), but not
-# before a deprecation cycle as the sconsigns will change
-ACTION_SIGNATURE_PICKLE_PROTOCOL = 1
+# Use pickle protocol 4 when pickling functions for signature.
+# This is the common format since Python 3.4
+# TODO: use is commented out as not stable since 2017: e0bc3a04d5. Drop?
+# ACTION_SIGNATURE_PICKLE_PROTOCOL = 4
def rfile(n):
@@ -148,9 +148,8 @@ def default_exitstatfunc(s):
strip_quotes = re.compile(r'^[\'"](.*)[\'"]$')
-def _callable_contents(obj):
-"""Return the signature contents of a callable Python object.
-"""
+def _callable_contents(obj) -> bytearray:
+"""Return the signature contents of a callable Python object."""
try:
# Test if obj is a method.
return _function_contents(obj.__func__)
@@ -170,7 +169,7 @@ def _callable_contents(obj):
return _function_contents(obj)
-def _object_contents(obj):
+def _object_contents(obj) -> bytearray:
"""Return the signature contents of any Python object.
We have to handle the case where object contains a code object
@@ -210,8 +209,10 @@ def _object_contents(obj):
# the best we can.
return bytearray(repr(obj), 'utf-8')
+# TODO: docstrings for _code_contents and _function_contents
+# do not render well with Sphinx. Consider reworking.
-def _code_contents(code, docstring=None):
+def _code_contents(code, docstring=None) -> bytearray:
r"""Return the signature contents of a code object.
By providing direct access to the code object of the
@@ -223,7 +224,7 @@ def _code_contents(code, docstring=None):
recompilations from moving a Python function.
See:
-- https://docs.python.org/2/library/inspect.html
+- https://docs.python.org/3/library/inspect.html
- http://python-reference.readthedocs.io/en/latest/docs/code/index.html
For info on what each co\_ variable provides
@@ -243,12 +244,11 @@ def _code_contents(code, docstring=None):
co_code - Returns a string representing the sequence of bytecode instructions.
"""
-# contents = []
# The code contents depends on the number of local variables
# but not their actual names.
-contents = bytearray("{}, {}".format(code.co_argcount, len(code.co_varnames)), 'utf-8')
+contents = bytearray(f"{code.co_argcount}, {len(code.co_varnames)}", 'utf-8')
contents.extend(b", ")
contents.extend(bytearray(str(len(code.co_cellvars)), 'utf-8'))
@@ -281,8 +281,9 @@
return contents
-def _function_contents(func):
-"""
+def _function_contents(func) -> bytearray:
+"""Return the signature contents of a function.
The signature is as follows (should be byte/chars):
< _code_contents (see above) from func.__code__ >
,( comma separated _object_contents for function argument defaults)
@@ -293,11 +294,7 @@ def _function_contents(func):
- func.__code__ - The code object representing the compiled function body.
- func.__defaults__ - A tuple containing default argument values for those arguments that have defaults, or None if no arguments have a default value
- func.__closure__ - None or a tuple of cells that contain bindings for the function's free variables.
-:Returns:
-Signature contents of a function. (in bytes)
"""
contents = [_code_contents(func.__code__, func.__doc__)]
# The function contents depends on the value of defaults arguments
@@ -389,9 +386,10 @@ def _object_instance_content(obj):
# print("Inst Methods :\n%s"%pp.pformat(methods))
def _actionAppend(act1, act2):
-# This function knows how to slap two actions together.
-# Mainly, it handles ListActions by concatenating into
-# a single ListAction.
+"""Joins two actions together.
+Mainly, it handles ListActions by concatenating into a single ListAction.
+"""
a1 = Action(act1)
a2 = Action(act2)
if a1 is None:
@@ -401,12 +399,11 @@ def _actionAppend(act1, act2):
if isinstance(a1, ListAction):
if isinstance(a2, ListAction):
return ListAction(a1.list + a2.list)
-else:
return ListAction(a1.list + [ a2 ])
-else:
if isinstance(a2, ListAction):
return ListAction([ a1 ] + a2.list)
-else:
return ListAction([ a1, a2 ])
@@ -439,21 +436,18 @@ def _do_create_keywords(args, kw):
def _do_create_action(act, kw):
-"""This is the actual "implementation" for the
-Action factory method, below. This handles the
-fact that passing lists to Action() itself has
-different semantics than passing lists as elements
-of lists.
-The former will create a ListAction, the latter
-will create a CommandAction by converting the inner
-list elements to strings."""
+"""The internal implementation for the Action factory method.
+This handles the fact that passing lists to :func:`Action` itself has
+different semantics than passing lists as elements of lists.
+The former will create a :class:`ListAction`, the latter will create a
+:class:`CommandAction` by converting the inner list elements to strings.
+"""
if isinstance(act, ActionBase):
return act
if is_String(act):
-var=SCons.Util.get_environment_var(act)
+var = SCons.Util.get_environment_var(act)
if var:
# This looks like a string that is purely an Environment
# variable reference, like "$FOO" or "${FOO}". We do
@@ -473,11 +467,7 @@ def _do_create_action(act, kw):
return CommandAction(act, **kw)
if callable(act):
-try:
-gen = kw['generator']
-del kw['generator']
-except KeyError:
-gen = 0
+gen = kw.pop('generator', False)
if gen:
action_type = CommandGeneratorAction
else:
@@ -485,24 +475,32 @@
return action_type(act, kw)
# Catch a common error case with a nice message:
-if isinstance(act, int) or isinstance(act, float):
+if isinstance(act, (int, float)):
raise TypeError("Don't know how to create an Action from a number (%s)"%act)
# Else fail silently (???)
return None
-def _do_create_list_action(act, kw):
-"""A factory for list actions. Convert the input list into Actions
-and then wrap them in a ListAction."""
+# TODO: from __future__ import annotations once we get to Python 3.7 base,
+# to avoid quoting the defined-later classname
+def _do_create_list_action(act, kw) -> "ListAction":
+"""A factory for list actions.
+Convert the input list *act* into Actions and then wrap them in a
+:class:`ListAction`. If *act* has only a single member, return that
+member, not a *ListAction*. This is intended to allow a contained
+list to specify a command action without being processed into a
+list action.
+"""
acts = []
for a in act:
aa = _do_create_action(a, kw)
-if aa is not None: acts.append(aa)
+if aa is not None:
+acts.append(aa)
if not acts:
return ListAction([])
-elif len(acts) == 1:
+if len(acts) == 1:
return acts[0]
-else:
return ListAction(acts)
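# For illustration only: the list semantics described in the docstrings
# above, as a sketch.
#
#     Action(["cp $SOURCE $TARGET", "chmod 644 $TARGET"])  # ListAction of two CommandActions
#     Action([["cp", "$SOURCE", "$TARGET"]])               # single CommandAction: inner list is one command line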
@@ -515,11 +513,26 @@ def Action(act, *args, **kw):
return _do_create_action(act, kw)
-class ActionBase:
+class ActionBase(ABC):
"""Base class for all types of action objects that can be held by
other objects (Builders, Executors, etc.) This provides the
common methods for manipulating and combining those actions."""
+@abstractmethod
+def __call__(
+self,
+target,
+source,
+env,
+exitstatfunc=_null,
+presub=_null,
+show=_null,
+execute=_null,
+chdir=_null,
+executor: Optional[ExecutorType] = None,
+):
+raise NotImplementedError
def __eq__(self, other):
return self.__dict__ == other
@@ -528,13 +541,21 @@ class ActionBase:
batch_key = no_batch_key
-def genstring(self, target, source, env):
+def genstring(self, target, source, env, executor: Optional[ExecutorType] = None) -> str:
return str(self)
+@abstractmethod
+def get_presig(self, target, source, env, executor: Optional[ExecutorType] = None):
+raise NotImplementedError
+@abstractmethod
+def get_implicit_deps(self, target, source, env, executor: Optional[ExecutorType] = None):
+raise NotImplementedError
def get_contents(self, target, source, env):
result = self.get_presig(target, source, env)
-if not isinstance(result,(bytes, bytearray)):
+if not isinstance(result, (bytes, bytearray)):
result = bytearray(result, 'utf-8')
else:
# Make a copy and put in bytearray, without this the contents returned by get_presig
@@ -552,17 +573,15 @@ class ActionBase:
for v in vl:
# do the subst this way to ignore $(...$) parts:
if isinstance(result, bytearray):
-result.extend(SCons.Util.to_bytes(env.subst_target_source('${'+v+'}', SCons.Subst.SUBST_SIG, target, source)))
+result.extend(SCons.Util.to_bytes(env.subst_target_source('${'+v+'}', SUBST_SIG, target, source)))
else:
raise Exception("WE SHOULD NEVER GET HERE result should be bytearray not:%s"%type(result))
-# result.append(SCons.Util.to_bytes(env.subst_target_source('${'+v+'}', SCons.Subst.SUBST_SIG, target, source)))
+# result.append(SCons.Util.to_bytes(env.subst_target_source('${'+v+'}', SUBST_SIG, target, source)))
-if isinstance(result, (bytes,bytearray)):
+if isinstance(result, (bytes, bytearray)):
return result
-else:
raise Exception("WE SHOULD NEVER GET HERE - #2 result should be bytearray not:%s" % type(result))
-# return b''.join(result)
def __add__(self, other):
return _actionAppend(self, other)
@@ -582,10 +601,10 @@ class ActionBase:
self.presub_env = None # don't need this any more
return lines
-def get_varlist(self, target, source, env, executor=None):
+def get_varlist(self, target, source, env, executor: Optional[ExecutorType] = None):
return self.varlist
-def get_targets(self, env, executor):
+def get_targets(self, env, executor: Optional[ExecutorType]):
"""
Returns the type of targets ($TARGETS, $CHANGED_TARGETS) used
by this action.
@@ -597,8 +616,8 @@ class _ActionAction(ActionBase):
"""Base class for actions that create output objects."""
def __init__(self, cmdstr=_null, strfunction=_null, varlist=(),
presub=_null, chdir=None, exitstatfunc=None,
-batch_key=None, targets='$TARGETS',
-**kw):
+batch_key=None, targets: str='$TARGETS',
+**kw) -> None:
self.cmdstr = cmdstr
if strfunction is not _null:
if strfunction is None:
@@ -625,7 +644,7 @@ class _ActionAction(ActionBase):
batch_key = default_batch_key
SCons.Util.AddMethod(self, batch_key, 'batch_key')
-def print_cmd_line(self, s, target, source, env):
+def print_cmd_line(self, s, target, source, env) -> None:
"""
In python 3, and in some of our tests, sys.stdout is
a String io object, and it takes unicode strings only
@@ -639,7 +658,7 @@ class _ActionAction(ActionBase):
show=_null,
execute=_null,
chdir=_null,
-executor=None):
+executor: Optional[ExecutorType] = None):
if not is_List(target):
target = [target]
if not is_List(source):
@@ -719,6 +738,16 @@ class _ActionAction(ActionBase):
return stat
+# Stub these two only so _ActionAction can be instantiated. It's really
+# an ABC like parent ActionBase, but things reach in and use it. It's
+# not just unittests or we could fix it up with a concrete subclass there.
+def get_presig(self, target, source, env, executor: Optional[ExecutorType] = None):
+raise NotImplementedError
+def get_implicit_deps(self, target, source, env, executor: Optional[ExecutorType] = None):
+raise NotImplementedError
def _string_from_cmd_list(cmd_list):
"""Takes a list of command line arguments and returns a pretty
@@ -751,7 +780,7 @@ def get_default_ENV(env):
return env['ENV']
except KeyError:
if not default_ENV:
-import SCons.Environment
+import SCons.Environment  # pylint: disable=import-outside-toplevel,redefined-outer-name
# This is a hideously expensive way to get a default execution
# environment. What it really should do is run the platform
# setup to get the default ENV. Fortunately, it's incredibly
@@ -778,22 +807,127 @@ def _resolve_shell_env(env, target, source):
shell_gens = iter(shell_gen)
except TypeError:
raise SCons.Errors.UserError("SHELL_ENV_GENERATORS must be iteratable.")
-else:
ENV = ENV.copy()
for generator in shell_gens:
ENV = generator(env, target, source, ENV)
if not isinstance(ENV, dict):
raise SCons.Errors.UserError(f"SHELL_ENV_GENERATORS function: {generator} must return a dict.")
return ENV
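# For illustration only: the contract enforced above, as a sketch of a
# user-supplied generator. The CCACHE_DIR variable and the helper name are
# hypothetical; only $SHELL_ENV_GENERATORS itself is SCons API.
def _example_shell_env_generator(env, target, source, shell_env):
    shell_env = dict(shell_env)              # work on a copy
    shell_env["CCACHE_DIR"] = "/tmp/ccache"  # add/override variables as needed
    return shell_env                         # must return a dict
# env.Append(SHELL_ENV_GENERATORS=[_example_shell_env_generator])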
def scons_subproc_run(scons_env, *args, **kwargs) -> subprocess.CompletedProcess:
"""Run an external command using an SCons execution environment.
SCons normally runs external build commands using :mod:`subprocess`,
but does not harvest any output from such commands. This function
is a thin wrapper around :func:`subprocess.run` allowing running
a command in an SCons context (i.e. uses an "execution environment"
rather than the user's existing environment), and provides the ability
to return any output in a :class:`subprocess.CompletedProcess`
instance (this must be selected by setting ``stdout`` and/or
``stderr`` to ``PIPE``, or setting ``capture_output=True`` - see
Keyword Arguments). Typical use case is to run a tool's "version"
option to find out the installed version.
If supplied, the ``env`` keyword argument provides an execution
environment to process into appropriate form before it is supplied
to :mod:`subprocess`; if omitted, *scons_env* is used to derive a
suitable default. The other keyword arguments are passed through,
except that the SCons legacy ``error`` keyword is remapped to the
subprocess ``check`` keyword; if both are omitted ``check=False``
will be passed. The caller is responsible for setting up the desired
arguments for :func:`subprocess.run`.
This function retains the legacy behavior of returning something
vaguely usable even in the face of complete failure, unless
``check=True`` (in which case an error is allowed to be raised):
it synthesizes a :class:`~subprocess.CompletedProcess` instance in
this case.
A subset of interesting keyword arguments follows; see the Python
documentation of :mod:`subprocess` for the complete list.
Keyword Arguments:
stdout: (and *stderr*, *stdin*) if set to :const:`subprocess.PIPE`.
send input to or collect output from the relevant stream in
the subprocess; the default ``None`` does no redirection
(i.e. output or errors may go to the console or log file,
but is not captured); if set to :const:`subprocess.DEVNULL`
they are explicitly thrown away. ``capture_output=True`` is a
synonym for setting both ``stdout`` and ``stderr``
to :const:`~subprocess.PIPE`.
text: open *stdin*, *stdout*, *stderr* in text mode. Default
is binary mode. ``universal_newlines`` is a synonym.
encoding: specifies an encoding. Changes to text mode.
errors: specifies error handling. Changes to text mode.
input: a byte sequence to be passed to *stdin*, unless text
mode is enabled, in which case it must be a string.
shell: if true, the command is executed through the shell.
check: if true and the subprocess exits with a non-zero exit
code, raise a :exc:`subprocess.CalledProcessError` exception.
Otherwise (the default) in case of an :exc:`OSError`, report the
exit code in the :class:`~subprocess.CompletedProcess` instance.
.. versionadded:: 4.6
"""
# Figure out the execution environment to use
env = kwargs.get('env', None)
if env is None:
env = get_default_ENV(scons_env)
kwargs['env'] = SCons.Util.sanitize_shell_env(env)
# Backwards-compat with _subproc: accept 'error', map to 'check',
# and remove, since subprocess.run does not recognize.
# 'error' isn't True/False, it takes a string value (see _subproc)
error = kwargs.get('error')
if error and 'check' in kwargs:
raise ValueError('error and check arguments may not both be used.')
check = kwargs.get('check', False) # always set a value for 'check'
if error is not None:
if error == 'raise':
check = True
del kwargs['error']
kwargs['check'] = check
# TODO: Python version-compat stuff: remap/remove too-new args if needed
if 'text' in kwargs and sys.version_info[:3] < (3, 7):
kwargs['universal_newlines'] = kwargs.pop('text')
if 'capture_output' in kwargs and sys.version_info[:3] < (3, 7):
capture_output = kwargs.pop('capture_output')
if capture_output:
kwargs['stdout'] = kwargs['stderr'] = PIPE
# Most SCons tools/tests expect not to fail on things like missing files.
# check=True (or error="raise") means we're okay to take an exception;
# else we catch the likely exception and construct a dummy
# CompletedProcess instance.
# Note pylint can't see we always include 'check' in kwargs: suppress.
if check:
cp = subprocess.run(*args, **kwargs) # pylint: disable=subprocess-run-check
else:
try:
cp = subprocess.run(*args, **kwargs) # pylint: disable=subprocess-run-check
except OSError as exc:
argline = ' '.join(*args)
cp = subprocess.CompletedProcess(
args=argline, returncode=1, stdout="", stderr=""
)
return cp
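
As a rough usage sketch of the new helper, tool code that already holds a construction environment env might harvest a version string like this (the compiler name and option are only examples; the import path follows this module):

    import subprocess
    from SCons.Action import scons_subproc_run

    cp = scons_subproc_run(env, ['gcc', '--version'],
                           stdout=subprocess.PIPE, text=True)
    if cp.returncode == 0:
        version_line = cp.stdout.splitlines()[0]  # first line of captured output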
def _subproc(scons_env, cmd, error='ignore', **kw): def _subproc(scons_env, cmd, error='ignore', **kw):
"""Wrapper for subprocess which pulls from construction env. """Wrapper for subprocess.Popen which pulls from construction env.
Use for calls to subprocess which need to interpolate values from Use for calls to subprocess which need to interpolate values from
an SCons construction environment into the environment passed to an SCons construction environment into the environment passed to
subprocess. Adds an error-handling argument. Adds ability subprocess. Adds an error-handling argument. Adds ability
to specify std{in,out,err} with "'devnull'" tag. to specify std{in,out,err} with "'devnull'" tag.
.. deprecated:: 4.6
""" """
# TODO: just uses subprocess.DEVNULL now, we can drop the "devnull" # TODO: just uses subprocess.DEVNULL now, we can drop the "devnull"
# string now - it is a holdover from Py2, which didn't have DEVNULL. # string now - it is a holdover from Py2, which didn't have DEVNULL.
@ -810,11 +944,11 @@ def _subproc(scons_env, cmd, error='ignore', **kw):
try: try:
pobj = subprocess.Popen(cmd, **kw) pobj = subprocess.Popen(cmd, **kw)
except EnvironmentError as e: except OSError as e:
if error == 'raise': raise if error == 'raise': raise
# return a dummy Popen instance that only returns error # return a dummy Popen instance that only returns error
class dummyPopen: class dummyPopen:
def __init__(self, e): def __init__(self, e) -> None:
self.exception = e self.exception = e
# Add the following two to enable using the return value as a context manager # Add the following two to enable using the return value as a context manager
# for example # for example
@ -824,7 +958,7 @@ def _subproc(scons_env, cmd, error='ignore', **kw):
def __enter__(self): def __enter__(self):
return self return self
def __exit__(self, *args): def __exit__(self, *args) -> None:
pass pass
def communicate(self, input=None): def communicate(self, input=None):
@ -835,8 +969,8 @@ def _subproc(scons_env, cmd, error='ignore', **kw):
stdin = None stdin = None
class f: class f:
def read(self): return '' def read(self) -> str: return ''
def readline(self): return '' def readline(self) -> str: return ''
def __iter__(self): return iter(()) def __iter__(self): return iter(())
stdout = stderr = f() stdout = stderr = f()
pobj = dummyPopen(e) pobj = dummyPopen(e)
@ -851,7 +985,7 @@ def _subproc(scons_env, cmd, error='ignore', **kw):
class CommandAction(_ActionAction): class CommandAction(_ActionAction):
"""Class for command-execution actions.""" """Class for command-execution actions."""
def __init__(self, cmd, **kw): def __init__(self, cmd, **kw) -> None:
# Cmd can actually be a list or a single item; if it's a # Cmd can actually be a list or a single item; if it's a
# single item it should be the command string to execute; if a # single item it should be the command string to execute; if a
# list then it should be the words of the command string to # list then it should be the words of the command string to
@ -870,23 +1004,24 @@ class CommandAction(_ActionAction):
"a single command") "a single command")
self.cmd_list = cmd self.cmd_list = cmd
def __str__(self): def __str__(self) -> str:
if is_List(self.cmd_list): if is_List(self.cmd_list):
return ' '.join(map(str, self.cmd_list)) return ' '.join(map(str, self.cmd_list))
return str(self.cmd_list) return str(self.cmd_list)
def process(self, target, source, env, executor=None, overrides=False):
def process(self, target, source, env, executor=None, overrides: Optional[dict] = None) -> Tuple[List, bool, bool]:
if executor: if executor:
result = env.subst_list(self.cmd_list, 0, executor=executor, overrides=overrides) result = env.subst_list(self.cmd_list, SUBST_CMD, executor=executor, overrides=overrides)
else: else:
result = env.subst_list(self.cmd_list, 0, target, source, overrides=overrides) result = env.subst_list(self.cmd_list, SUBST_CMD, target, source, overrides=overrides)
silent = None silent = False
ignore = None ignore = False
while True: while True:
try: c = result[0][0][0] try: c = result[0][0][0]
except IndexError: c = None except IndexError: c = None
if c == '@': silent = 1 if c == '@': silent = True
elif c == '-': ignore = 1 elif c == '-': ignore = True
else: break else: break
result[0][0] = result[0][0][1:] result[0][0] = result[0][0][1:]
try: try:
@ -896,11 +1031,10 @@ class CommandAction(_ActionAction):
pass pass
return result, ignore, silent return result, ignore, silent
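
The '@' and '-' characters consumed above are the command prefixes users can write in an SConscript; an illustrative use, assuming POSIX-style shell commands:

    env.Command('out.txt', 'in.txt',
                ['@echo generating $TARGET',  # '@': silent, do not echo the command
                 '-rm -f $TARGET.tmp',        # '-': ignore a non-zero exit status
                 'cp $SOURCE $TARGET'])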
def strfunction(self, target, source, env, executor=None, overrides=False): def strfunction(self, target, source, env, executor: Optional[ExecutorType] = None, overrides: Optional[dict] = None) -> str:
if self.cmdstr is None: if self.cmdstr is None:
return None return None
if self.cmdstr is not _null: if self.cmdstr is not _null:
from SCons.Subst import SUBST_RAW
if executor: if executor:
c = env.subst(self.cmdstr, SUBST_RAW, executor=executor, overrides=overrides) c = env.subst(self.cmdstr, SUBST_RAW, executor=executor, overrides=overrides)
else: else:
@ -912,7 +1046,7 @@ class CommandAction(_ActionAction):
return '' return ''
return _string_from_cmd_list(cmd_list[0]) return _string_from_cmd_list(cmd_list[0])
def execute(self, target, source, env, executor=None): def execute(self, target, source, env, executor: Optional[ExecutorType] = None):
"""Execute a command action. """Execute a command action.
This will handle lists of commands as well as individual commands, This will handle lists of commands as well as individual commands,
@ -933,12 +1067,11 @@ class CommandAction(_ActionAction):
spawn = env['SPAWN'] spawn = env['SPAWN']
except KeyError: except KeyError:
raise SCons.Errors.UserError('Missing SPAWN construction variable.') raise SCons.Errors.UserError('Missing SPAWN construction variable.')
else:
if is_String(spawn): if is_String(spawn):
spawn = env.subst(spawn, raw=1, conv=lambda x: x) spawn = env.subst(spawn, raw=1, conv=lambda x: x)
escape = env.get('ESCAPE', lambda x: x) escape = env.get('ESCAPE', lambda x: x)
ENV = _resolve_shell_env(env, target, source) ENV = _resolve_shell_env(env, target, source)
# Ensure that the ENV values are all strings: # Ensure that the ENV values are all strings:
@ -975,13 +1108,12 @@ class CommandAction(_ActionAction):
command=cmd_line) command=cmd_line)
return 0 return 0
def get_presig(self, target, source, env, executor=None): def get_presig(self, target, source, env, executor: Optional[ExecutorType] = None):
"""Return the signature contents of this action's command line. """Return the signature contents of this action's command line.
This strips $(-$) and everything in between the string, This strips $(-$) and everything in between the string,
since those parts don't affect signatures. since those parts don't affect signatures.
""" """
from SCons.Subst import SUBST_SIG
cmd = self.cmd_list cmd = self.cmd_list
if is_List(cmd): if is_List(cmd):
cmd = ' '.join(map(str, cmd)) cmd = ' '.join(map(str, cmd))
@ -989,10 +1121,9 @@ class CommandAction(_ActionAction):
cmd = str(cmd) cmd = str(cmd)
if executor: if executor:
return env.subst_target_source(cmd, SUBST_SIG, executor=executor) return env.subst_target_source(cmd, SUBST_SIG, executor=executor)
else:
return env.subst_target_source(cmd, SUBST_SIG, target, source) return env.subst_target_source(cmd, SUBST_SIG, target, source)
def get_implicit_deps(self, target, source, env, executor=None): def get_implicit_deps(self, target, source, env, executor: Optional[ExecutorType] = None):
"""Return the implicit dependencies of this action's command line.""" """Return the implicit dependencies of this action's command line."""
icd = env.get('IMPLICIT_COMMAND_DEPENDENCIES', True) icd = env.get('IMPLICIT_COMMAND_DEPENDENCIES', True)
if is_String(icd) and icd[:1] == '$': if is_String(icd) and icd[:1] == '$':
@ -1010,17 +1141,15 @@ class CommandAction(_ActionAction):
# An integer value greater than 1 specifies the number of entries # An integer value greater than 1 specifies the number of entries
# to scan. "all" means to scan all. # to scan. "all" means to scan all.
return self._get_implicit_deps_heavyweight(target, source, env, executor, icd_int) return self._get_implicit_deps_heavyweight(target, source, env, executor, icd_int)
else:
# Everything else (usually 1 or True) means that we want # Everything else (usually 1 or True) means that we want
# lightweight dependency scanning. # lightweight dependency scanning.
return self._get_implicit_deps_lightweight(target, source, env, executor) return self._get_implicit_deps_lightweight(target, source, env, executor)
def _get_implicit_deps_lightweight(self, target, source, env, executor): def _get_implicit_deps_lightweight(self, target, source, env, executor: Optional[ExecutorType]):
""" """
Lightweight dependency scanning involves only scanning the first entry Lightweight dependency scanning involves only scanning the first entry
in an action string, even if it contains &&. in an action string, even if it contains &&.
""" """
from SCons.Subst import SUBST_SIG
if executor: if executor:
cmd_list = env.subst_list(self.cmd_list, SUBST_SIG, executor=executor) cmd_list = env.subst_list(self.cmd_list, SUBST_SIG, executor=executor)
else: else:
@ -1037,7 +1166,7 @@ class CommandAction(_ActionAction):
res.append(env.fs.File(d)) res.append(env.fs.File(d))
return res return res
def _get_implicit_deps_heavyweight(self, target, source, env, executor, def _get_implicit_deps_heavyweight(self, target, source, env, executor: Optional[ExecutorType],
icd_int): icd_int):
""" """
Heavyweight dependency scanning involves scanning more than just the Heavyweight dependency scanning involves scanning more than just the
@ -1058,7 +1187,6 @@ class CommandAction(_ActionAction):
# Avoid circular and duplicate dependencies by not providing source, # Avoid circular and duplicate dependencies by not providing source,
# target, or executor to subst_list. This causes references to # target, or executor to subst_list. This causes references to
# $SOURCES, $TARGETS, and all related variables to disappear. # $SOURCES, $TARGETS, and all related variables to disappear.
from SCons.Subst import SUBST_SIG
cmd_list = env.subst_list(self.cmd_list, SUBST_SIG, conv=lambda x: x) cmd_list = env.subst_list(self.cmd_list, SUBST_SIG, conv=lambda x: x)
res = [] res = []
@ -1099,14 +1227,14 @@ class CommandAction(_ActionAction):
class CommandGeneratorAction(ActionBase): class CommandGeneratorAction(ActionBase):
"""Class for command-generator actions.""" """Class for command-generator actions."""
def __init__(self, generator, kw): def __init__(self, generator, kw) -> None:
if SCons.Debug.track_instances: logInstanceCreation(self, 'Action.CommandGeneratorAction') if SCons.Debug.track_instances: logInstanceCreation(self, 'Action.CommandGeneratorAction')
self.generator = generator self.generator = generator
self.gen_kw = kw self.gen_kw = kw
self.varlist = kw.get('varlist', ()) self.varlist = kw.get('varlist', ())
self.targets = kw.get('targets', '$TARGETS') self.targets = kw.get('targets', '$TARGETS')
def _generate(self, target, source, env, for_signature, executor=None): def _generate(self, target, source, env, for_signature, executor: Optional[ExecutorType] = None):
# ensure that target is a list, to make it easier to write # ensure that target is a list, to make it easier to write
# generator functions: # generator functions:
if not is_List(target): if not is_List(target):
@ -1124,7 +1252,7 @@ class CommandGeneratorAction(ActionBase):
raise SCons.Errors.UserError("Object returned from command generator: %s cannot be used to create an Action." % repr(ret)) raise SCons.Errors.UserError("Object returned from command generator: %s cannot be used to create an Action." % repr(ret))
return gen_cmd return gen_cmd
def __str__(self): def __str__(self) -> str:
try: try:
env = self.presub_env env = self.presub_env
except AttributeError: except AttributeError:
@ -1137,11 +1265,11 @@ class CommandGeneratorAction(ActionBase):
def batch_key(self, env, target, source): def batch_key(self, env, target, source):
return self._generate(target, source, env, 1).batch_key(env, target, source) return self._generate(target, source, env, 1).batch_key(env, target, source)
def genstring(self, target, source, env, executor=None): def genstring(self, target, source, env, executor: Optional[ExecutorType] = None) -> str:
return self._generate(target, source, env, 1, executor).genstring(target, source, env) return self._generate(target, source, env, 1, executor).genstring(target, source, env)
def __call__(self, target, source, env, exitstatfunc=_null, presub=_null, def __call__(self, target, source, env, exitstatfunc=_null, presub=_null,
show=_null, execute=_null, chdir=_null, executor=None): show=_null, execute=_null, chdir=_null, executor: Optional[ExecutorType] = None):
act = self._generate(target, source, env, 0, executor) act = self._generate(target, source, env, 0, executor)
if act is None: if act is None:
raise SCons.Errors.UserError( raise SCons.Errors.UserError(
@ -1153,7 +1281,7 @@ class CommandGeneratorAction(ActionBase):
target, source, env, exitstatfunc, presub, show, execute, chdir, executor target, source, env, exitstatfunc, presub, show, execute, chdir, executor
) )
def get_presig(self, target, source, env, executor=None): def get_presig(self, target, source, env, executor: Optional[ExecutorType] = None):
"""Return the signature contents of this action's command line. """Return the signature contents of this action's command line.
This strips $(-$) and everything in between the string, This strips $(-$) and everything in between the string,
@ -1161,13 +1289,13 @@ class CommandGeneratorAction(ActionBase):
""" """
return self._generate(target, source, env, 1, executor).get_presig(target, source, env) return self._generate(target, source, env, 1, executor).get_presig(target, source, env)
def get_implicit_deps(self, target, source, env, executor=None): def get_implicit_deps(self, target, source, env, executor: Optional[ExecutorType] = None):
return self._generate(target, source, env, 1, executor).get_implicit_deps(target, source, env) return self._generate(target, source, env, 1, executor).get_implicit_deps(target, source, env)
def get_varlist(self, target, source, env, executor=None): def get_varlist(self, target, source, env, executor: Optional[ExecutorType] = None):
return self._generate(target, source, env, 1, executor).get_varlist(target, source, env, executor) return self._generate(target, source, env, 1, executor).get_varlist(target, source, env, executor)
def get_targets(self, env, executor): def get_targets(self, env, executor: Optional[ExecutorType]):
return self._generate(None, None, env, 1, executor).get_targets(env, executor) return self._generate(None, None, env, 1, executor).get_targets(env, executor)
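
For reference, the generator functions this class dispatches to are plain callables taking (source, target, env, for_signature) and returning something an Action can be made from. A hedged sketch, with a made-up tool name:

    def build_it(source, target, env, for_signature):
        # target and source are normalized to lists by _generate() above
        return 'mytool -o %s %s' % (target[0], ' '.join(str(s) for s in source))

    bld = Builder(generator=build_it, suffix='.out', src_suffix='.in')
    env.Append(BUILDERS={'MyTool': bld})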
@ -1191,7 +1319,7 @@ class LazyAction(CommandGeneratorAction, CommandAction):
an action based on what's in the construction variable. an action based on what's in the construction variable.
""" """
def __init__(self, var, kw): def __init__(self, var, kw) -> None:
if SCons.Debug.track_instances: logInstanceCreation(self, 'Action.LazyAction') if SCons.Debug.track_instances: logInstanceCreation(self, 'Action.LazyAction')
CommandAction.__init__(self, '${'+var+'}', **kw) CommandAction.__init__(self, '${'+var+'}', **kw)
self.var = SCons.Util.to_String(var) self.var = SCons.Util.to_String(var)
@ -1213,18 +1341,22 @@ class LazyAction(CommandGeneratorAction, CommandAction):
raise SCons.Errors.UserError("$%s value %s cannot be used to create an Action." % (self.var, repr(c))) raise SCons.Errors.UserError("$%s value %s cannot be used to create an Action." % (self.var, repr(c)))
return gen_cmd return gen_cmd
def _generate(self, target, source, env, for_signature, executor=None): def _generate(self, target, source, env, for_signature, executor: Optional[ExecutorType] = None):
return self._generate_cache(env) return self._generate_cache(env)
def __call__(self, target, source, env, *args, **kw): def __call__(self, target, source, env, *args, **kw):
c = self.get_parent_class(env) c = self.get_parent_class(env)
return c.__call__(self, target, source, env, *args, **kw) return c.__call__(self, target, source, env, *args, **kw)
def get_presig(self, target, source, env): def get_presig(self, target, source, env, executor: Optional[ExecutorType] = None):
c = self.get_parent_class(env) c = self.get_parent_class(env)
return c.get_presig(self, target, source, env) return c.get_presig(self, target, source, env)
def get_varlist(self, target, source, env, executor=None): def get_implicit_deps(self, target, source, env, executor: Optional[ExecutorType] = None):
c = self.get_parent_class(env)
return c.get_implicit_deps(self, target, source, env)
def get_varlist(self, target, source, env, executor: Optional[ExecutorType] = None):
c = self.get_parent_class(env) c = self.get_parent_class(env)
return c.get_varlist(self, target, source, env, executor) return c.get_varlist(self, target, source, env, executor)
@ -1232,7 +1364,7 @@ class LazyAction(CommandGeneratorAction, CommandAction):
class FunctionAction(_ActionAction): class FunctionAction(_ActionAction):
"""Class for Python function actions.""" """Class for Python function actions."""
def __init__(self, execfunction, kw): def __init__(self, execfunction, kw) -> None:
if SCons.Debug.track_instances: logInstanceCreation(self, 'Action.FunctionAction') if SCons.Debug.track_instances: logInstanceCreation(self, 'Action.FunctionAction')
self.execfunction = execfunction self.execfunction = execfunction
@ -1257,11 +1389,10 @@ class FunctionAction(_ActionAction):
except AttributeError: except AttributeError:
return "unknown_python_function" return "unknown_python_function"
def strfunction(self, target, source, env, executor=None): def strfunction(self, target, source, env, executor: Optional[ExecutorType] = None):
if self.cmdstr is None: if self.cmdstr is None:
return None return None
if self.cmdstr is not _null: if self.cmdstr is not _null:
from SCons.Subst import SUBST_RAW
if executor: if executor:
c = env.subst(self.cmdstr, SUBST_RAW, executor=executor) c = env.subst(self.cmdstr, SUBST_RAW, executor=executor)
else: else:
@ -1293,13 +1424,13 @@ class FunctionAction(_ActionAction):
sstr = array(source) sstr = array(source)
return "%s(%s, %s)" % (name, tstr, sstr) return "%s(%s, %s)" % (name, tstr, sstr)
def __str__(self): def __str__(self) -> str:
name = self.function_name() name = self.function_name()
if name == 'ActionCaller': if name == 'ActionCaller':
return str(self.execfunction) return str(self.execfunction)
return "%s(target, source, env)" % name return "%s(target, source, env)" % name
def execute(self, target, source, env, executor=None): def execute(self, target, source, env, executor: Optional[ExecutorType] = None):
exc_info = (None,None,None) exc_info = (None,None,None)
try: try:
if executor: if executor:
@ -1308,9 +1439,7 @@ class FunctionAction(_ActionAction):
rsources = list(map(rfile, source)) rsources = list(map(rfile, source))
try: try:
result = self.execfunction(target=target, source=rsources, env=env) result = self.execfunction(target=target, source=rsources, env=env)
except KeyboardInterrupt as e: except (KeyboardInterrupt, SystemExit):
raise
except SystemExit as e:
raise raise
except Exception as e: except Exception as e:
result = e result = e
@ -1318,8 +1447,8 @@ class FunctionAction(_ActionAction):
if result: if result:
result = SCons.Errors.convert_to_BuildError(result, exc_info) result = SCons.Errors.convert_to_BuildError(result, exc_info)
result.node=target result.node = target
result.action=self result.action = self
try: try:
result.command=self.strfunction(target, source, env, executor) result.command=self.strfunction(target, source, env, executor)
except TypeError: except TypeError:
@ -1332,7 +1461,7 @@ class FunctionAction(_ActionAction):
# some codes do not check the return value of Actions and I do # some codes do not check the return value of Actions and I do
# not have the time to modify them at this point. # not have the time to modify them at this point.
if (exc_info[1] and if (exc_info[1] and
not isinstance(exc_info[1],EnvironmentError)): not isinstance(exc_info[1], EnvironmentError)):
raise result raise result
return result return result
@ -1342,19 +1471,19 @@ class FunctionAction(_ActionAction):
# more information about this issue. # more information about this issue.
del exc_info del exc_info
def get_presig(self, target, source, env): def get_presig(self, target, source, env, executor: Optional[ExecutorType] = None):
"""Return the signature contents of this callable action.""" """Return the signature contents of this callable action."""
try: try:
return self.gc(target, source, env) return self.gc(target, source, env)
except AttributeError: except AttributeError:
return self.funccontents return self.funccontents
def get_implicit_deps(self, target, source, env): def get_implicit_deps(self, target, source, env, executor: Optional[ExecutorType] = None):
return [] return []
class ListAction(ActionBase): class ListAction(ActionBase):
"""Class for lists of other actions.""" """Class for lists of other actions."""
def __init__(self, actionlist): def __init__(self, actionlist) -> None:
if SCons.Debug.track_instances: logInstanceCreation(self, 'Action.ListAction') if SCons.Debug.track_instances: logInstanceCreation(self, 'Action.ListAction')
def list_of_actions(x): def list_of_actions(x):
if isinstance(x, ActionBase): if isinstance(x, ActionBase):
@ -1366,17 +1495,17 @@ class ListAction(ActionBase):
self.varlist = () self.varlist = ()
self.targets = '$TARGETS' self.targets = '$TARGETS'
def genstring(self, target, source, env): def genstring(self, target, source, env, executor: Optional[ExecutorType] = None) -> str:
return '\n'.join([a.genstring(target, source, env) for a in self.list]) return '\n'.join([a.genstring(target, source, env) for a in self.list])
def __str__(self): def __str__(self) -> str:
return '\n'.join(map(str, self.list)) return '\n'.join(map(str, self.list))
def presub_lines(self, env): def presub_lines(self, env):
return SCons.Util.flatten_sequence( return SCons.Util.flatten_sequence(
[a.presub_lines(env) for a in self.list]) [a.presub_lines(env) for a in self.list])
def get_presig(self, target, source, env): def get_presig(self, target, source, env, executor: Optional[ExecutorType] = None):
"""Return the signature contents of this action list. """Return the signature contents of this action list.
Simple concatenation of the signatures of the elements. Simple concatenation of the signatures of the elements.
@ -1384,7 +1513,7 @@ class ListAction(ActionBase):
return b"".join([bytes(x.get_contents(target, source, env)) for x in self.list]) return b"".join([bytes(x.get_contents(target, source, env)) for x in self.list])
def __call__(self, target, source, env, exitstatfunc=_null, presub=_null, def __call__(self, target, source, env, exitstatfunc=_null, presub=_null,
show=_null, execute=_null, chdir=_null, executor=None): show=_null, execute=_null, chdir=_null, executor: Optional[ExecutorType] = None):
if executor: if executor:
target = executor.get_all_targets() target = executor.get_all_targets()
source = executor.get_all_sources() source = executor.get_all_sources()
@ -1395,13 +1524,13 @@ class ListAction(ActionBase):
return stat return stat
return 0 return 0
def get_implicit_deps(self, target, source, env): def get_implicit_deps(self, target, source, env, executor: Optional[ExecutorType] = None):
result = [] result = []
for act in self.list: for act in self.list:
result.extend(act.get_implicit_deps(target, source, env)) result.extend(act.get_implicit_deps(target, source, env))
return result return result
def get_varlist(self, target, source, env, executor=None): def get_varlist(self, target, source, env, executor: Optional[ExecutorType] = None):
result = OrderedDict() result = OrderedDict()
for act in self.list: for act in self.list:
for var in act.get_varlist(target, source, env, executor): for var in act.get_varlist(target, source, env, executor):
@ -1418,7 +1547,7 @@ class ActionCaller:
but what it's really doing is hanging on to the arguments until we but what it's really doing is hanging on to the arguments until we
have a target, source and env to use for the expansion. have a target, source and env to use for the expansion.
""" """
def __init__(self, parent, args, kw): def __init__(self, parent, args, kw) -> None:
self.parent = parent self.parent = parent
self.args = args self.args = args
self.kw = kw self.kw = kw
@ -1453,8 +1582,9 @@ class ActionCaller:
# was called by using this hard-coded value as a special return. # was called by using this hard-coded value as a special return.
if s == '$__env__': if s == '$__env__':
return env return env
elif is_String(s): if is_String(s):
return env.subst(s, 1, target, source) return env.subst(s, 1, target, source)
return self.parent.convert(s) return self.parent.convert(s)
def subst_args(self, target, source, env): def subst_args(self, target, source, env):
@ -1466,7 +1596,7 @@ class ActionCaller:
kw[key] = self.subst(self.kw[key], target, source, env) kw[key] = self.subst(self.kw[key], target, source, env)
return kw return kw
def __call__(self, target, source, env, executor=None): def __call__(self, target, source, env, executor: Optional[ExecutorType] = None):
args = self.subst_args(target, source, env) args = self.subst_args(target, source, env)
kw = self.subst_kw(target, source, env) kw = self.subst_kw(target, source, env)
return self.parent.actfunc(*args, **kw) return self.parent.actfunc(*args, **kw)
@ -1476,7 +1606,7 @@ class ActionCaller:
kw = self.subst_kw(target, source, env) kw = self.subst_kw(target, source, env)
return self.parent.strfunc(*args, **kw) return self.parent.strfunc(*args, **kw)
def __str__(self): def __str__(self) -> str:
return self.parent.strfunc(*self.args, **self.kw) return self.parent.strfunc(*self.args, **self.kw)
@ -1489,7 +1619,7 @@ class ActionFactory:
called with and give them to the ActionCaller object we create, called with and give them to the ActionCaller object we create,
so it can hang onto them until it needs them. so it can hang onto them until it needs them.
""" """
def __init__(self, actfunc, strfunc, convert=lambda x: x): def __init__(self, actfunc, strfunc, convert=lambda x: x) -> None:
self.actfunc = actfunc self.actfunc = actfunc
self.strfunc = strfunc self.strfunc = strfunc
self.convert = convert self.convert = convert


@ -99,7 +99,10 @@ There are the following methods for internal use within this module:
""" """
import os
from collections import UserDict, UserList from collections import UserDict, UserList
from contextlib import suppress
from typing import Optional
import SCons.Action import SCons.Action
import SCons.Debug import SCons.Debug
@ -109,6 +112,7 @@ import SCons.Util
import SCons.Warnings import SCons.Warnings
from SCons.Debug import logInstanceCreation from SCons.Debug import logInstanceCreation
from SCons.Errors import InternalError, UserError from SCons.Errors import InternalError, UserError
from SCons.Util.sctyping import ExecutorType
class _Null: class _Null:
pass pass
@ -130,14 +134,14 @@ class DictCmdGenerator(SCons.Util.Selector):
to return the proper action based on the file suffix of to return the proper action based on the file suffix of
the source file.""" the source file."""
def __init__(self, mapping=None, source_ext_match=True): def __init__(self, mapping=None, source_ext_match: bool=True) -> None:
super().__init__(mapping) super().__init__(mapping)
self.source_ext_match = source_ext_match self.source_ext_match = source_ext_match
def src_suffixes(self): def src_suffixes(self):
return list(self.keys()) return list(self.keys())
def add_action(self, suffix, action): def add_action(self, suffix, action) -> None:
"""Add a suffix-action pair to the mapping. """Add a suffix-action pair to the mapping.
""" """
self[suffix] = action self[suffix] = action
@ -222,12 +226,12 @@ class OverrideWarner(UserDict):
can actually invoke multiple builders. This class only emits the can actually invoke multiple builders. This class only emits the
warnings once, no matter how many Builders are invoked. warnings once, no matter how many Builders are invoked.
""" """
def __init__(self, mapping): def __init__(self, mapping) -> None:
super().__init__(mapping) super().__init__(mapping)
if SCons.Debug.track_instances: logInstanceCreation(self, 'Builder.OverrideWarner') if SCons.Debug.track_instances: logInstanceCreation(self, 'Builder.OverrideWarner')
self.already_warned = None self.already_warned = None
def warn(self): def warn(self) -> None:
if self.already_warned: if self.already_warned:
return return
for k in self.keys(): for k in self.keys():
@ -335,7 +339,7 @@ class EmitterProxy:
look there at actual build time to see if it holds look there at actual build time to see if it holds
a callable. If so, we will call that as the actual a callable. If so, we will call that as the actual
emitter.""" emitter."""
def __init__(self, var): def __init__(self, var) -> None:
self.var = SCons.Util.to_String(var) self.var = SCons.Util.to_String(var)
def __call__(self, target, source, env): def __call__(self, target, source, env):
@ -375,23 +379,23 @@ class BuilderBase:
""" """
def __init__(self, action = None, def __init__(self, action = None,
prefix = '', prefix: str = '',
suffix = '', suffix: str = '',
src_suffix = '', src_suffix: str = '',
target_factory = None, target_factory = None,
source_factory = None, source_factory = None,
target_scanner = None, target_scanner = None,
source_scanner = None, source_scanner = None,
emitter = None, emitter = None,
multi = 0, multi: int = 0,
env = None, env = None,
single_source = 0, single_source: bool = False,
name = None, name = None,
chdir = _null, chdir = _null,
is_explicit = 1, is_explicit: bool = True,
src_builder = None, src_builder = None,
ensure_suffix = False, ensure_suffix: bool = False,
**overrides): **overrides) -> None:
if SCons.Debug.track_instances: logInstanceCreation(self, 'Builder.BuilderBase') if SCons.Debug.track_instances: logInstanceCreation(self, 'Builder.BuilderBase')
self._memo = {} self._memo = {}
self.action = action self.action = action
@ -439,7 +443,7 @@ class BuilderBase:
src_builder = [ src_builder ] src_builder = [ src_builder ]
self.src_builder = src_builder self.src_builder = src_builder
def __bool__(self): def __bool__(self) -> bool:
raise InternalError("Do not test for the Node.builder attribute directly; use Node.has_builder() instead") raise InternalError("Do not test for the Node.builder attribute directly; use Node.has_builder() instead")
def get_name(self, env): def get_name(self, env):
@ -471,7 +475,7 @@ class BuilderBase:
suffixes = [] suffixes = []
return match_splitext(path, suffixes) return match_splitext(path, suffixes)
def _adjustixes(self, files, pre, suf, ensure_suffix=False): def _adjustixes(self, files, pre, suf, ensure_suffix: bool=False):
if not files: if not files:
return [] return []
result = [] result = []
@ -479,6 +483,11 @@ class BuilderBase:
files = [files] files = [files]
for f in files: for f in files:
# fspath() is to catch PathLike paths. We avoid the simpler
# str(f) so as not to "lose" files that are already Nodes:
# TypeError: expected str, bytes or os.PathLike object, not File
with suppress(TypeError):
f = os.fspath(f)
if SCons.Util.is_String(f): if SCons.Util.is_String(f):
f = SCons.Util.adjustixes(f, pre, suf, ensure_suffix) f = SCons.Util.adjustixes(f, pre, suf, ensure_suffix)
result.append(f) result.append(f)
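
The fspath() handling added here means os.PathLike objects should now be usable wherever a target or source name string is expected; a small illustration in an SConscript (paths invented for the example):

    from pathlib import Path
    env.Program(target=Path('build') / 'hello', source=[Path('src') / 'hello.c'])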
@ -582,7 +591,7 @@ class BuilderBase:
# build this particular list of targets from this particular list of # build this particular list of targets from this particular list of
# sources. # sources.
executor = None executor: Optional[ExecutorType] = None
key = None key = None
if self.multi: if self.multi:
@ -673,7 +682,7 @@ class BuilderBase:
prefix = prefix(env, sources) prefix = prefix(env, sources)
return env.subst(prefix) return env.subst(prefix)
def set_suffix(self, suffix): def set_suffix(self, suffix) -> None:
if not callable(suffix): if not callable(suffix):
suffix = self.adjust_suffix(suffix) suffix = self.adjust_suffix(suffix)
self.suffix = suffix self.suffix = suffix
@ -684,7 +693,7 @@ class BuilderBase:
suffix = suffix(env, sources) suffix = suffix(env, sources)
return env.subst(suffix) return env.subst(suffix)
def set_src_suffix(self, src_suffix): def set_src_suffix(self, src_suffix) -> None:
if not src_suffix: if not src_suffix:
src_suffix = [] src_suffix = []
elif not SCons.Util.is_List(src_suffix): elif not SCons.Util.is_List(src_suffix):
@ -698,7 +707,7 @@ class BuilderBase:
return '' return ''
return ret[0] return ret[0]
def add_emitter(self, suffix, emitter): def add_emitter(self, suffix, emitter) -> None:
"""Add a suffix-emitter mapping to this Builder. """Add a suffix-emitter mapping to this Builder.
This assumes that emitter has been initialized with an This assumes that emitter has been initialized with an
@ -708,7 +717,7 @@ class BuilderBase:
""" """
self.emitter[suffix] = emitter self.emitter[suffix] = emitter
def add_src_builder(self, builder): def add_src_builder(self, builder) -> None:
""" """
Add a new Builder to the list of src_builders. Add a new Builder to the list of src_builders.
@ -875,7 +884,7 @@ class CompositeBuilder(SCons.Util.Proxy):
to the DictCmdGenerator's add_action() method. to the DictCmdGenerator's add_action() method.
""" """
def __init__(self, builder, cmdgen): def __init__(self, builder, cmdgen) -> None:
if SCons.Debug.track_instances: logInstanceCreation(self, 'Builder.CompositeBuilder') if SCons.Debug.track_instances: logInstanceCreation(self, 'Builder.CompositeBuilder')
super().__init__(builder) super().__init__(builder)
@ -885,11 +894,11 @@ class CompositeBuilder(SCons.Util.Proxy):
__call__ = SCons.Util.Delegate('__call__') __call__ = SCons.Util.Delegate('__call__')
def add_action(self, suffix, action): def add_action(self, suffix, action) -> None:
self.cmdgen.add_action(suffix, action) self.cmdgen.add_action(suffix, action)
self.set_src_suffix(self.cmdgen.src_suffixes()) self.set_src_suffix(self.cmdgen.src_suffixes())
def is_a_Builder(obj): def is_a_Builder(obj) -> bool:
""""Returns True if the specified obj is one of our Builder classes. """"Returns True if the specified obj is one of our Builder classes.
The test is complicated a bit by the fact that CompositeBuilder The test is complicated a bit by the fact that CompositeBuilder


@ -34,7 +34,7 @@ import uuid
import SCons.Action import SCons.Action
import SCons.Errors import SCons.Errors
import SCons.Warnings import SCons.Warnings
import SCons import SCons.Util
cache_enabled = True cache_enabled = True
cache_debug = False cache_debug = False
@ -43,7 +43,7 @@ cache_show = False
cache_readonly = False cache_readonly = False
cache_tmp_uuid = uuid.uuid4().hex cache_tmp_uuid = uuid.uuid4().hex
def CacheRetrieveFunc(target, source, env): def CacheRetrieveFunc(target, source, env) -> int:
t = target[0] t = target[0]
fs = t.fs fs = t.fs
cd = env.get_CacheDir() cd = env.get_CacheDir()
@ -67,7 +67,7 @@ def CacheRetrieveFunc(target, source, env):
fs.chmod(t.get_internal_path(), stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE) fs.chmod(t.get_internal_path(), stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)
return 0 return 0
def CacheRetrieveString(target, source, env): def CacheRetrieveString(target, source, env) -> None:
t = target[0] t = target[0]
fs = t.fs fs = t.fs
cd = env.get_CacheDir() cd = env.get_CacheDir()
@ -118,7 +118,7 @@ def CachePushFunc(target, source, env):
cd.copy_to_cache(env, t.get_internal_path(), tempfile) cd.copy_to_cache(env, t.get_internal_path(), tempfile)
fs.rename(tempfile, cachefile) fs.rename(tempfile, cachefile)
except EnvironmentError: except OSError:
# It's possible someone else tried writing the file at the # It's possible someone else tried writing the file at the
# same time we did, or else that there was some problem like # same time we did, or else that there was some problem like
# the CacheDir being on a separate file system that's full. # the CacheDir being on a separate file system that's full.
@ -133,7 +133,7 @@ CachePush = SCons.Action.Action(CachePushFunc, None)
class CacheDir: class CacheDir:
def __init__(self, path): def __init__(self, path) -> None:
""" """
Initialize a CacheDir object. Initialize a CacheDir object.
@ -169,15 +169,16 @@ class CacheDir:
""" """
config_file = os.path.join(path, 'config') config_file = os.path.join(path, 'config')
try: try:
# still use a try block even with exist_ok, might have other fails
os.makedirs(path, exist_ok=True) os.makedirs(path, exist_ok=True)
except FileExistsError:
pass
except OSError: except OSError:
msg = "Failed to create cache directory " + path msg = "Failed to create cache directory " + path
raise SCons.Errors.SConsEnvironmentError(msg) raise SCons.Errors.SConsEnvironmentError(msg)
try: try:
with open(config_file, 'x') as config: with SCons.Util.FileLock(config_file, timeout=5, writer=True), open(
config_file, "x"
) as config:
self.config['prefix_len'] = 2 self.config['prefix_len'] = 2
try: try:
json.dump(self.config, config) json.dump(self.config, config)
@ -186,18 +187,20 @@ class CacheDir:
raise SCons.Errors.SConsEnvironmentError(msg) raise SCons.Errors.SConsEnvironmentError(msg)
except FileExistsError: except FileExistsError:
try: try:
with open(config_file) as config: with SCons.Util.FileLock(config_file, timeout=5, writer=False), open(
config_file
) as config:
self.config = json.load(config) self.config = json.load(config)
except ValueError: except (ValueError, json.decoder.JSONDecodeError):
msg = "Failed to read cache configuration for " + path msg = "Failed to read cache configuration for " + path
raise SCons.Errors.SConsEnvironmentError(msg) raise SCons.Errors.SConsEnvironmentError(msg)
def CacheDebug(self, fmt, target, cachefile): def CacheDebug(self, fmt, target, cachefile) -> None:
if cache_debug != self.current_cache_debug: if cache_debug != self.current_cache_debug:
if cache_debug == '-': if cache_debug == '-':
self.debugFP = sys.stdout self.debugFP = sys.stdout
elif cache_debug: elif cache_debug:
def debug_cleanup(debugFP): def debug_cleanup(debugFP) -> None:
debugFP.close() debugFP.close()
self.debugFP = open(cache_debug, 'w') self.debugFP = open(cache_debug, 'w')
@ -233,7 +236,7 @@ class CacheDir:
os.chmod(dst, st | stat.S_IWRITE) os.chmod(dst, st | stat.S_IWRITE)
return result return result
except AttributeError as ex: except AttributeError as ex:
raise EnvironmentError from ex raise OSError from ex
@property @property
def hit_ratio(self) -> float: def hit_ratio(self) -> float:
@ -270,8 +273,11 @@ class CacheDir:
cachedir = os.path.join(self.path, subdir) cachedir = os.path.join(self.path, subdir)
return cachedir, os.path.join(cachedir, sig) return cachedir, os.path.join(cachedir, sig)
def retrieve(self, node): def retrieve(self, node) -> bool:
""" """Retrieve a node from cache.
Returns True if a successful retrieval resulted.
This method is called from multiple threads in a parallel build, This method is called from multiple threads in a parallel build,
so only do thread safe stuff here. Do thread unsafe stuff in so only do thread safe stuff here. Do thread unsafe stuff in
built(). built().


@ -215,7 +215,7 @@ int main(void)
_YesNoResult(context, ret, None, text) _YesNoResult(context, ret, None, text)
return ret return ret
def _check_empty_program(context, comp, text, language, use_shared = False): def _check_empty_program(context, comp, text, language, use_shared: bool = False):
"""Return 0 on success, 1 otherwise.""" """Return 0 on success, 1 otherwise."""
if comp not in context.env or not context.env[comp]: if comp not in context.env or not context.env[comp]:
# The compiler construction variable is not set or empty # The compiler construction variable is not set or empty
@ -231,17 +231,22 @@ def _check_empty_program(context, comp, text, language, use_shared = False):
return context.CompileProg(text, suffix) return context.CompileProg(text, suffix)
def CheckFunc(context, function_name, header = None, language = None): def CheckFunc(context, function_name, header = None, language = None, funcargs = None):
""" """
Configure check for a function "function_name". Configure check for a function "function_name".
"language" should be "C" or "C++" and is used to select the compiler. "language" should be "C" or "C++" and is used to select the compiler.
Default is "C". Default is "C".
Optional "header" can be defined to define a function prototype, include a Optional "header" can be defined to define a function prototype, include a
header file or anything else that comes before main(). header file or anything else that comes before main().
Optional "funcargs" can be defined to define an argument list for the
generated function invocation.
Sets HAVE_function_name in context.havedict according to the result. Sets HAVE_function_name in context.havedict according to the result.
Note that this uses the current value of compiler and linker flags, make Note that this uses the current value of compiler and linker flags, make
sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly. sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly.
Returns an empty string for success, an error message for failure. Returns an empty string for success, an error message for failure.
.. versionchanged:: 4.7.0
The ``funcargs`` parameter was added.
""" """
# Remarks from autoconf: # Remarks from autoconf:
@ -274,6 +279,9 @@ char %s(void);""" % function_name
context.Display("Cannot check for %s(): %s\n" % (function_name, msg)) context.Display("Cannot check for %s(): %s\n" % (function_name, msg))
return msg return msg
if not funcargs:
funcargs = ''
text = """ text = """
%(include)s %(include)s
#include <assert.h> #include <assert.h>
@ -287,14 +295,15 @@ int main(void) {
#if defined (__stub_%(name)s) || defined (__stub___%(name)s) #if defined (__stub_%(name)s) || defined (__stub___%(name)s)
#error "%(name)s has a GNU stub, cannot check" #error "%(name)s has a GNU stub, cannot check"
#else #else
%(name)s(); %(name)s(%(args)s);
#endif #endif
return 0; return 0;
} }
""" % { 'name': function_name, """ % { 'name': function_name,
'include': includetext, 'include': includetext,
'hdr': header } 'hdr': header,
'args': funcargs}
context.Display("Checking for %s function %s()... " % (lang, function_name)) context.Display("Checking for %s function %s()... " % (lang, function_name))
ret = context.BuildProg(text, suffix) ret = context.BuildProg(text, suffix)
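
A hedged example of the new funcargs parameter as it might be used from a configure context; the header and argument list are chosen to match a real prototype, since supplying a genuine prototype is exactly the case where an empty argument list fails to compile:

    conf = Configure(env)
    if conf.CheckFunc('gettimeofday', header='#include <sys/time.h>',
                      funcargs='NULL, NULL', language='C'):
        conf.env.Append(CPPDEFINES=['HAVE_GETTIMEOFDAY'])  # illustrative follow-up
    env = conf.Finish()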
@ -626,8 +635,8 @@ int main(void) {
return ret return ret
def CheckLib(context, libs, func_name = None, header = None, def CheckLib(context, libs, func_name = None, header = None,
extra_libs = None, call = None, language = None, autoadd = 1, extra_libs = None, call = None, language = None, autoadd: int = 1,
append=True, unique=False): append: bool=True, unique: bool=False):
""" """
Configure check for a C or C++ libraries "libs". Searches through Configure check for a C or C++ libraries "libs". Searches through
the list of libraries, until one is found where the test succeeds. the list of libraries, until one is found where the test succeeds.
@ -676,8 +685,7 @@ char %s();
# if no function to test, leave main() blank # if no function to test, leave main() blank
text = text + """ text = text + """
int int main(void) {
main() {
%s %s
return 0; return 0;
} }
@ -753,7 +761,7 @@ def CheckProg(context, prog_name):
# END OF PUBLIC FUNCTIONS # END OF PUBLIC FUNCTIONS
# #
def _YesNoResult(context, ret, key, text, comment = None): def _YesNoResult(context, ret, key, text, comment = None) -> None:
r""" r"""
Handle the result of a test with a "yes" or "no" result. Handle the result of a test with a "yes" or "no" result.
@ -772,7 +780,7 @@ def _YesNoResult(context, ret, key, text, comment = None):
context.Display("yes\n") context.Display("yes\n")
def _Have(context, key, have, comment = None): def _Have(context, key, have, comment = None) -> None:
r""" r"""
Store result of a test in context.havedict and context.headerfilename. Store result of a test in context.havedict and context.headerfilename.
@ -815,7 +823,7 @@ def _Have(context, key, have, comment = None):
context.config_h = context.config_h + lines context.config_h = context.config_h + lines
def _LogFailed(context, text, msg): def _LogFailed(context, text, msg) -> None:
""" """
Write to the log about a failed program. Write to the log about a failed program.
Add line numbers, so that error messages can be understood. Add line numbers, so that error messages can be understood.


@ -41,8 +41,11 @@ import inspect
track_instances = False track_instances = False
# List of currently tracked classes # List of currently tracked classes
tracked_classes = {} tracked_classes = {}
# Global variable that gets set to 'True' by the Main script
# when SConscript call tracing should be enabled.
sconscript_trace = False
def logInstanceCreation(instance, name=None): def logInstanceCreation(instance, name=None) -> None:
if name is None: if name is None:
name = instance.__class__.__name__ name = instance.__class__.__name__
if name not in tracked_classes: if name not in tracked_classes:
@ -60,15 +63,15 @@ def string_to_classes(s):
else: else:
return s.split() return s.split()
def fetchLoggedInstances(classes="*"): def fetchLoggedInstances(classes: str="*"):
classnames = string_to_classes(classes) classnames = string_to_classes(classes)
return [(cn, len(tracked_classes[cn])) for cn in classnames] return [(cn, len(tracked_classes[cn])) for cn in classnames]
def countLoggedInstances(classes, file=sys.stdout): def countLoggedInstances(classes, file=sys.stdout) -> None:
for classname in string_to_classes(classes): for classname in string_to_classes(classes):
file.write("%s: %d\n" % (classname, len(tracked_classes[classname]))) file.write("%s: %d\n" % (classname, len(tracked_classes[classname])))
def listLoggedInstances(classes, file=sys.stdout): def listLoggedInstances(classes, file=sys.stdout) -> None:
for classname in string_to_classes(classes): for classname in string_to_classes(classes):
file.write('\n%s:\n' % classname) file.write('\n%s:\n' % classname)
for ref in tracked_classes[classname]: for ref in tracked_classes[classname]:
@ -79,7 +82,7 @@ def listLoggedInstances(classes, file=sys.stdout):
if obj is not None: if obj is not None:
file.write(' %s\n' % repr(obj)) file.write(' %s\n' % repr(obj))
def dumpLoggedInstances(classes, file=sys.stdout): def dumpLoggedInstances(classes, file=sys.stdout) -> None:
for classname in string_to_classes(classes): for classname in string_to_classes(classes):
file.write('\n%s:\n' % classname) file.write('\n%s:\n' % classname)
for ref in tracked_classes[classname]: for ref in tracked_classes[classname]:
@ -92,14 +95,14 @@ def dumpLoggedInstances(classes, file=sys.stdout):
if sys.platform[:5] == "linux": if sys.platform[:5] == "linux":
# Linux doesn't actually support memory usage stats from getrusage(). # Linux doesn't actually support memory usage stats from getrusage().
def memory(): def memory() -> int:
with open('/proc/self/stat') as f: with open('/proc/self/stat') as f:
mstr = f.read() mstr = f.read()
mstr = mstr.split()[22] mstr = mstr.split()[22]
return int(mstr) return int(mstr)
elif sys.platform[:6] == 'darwin': elif sys.platform[:6] == 'darwin':
#TODO really get memory stats for OS X #TODO really get memory stats for OS X
def memory(): def memory() -> int:
return 0 return 0
elif sys.platform == 'win32': elif sys.platform == 'win32':
from SCons.compat.win32 import get_peak_memory_usage from SCons.compat.win32 import get_peak_memory_usage
@ -108,10 +111,10 @@ else:
try: try:
import resource import resource
except ImportError: except ImportError:
def memory(): def memory() -> int:
return 0 return 0
else: else:
def memory(): def memory() -> int:
res = resource.getrusage(resource.RUSAGE_SELF) res = resource.getrusage(resource.RUSAGE_SELF)
return res[4] return res[4]
@ -132,7 +135,7 @@ def caller_stack():
caller_bases = {} caller_bases = {}
caller_dicts = {} caller_dicts = {}
def caller_trace(back=0): def caller_trace(back: int=0) -> None:
""" """
Trace caller stack and save info into global dicts, which Trace caller stack and save info into global dicts, which
are printed automatically at the end of SCons execution. are printed automatically at the end of SCons execution.
@ -153,7 +156,7 @@ def caller_trace(back=0):
callee = caller callee = caller
# print a single caller and its callers, if any # print a single caller and its callers, if any
def _dump_one_caller(key, file, level=0): def _dump_one_caller(key, file, level: int=0) -> None:
leader = ' '*level leader = ' '*level
for v,c in sorted([(-v,c) for c,v in caller_dicts[key].items()]): for v,c in sorted([(-v,c) for c,v in caller_dicts[key].items()]):
file.write("%s %6d %s:%d(%s)\n" % ((leader,-v) + func_shorten(c[-3:]))) file.write("%s %6d %s:%d(%s)\n" % ((leader,-v) + func_shorten(c[-3:])))
@ -161,7 +164,7 @@ def _dump_one_caller(key, file, level=0):
_dump_one_caller(c, file, level+1) _dump_one_caller(c, file, level+1)
# print each call tree # print each call tree
def dump_caller_counts(file=sys.stdout): def dump_caller_counts(file=sys.stdout) -> None:
for k in sorted(caller_bases.keys()): for k in sorted(caller_bases.keys()):
file.write("Callers of %s:%d(%s), %d calls:\n" file.write("Callers of %s:%d(%s), %d calls:\n"
% (func_shorten(k) + (caller_bases[k],))) % (func_shorten(k) + (caller_bases[k],)))
@ -196,7 +199,7 @@ TimeStampDefault = False
StartTime = time.perf_counter() StartTime = time.perf_counter()
PreviousTime = StartTime PreviousTime = StartTime
def Trace(msg, tracefile=None, mode='w', tstamp=False): def Trace(msg, tracefile=None, mode: str='w', tstamp: bool=False) -> None:
"""Write a trace message. """Write a trace message.
Write messages when debugging which do not interfere with stdout. Write messages when debugging which do not interfere with stdout.
@ -217,7 +220,7 @@ def Trace(msg, tracefile=None, mode='w', tstamp=False):
global TimeStampDefault global TimeStampDefault
global PreviousTime global PreviousTime
def trace_cleanup(traceFP): def trace_cleanup(traceFP) -> None:
traceFP.close() traceFP.close()
if tracefile is None: if tracefile is None:


@ -36,7 +36,7 @@ import shutil
import stat import stat
import sys import sys
import time import time
from typing import List from typing import List, Callable
import SCons.Action import SCons.Action
import SCons.Builder import SCons.Builder
@ -58,31 +58,28 @@ _default_env = None
# Lazily instantiate the default environment so the overhead of creating # Lazily instantiate the default environment so the overhead of creating
# it doesn't apply when it's not needed. # it doesn't apply when it's not needed.
def _fetch_DefaultEnvironment(*args, **kw): def _fetch_DefaultEnvironment(*args, **kwargs):
"""Returns the already-created default construction environment.""" """Returns the already-created default construction environment."""
global _default_env
return _default_env return _default_env
def DefaultEnvironment(*args, **kw): def DefaultEnvironment(*args, **kwargs):
""" """Construct the global ("default") construction environment.
Initial public entry point for creating the default construction
Environment.
After creating the environment, we overwrite our name The environment is provisioned with the values from *kwargs*.
(DefaultEnvironment) with the _fetch_DefaultEnvironment() function,
which more efficiently returns the initialized default construction
environment without checking for its existence.
(This function still exists with its _default_check because someone After the environment is created, this function is replaced with
else (*cough* Script/__init__.py *cough*) may keep a reference a reference to :func:`_fetch_DefaultEnvironment` which efficiently
to this function. So we can't use the fully functional idiom of returns the initialized default construction environment without
having the name originally be a something that *only* creates the checking for its existence.
construction environment and then overwrites the name.)
Historically, some parts of the code held references to this function.
Thus it still has the existence check for :data:`_default_env` rather
than just blindly creating the environment and overwriting itself.
""" """
global _default_env global _default_env
if not _default_env: if not _default_env:
_default_env = SCons.Environment.Environment(*args, **kw) _default_env = SCons.Environment.Environment(*args, **kwargs)
_default_env.Decider('content') _default_env.Decider('content')
global DefaultEnvironment global DefaultEnvironment
DefaultEnvironment = _fetch_DefaultEnvironment DefaultEnvironment = _fetch_DefaultEnvironment
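
The replace-yourself-after-first-use idiom described in the docstring above, stripped of SCons specifics, is roughly the following (a generic sketch; build_the_thing is a hypothetical setup step):

    _cached = None

    def expensive_factory(*args, **kwargs):
        global _cached, expensive_factory
        if _cached is None:
            _cached = build_the_thing(*args, **kwargs)  # hypothetical one-time setup
        expensive_factory = lambda *a, **kw: _cached    # later calls skip the check
        return _cached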
@ -95,7 +92,7 @@ def DefaultEnvironment(*args, **kw):
# going into a shared library are, in fact, shared. # going into a shared library are, in fact, shared.
def StaticObjectEmitter(target, source, env): def StaticObjectEmitter(target, source, env):
for tgt in target: for tgt in target:
tgt.attributes.shared = None tgt.attributes.shared = False
return target, source return target, source
@ -112,7 +109,7 @@ def SharedFlagChecker(source, target, env):
try: try:
shared = src.attributes.shared shared = src.attributes.shared
except AttributeError: except AttributeError:
shared = None shared = False
if not shared: if not shared:
raise SCons.Errors.UserError( raise SCons.Errors.UserError(
"Source file: %s is static and is not compatible with shared target: %s" % (src, target[0])) "Source file: %s is static and is not compatible with shared target: %s" % (src, target[0]))
@ -164,7 +161,7 @@ def get_paths_str(dest) -> str:
If *dest* is a list, manually converts each elem to a string. If *dest* is a list, manually converts each elem to a string.
""" """
def quote(arg): def quote(arg) -> str:
return f'"{arg}"' return f'"{arg}"'
if is_List(dest): if is_List(dest):
@ -256,7 +253,7 @@ Chmod = ActionFactory(chmod_func, chmod_strfunc)
def copy_func(dest, src, symlinks=True) -> int: def copy_func(dest, src, symlinks: bool=True) -> int:
"""Implementation of the Copy action function. """Implementation of the Copy action function.
Copies *src* to *dest*. If *src* is a list, *dest* must be Copies *src* to *dest*. If *src* is a list, *dest* must be
@ -308,7 +305,7 @@ def copy_func(dest, src, symlinks=True) -> int:
return 0 return 0
def copy_strfunc(dest, src, symlinks=True) -> str: def copy_strfunc(dest, src, symlinks: bool=True) -> str:
"""strfunction for the Copy action function.""" """strfunction for the Copy action function."""
return f'Copy({get_paths_str(dest)}, {get_paths_str(src)})' return f'Copy({get_paths_str(dest)}, {get_paths_str(src)})'
@ -316,7 +313,7 @@ def copy_strfunc(dest, src, symlinks=True) -> str:
Copy = ActionFactory(copy_func, copy_strfunc) Copy = ActionFactory(copy_func, copy_strfunc)
def delete_func(dest, must_exist=False) -> None: def delete_func(dest, must_exist: bool=False) -> None:
"""Implementation of the Delete action function. """Implementation of the Delete action function.
Lets the Python :func:`os.unlink` raise an error if *dest* does not exist, Lets the Python :func:`os.unlink` raise an error if *dest* does not exist,
@ -338,7 +335,7 @@ def delete_func(dest, must_exist=False) -> None:
os.unlink(entry) os.unlink(entry)
def delete_strfunc(dest, must_exist=False) -> str: def delete_strfunc(dest, must_exist: bool=False) -> str:
"""strfunction for the Delete action function.""" """strfunction for the Delete action function."""
return f'Delete({get_paths_str(dest)})' return f'Delete({get_paths_str(dest)})'
@ -392,7 +389,7 @@ Touch = ActionFactory(touch_func, lambda file: f'Touch({get_paths_str(file)})')
# Internal utility functions # Internal utility functions
# pylint: disable-msg=too-many-arguments # pylint: disable-msg=too-many-arguments
def _concat(prefix, items_iter, suffix, env, f=lambda x: x, target=None, source=None, affect_signature=True): def _concat(prefix, items_iter, suffix, env, f=lambda x: x, target=None, source=None, affect_signature: bool=True):
""" """
Creates a new list from 'items_iter' by first interpolating each element Creates a new list from 'items_iter' by first interpolating each element
in the list using the 'env' dictionary and then calling f on the in the list using the 'env' dictionary and then calling f on the
@ -458,16 +455,35 @@ def _concat_ixes(prefix, items_iter, suffix, env):
return result return result
def _stripixes(prefix, itms, suffix, stripprefixes, stripsuffixes, env, c=None):
"""
This is a wrapper around _concat()/_concat_ixes() that checks for
the existence of prefixes or suffixes on list items and strips them
where it finds them. This is used by tools (like the GNU linker)
that need to turn something like 'libfoo.a' into '-lfoo'.
"""
if not itms:
return itms
def _stripixes(
prefix: str,
items,
suffix: str,
stripprefixes: List[str],
stripsuffixes: List[str],
env,
literal_prefix: str = "",
c: Callable[[list], list] = None,
) -> list:
"""Returns a list with text added to items after first stripping them.
A companion to :func:`_concat_ixes`, used by tools (like the GNU
linker) that need to turn something like ``libfoo.a`` into ``-lfoo``.
*stripprefixes* and *stripsuffixes* are stripped from *items*.
Calls function *c* to postprocess the result.
Args:
prefix: string to prepend to elements
items: string or iterable to transform
suffix: string to append to elements
stripprefixes: prefix string(s) to strip from elements
stripsuffixes: suffix string(s) to strip from elements
env: construction environment for variable interpolation
c: optional function to perform a transformation on the list.
The default is `None`, which will select :func:`_concat_ixes`.
"""
if not items:
return items
if not callable(c): if not callable(c):
env_c = env['_concat'] env_c = env['_concat']
@ -483,8 +499,16 @@ def _stripixes(prefix, itms, suffix, stripprefixes, stripsuffixes, env, c=None):
stripprefixes = list(map(env.subst, flatten(stripprefixes))) stripprefixes = list(map(env.subst, flatten(stripprefixes)))
stripsuffixes = list(map(env.subst, flatten(stripsuffixes))) stripsuffixes = list(map(env.subst, flatten(stripsuffixes)))
# This is a little funky: if literal_prefix is the same as os.pathsep
# (e.g. both ':'), the normal conversion to a PathList will drop the
# literal_prefix prefix. Tell it not to split in that case, which *should*
# be okay because if we come through here, we're normally processing
# library names and won't have strings like "path:secondpath:thirdpath"
# which is why PathList() otherwise wants to split strings.
do_split = not literal_prefix == os.pathsep
stripped = [] stripped = []
for l in SCons.PathList.PathList(itms).subst_path(env, None, None): for l in SCons.PathList.PathList(items, do_split).subst_path(env, None, None):
if isinstance(l, SCons.Node.FS.File): if isinstance(l, SCons.Node.FS.File):
stripped.append(l) stripped.append(l)
continue continue
@ -492,6 +516,10 @@ def _stripixes(prefix, itms, suffix, stripprefixes, stripsuffixes, env, c=None):
if not is_String(l): if not is_String(l):
l = str(l) l = str(l)
if literal_prefix and l.startswith(literal_prefix):
stripped.append(l)
continue
for stripprefix in stripprefixes: for stripprefix in stripprefixes:
lsp = len(stripprefix) lsp = len(stripprefix)
if l[:lsp] == stripprefix: if l[:lsp] == stripprefix:
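As a plain-Python illustration of what _stripixes accomplishes (a simplified sketch that ignores Node objects, variable interpolation, the literal_prefix escape and the postprocessing callable):

    def strip_and_decorate(names, stripprefixes, stripsuffixes, prefix, suffix):
        out = []
        for name in names:
            for sp in stripprefixes:
                if name.startswith(sp):
                    name = name[len(sp):]
                    break
            for ss in stripsuffixes:
                if name.endswith(ss):
                    name = name[:-len(ss)]
                    break
            out.append(prefix + name + suffix)
        return out

    # strip_and_decorate(['libfoo.a', 'libm.so'], ['lib'], ['.a', '.so'], '-l', '')
    # -> ['-lfoo', '-lm']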
@ -591,18 +619,16 @@ def _defines(prefix, defs, suffix, env, target=None, source=None, c=_concat_ixes
class NullCmdGenerator: class NullCmdGenerator:
"""This is a callable class that can be used in place of other """Callable class for use as a no-effect command generator.
command generators if you don't want them to do anything.
The __call__ method for this class simply returns the thing The ``__call__`` method for this class simply returns the thing
you instantiated it with. you instantiated it with. Example usage::
Example usage:
env["DO_NOTHING"] = NullCmdGenerator env["DO_NOTHING"] = NullCmdGenerator
env["LINKCOM"] = "${DO_NOTHING('$LINK $SOURCES $TARGET')}" env["LINKCOM"] = "${DO_NOTHING('$LINK $SOURCES $TARGET')}"
""" """
def __init__(self, cmd): def __init__(self, cmd) -> None:
self.cmd = cmd self.cmd = cmd
def __call__(self, target, source, env, for_signature=None): def __call__(self, target, source, env, for_signature=None):
@ -613,16 +639,18 @@ class Variable_Method_Caller:
"""A class for finding a construction variable on the stack and """A class for finding a construction variable on the stack and
calling one of its methods. calling one of its methods.
We use this to support "construction variables" in our string
eval()s that actually stand in for methods--specifically, use
of "RDirs" in call to _concat that should actually execute the
"TARGET.RDirs" method. (We used to support this by creating a little
"build dictionary" that mapped RDirs to the method, but this got in
the way of Memoizing construction environments, because we had to
create new environment objects to hold the variables.)
Used to support "construction variables" appearing in string
``eval``s that actually stand in for methods--specifically, the use
of "RDirs" in a call to :func:`_concat` that should actually execute the
``TARGET.RDirs`` method.
Historical note: This was formerly supported by creating a little
"build dictionary" that mapped RDirs to the method, but this got
in the way of Memoizing construction environments, because we had to
create new environment objects to hold the variables.
""" """
def __init__(self, variable, method): def __init__(self, variable, method) -> None:
self.variable = variable self.variable = variable
self.method = method self.method = method
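For context, the stock construction environment wires this class up roughly as below (reproduced from memory, so treat the exact variable strings and the _defaults_excerpt name as illustrative rather than authoritative):

    _defaults_excerpt = {
        'RDirs': Variable_Method_Caller('TARGET', 'RDirs'),
        '_CPPINCFLAGS': '$( ${_concat(INCPREFIX, CPPPATH, INCSUFFIX, '
                        '__env__, RDirs, TARGET, SOURCE)} $)',
    }
    # Expanding $_CPPINCFLAGS eval()s the embedded _concat call; the name RDirs
    # resolves to this object, which locates TARGET on the call stack and invokes
    # its .RDirs() method to rewrite include directories for variant builds.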
@ -677,6 +705,9 @@ def __lib_either_version_flag(env, version_var1, version_var2, flags_var):
return None return None
ConstructionEnvironment = { ConstructionEnvironment = {
'BUILDERS': {}, 'BUILDERS': {},
'SCANNERS': [SCons.Tool.SourceFileScanner], 'SCANNERS': [SCons.Tool.SourceFileScanner],
@ -36,6 +36,8 @@ import sys
import re import re
import shlex import shlex
from collections import UserDict, deque from collections import UserDict, deque
from subprocess import PIPE, DEVNULL
from typing import Optional
import SCons.Action import SCons.Action
import SCons.Builder import SCons.Builder
@ -74,6 +76,7 @@ from SCons.Util import (
to_String_for_subst, to_String_for_subst,
uniquer_hashables, uniquer_hashables,
) )
from SCons.Util.sctyping import ExecutorType
class _Null: class _Null:
pass pass
@ -87,7 +90,7 @@ _warn_target_signatures_deprecated = True
CleanTargets = {} CleanTargets = {}
CalculatorArgs = {} CalculatorArgs = {}
def alias_builder(env, target, source): def alias_builder(env, target, source) -> None:
pass pass
AliasBuilder = SCons.Builder.Builder( AliasBuilder = SCons.Builder.Builder(
@ -99,7 +102,7 @@ AliasBuilder = SCons.Builder.Builder(
name='AliasBuilder', name='AliasBuilder',
) )
def apply_tools(env, tools, toolpath): def apply_tools(env, tools, toolpath) -> None:
# Store the toolpath in the Environment. # Store the toolpath in the Environment.
# This is expected to work even if no tools are given, so do this first. # This is expected to work even if no tools are given, so do this first.
if toolpath is not None: if toolpath is not None:
@ -145,11 +148,11 @@ def copy_non_reserved_keywords(dict):
del result[k] del result[k]
return result return result
def _set_reserved(env, key, value): def _set_reserved(env, key, value) -> None:
msg = "Ignoring attempt to set reserved variable `$%s'" msg = "Ignoring attempt to set reserved variable `$%s'"
SCons.Warnings.warn(SCons.Warnings.ReservedVariableWarning, msg % key) SCons.Warnings.warn(SCons.Warnings.ReservedVariableWarning, msg % key)
def _set_future_reserved(env, key, value): def _set_future_reserved(env, key, value) -> None:
env._dict[key] = value env._dict[key] = value
msg = "`$%s' will be reserved in a future release and setting it will become ignored" msg = "`$%s' will be reserved in a future release and setting it will become ignored"
SCons.Warnings.warn(SCons.Warnings.FutureReservedVariableWarning, msg % key) SCons.Warnings.warn(SCons.Warnings.FutureReservedVariableWarning, msg % key)
@ -167,11 +170,11 @@ def _set_BUILDERS(env, key, value):
raise UserError('%s is not a Builder.' % repr(v)) raise UserError('%s is not a Builder.' % repr(v))
bd.update(value) bd.update(value)
def _del_SCANNERS(env, key): def _del_SCANNERS(env, key) -> None:
del env._dict[key] del env._dict[key]
env.scanner_map_delete() env.scanner_map_delete()
def _set_SCANNERS(env, key, value): def _set_SCANNERS(env, key, value) -> None:
env._dict[key] = value env._dict[key] = value
env.scanner_map_delete() env.scanner_map_delete()
@ -305,7 +308,7 @@ def _add_cppdefines(
elif is_Tuple(defines): elif is_Tuple(defines):
if len(defines) > 2: if len(defines) > 2:
raise SCons.Errors.UserError( raise SCons.Errors.UserError(
f"Invalid tuple in CPPDEFINES: {define!r}, must be a two-tuple" f"Invalid tuple in CPPDEFINES: {defines!r}, must be a two-tuple"
) )
env_dict[key] = deque([defines]) env_dict[key] = deque([defines])
elif is_List(defines): elif is_List(defines):
@ -438,10 +441,10 @@ class BuilderWrapper(MethodWrapper):
source = [source] source = [source]
return super().__call__(target, source, *args, **kw) return super().__call__(target, source, *args, **kw)
def __repr__(self): def __repr__(self) -> str:
return '<BuilderWrapper %s>' % repr(self.name) return '<BuilderWrapper %s>' % repr(self.name)
def __str__(self): def __str__(self) -> str:
return self.__repr__() return self.__repr__()
def __getattr__(self, name): def __getattr__(self, name):
@ -452,7 +455,7 @@ class BuilderWrapper(MethodWrapper):
else: else:
raise AttributeError(name) raise AttributeError(name)
def __setattr__(self, name, value): def __setattr__(self, name, value) -> None:
if name == 'env': if name == 'env':
self.object = value self.object = value
elif name == 'builder': elif name == 'builder':
@ -476,7 +479,7 @@ class BuilderDict(UserDict):
the Builders. We need to do this because every time someone changes the Builders. We need to do this because every time someone changes
the Builders in the Environment's BUILDERS dictionary, we must the Builders in the Environment's BUILDERS dictionary, we must
update the Environment's attributes.""" update the Environment's attributes."""
def __init__(self, mapping, env): def __init__(self, mapping, env) -> None:
# Set self.env before calling the superclass initialization, # Set self.env before calling the superclass initialization,
# because it will end up calling our other methods, which will # because it will end up calling our other methods, which will
# need to point the values in this dictionary to self.env. # need to point the values in this dictionary to self.env.
@ -488,7 +491,7 @@ class BuilderDict(UserDict):
# just copying would modify the original builder # just copying would modify the original builder
raise TypeError( 'cannot semi_deepcopy a BuilderDict' ) raise TypeError( 'cannot semi_deepcopy a BuilderDict' )
def __setitem__(self, item, val): def __setitem__(self, item, val) -> None:
try: try:
method = getattr(self.env, item).method method = getattr(self.env, item).method
except AttributeError: except AttributeError:
@ -498,26 +501,22 @@ class BuilderDict(UserDict):
super().__setitem__(item, val) super().__setitem__(item, val)
BuilderWrapper(self.env, val, item) BuilderWrapper(self.env, val, item)
def __delitem__(self, item): def __delitem__(self, item) -> None:
super().__delitem__(item) super().__delitem__(item)
delattr(self.env, item) delattr(self.env, item)
def update(self, mapping): def update(self, mapping) -> None:
for i, v in mapping.items(): for i, v in mapping.items():
self.__setitem__(i, v) self.__setitem__(i, v)
_is_valid_var = re.compile(r'[_a-zA-Z]\w*$') _is_valid_var = re.compile(r'[_a-zA-Z]\w*$')
def is_valid_construction_var(varstr): def is_valid_construction_var(varstr) -> bool:
"""Return if the specified string is a legitimate construction """Return True if *varstr* is a legitimate construction variable."""
variable.
"""
return _is_valid_var.match(varstr) return _is_valid_var.match(varstr)
class SubstitutionEnvironment: class SubstitutionEnvironment:
"""Base class for different flavors of construction environments. """Base class for different flavors of construction environments.
@ -544,7 +543,7 @@ class SubstitutionEnvironment:
class actually becomes useful.) class actually becomes useful.)
""" """
def __init__(self, **kw): def __init__(self, **kw) -> None:
"""Initialization of an underlying SubstitutionEnvironment class. """Initialization of an underlying SubstitutionEnvironment class.
""" """
if SCons.Debug.track_instances: logInstanceCreation(self, 'Environment.SubstitutionEnvironment') if SCons.Debug.track_instances: logInstanceCreation(self, 'Environment.SubstitutionEnvironment')
@ -556,7 +555,7 @@ class SubstitutionEnvironment:
self.added_methods = [] self.added_methods = []
#self._memo = {} #self._memo = {}
def _init_special(self): def _init_special(self) -> None:
"""Initial the dispatch tables for special handling of """Initial the dispatch tables for special handling of
special construction variables.""" special construction variables."""
self._special_del = {} self._special_del = {}
@ -577,7 +576,7 @@ class SubstitutionEnvironment:
def __eq__(self, other): def __eq__(self, other):
return self._dict == other._dict return self._dict == other._dict
def __delitem__(self, key): def __delitem__(self, key) -> None:
special = self._special_del.get(key) special = self._special_del.get(key)
if special: if special:
special(self, key) special(self, key)
@ -614,7 +613,7 @@ class SubstitutionEnvironment:
"""Emulates the get() method of dictionaries.""" """Emulates the get() method of dictionaries."""
return self._dict.get(key, default) return self._dict.get(key, default)
def __contains__(self, key): def __contains__(self, key) -> bool:
return key in self._dict return key in self._dict
def keys(self): def keys(self):
@ -634,6 +633,17 @@ class SubstitutionEnvironment:
return self._dict.setdefault(key, default) return self._dict.setdefault(key, default)
def arg2nodes(self, args, node_factory=_null, lookup_list=_null, **kw): def arg2nodes(self, args, node_factory=_null, lookup_list=_null, **kw):
"""Converts *args* to a list of nodes.
Arguments:
args - filename strings or nodes to convert; nodes are just
added to the list without further processing.
node_factory - optional factory to create the nodes; if not
specified, will use this environment's ``fs.File`` method.
lookup_list - optional list of lookup functions to call to
attempt to find the file referenced by each *args*.
kw - keyword arguments that represent additional nodes to add.
"""
if node_factory is _null: if node_factory is _null:
node_factory = self.fs.File node_factory = self.fs.File
if lookup_list is _null: if lookup_list is _null:
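A hypothetical usage sketch of arg2nodes inside an SConscript (file names invented):

    env = Environment()
    files = env.arg2nodes(['main.c', 'util.c'])          # strings become File nodes
    dirs = env.arg2nodes('src', env.fs.Dir)              # explicit factory: a Dir node
    mixed = env.arg2nodes([env.File('gen.c'), 'lex.l'])  # existing Nodes pass through untouched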
@ -682,7 +692,7 @@ class SubstitutionEnvironment:
def lvars(self): def lvars(self):
return {} return {}
def subst(self, string, raw=0, target=None, source=None, conv=None, executor=None, overrides=False): def subst(self, string, raw: int=0, target=None, source=None, conv=None, executor: Optional[ExecutorType] = None, overrides: Optional[dict] = None):
"""Recursively interpolates construction variables from the """Recursively interpolates construction variables from the
Environment into the specified string, returning the expanded Environment into the specified string, returning the expanded
result. Construction variables are specified by a $ prefix result. Construction variables are specified by a $ prefix
@ -699,7 +709,7 @@ class SubstitutionEnvironment:
lvars.update(executor.get_lvars()) lvars.update(executor.get_lvars())
return SCons.Subst.scons_subst(string, self, raw, target, source, gvars, lvars, conv, overrides=overrides) return SCons.Subst.scons_subst(string, self, raw, target, source, gvars, lvars, conv, overrides=overrides)
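A quick illustration of the expansion, assuming an SConstruct context and invented variable values:

    env = Environment(CC='gcc', CCFLAGS='-O2', MYCOM='$CC $CCFLAGS -c $SOURCES')
    env.subst('building with $CC')   # -> 'building with gcc'
    env.subst('$MYCOM')              # -> 'gcc -O2 -c' (no sources bound in this call)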
def subst_kw(self, kw, raw=0, target=None, source=None): def subst_kw(self, kw, raw: int=0, target=None, source=None):
nkw = {} nkw = {}
for k, v in kw.items(): for k, v in kw.items():
k = self.subst(k, raw, target, source) k = self.subst(k, raw, target, source)
@ -708,9 +718,11 @@ class SubstitutionEnvironment:
nkw[k] = v nkw[k] = v
return nkw return nkw
def subst_list(self, string, raw=0, target=None, source=None, conv=None, executor=None, overrides=False): def subst_list(self, string, raw: int=0, target=None, source=None, conv=None, executor: Optional[ExecutorType] = None, overrides: Optional[dict] = None):
"""Calls through to SCons.Subst.scons_subst_list(). See """Calls through to SCons.Subst.scons_subst_list().
the documentation for that function."""
See the documentation for that function.
"""
gvars = self.gvars() gvars = self.gvars()
lvars = self.lvars() lvars = self.lvars()
lvars['__env__'] = self lvars['__env__'] = self
@ -719,9 +731,10 @@ class SubstitutionEnvironment:
return SCons.Subst.scons_subst_list(string, self, raw, target, source, gvars, lvars, conv, overrides=overrides) return SCons.Subst.scons_subst_list(string, self, raw, target, source, gvars, lvars, conv, overrides=overrides)
def subst_path(self, path, target=None, source=None): def subst_path(self, path, target=None, source=None):
"""Substitute a path list, turning EntryProxies into Nodes """Substitute a path list.
and leaving Nodes (and other objects) as-is."""
Turns EntryProxies into Nodes, leaving Nodes (and other objects) as-is.
"""
if not is_List(path): if not is_List(path):
path = [path] path = [path]
@ -774,14 +787,11 @@ class SubstitutionEnvironment:
Raises: Raises:
OSError: if the external command returned non-zero exit status. OSError: if the external command returned non-zero exit status.
""" """
import subprocess
# common arguments # common arguments
kw = { kw = {
"stdin": "devnull", "stdin": DEVNULL,
"stdout": subprocess.PIPE, "stdout": PIPE,
"stderr": subprocess.PIPE, "stderr": PIPE,
"universal_newlines": True, "universal_newlines": True,
} }
# if the command is a list, assume it's been quoted # if the command is a list, assume it's been quoted
@ -789,17 +799,15 @@ class SubstitutionEnvironment:
if not is_List(command): if not is_List(command):
kw["shell"] = True kw["shell"] = True
# run constructed command # run constructed command
p = SCons.Action._subproc(self, command, **kw)
out, err = p.communicate()
status = p.wait()
if err:
sys.stderr.write("" + err)
if status:
raise OSError("'%s' exited %d" % (command, status))
return out
cp = SCons.Action.scons_subproc_run(self, command, **kw)
if cp.stderr:
sys.stderr.write(cp.stderr)
if cp.returncode:
raise OSError(f'{command!r} exited {cp.returncode}')
return cp.stdout
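An illustrative call (the command is hypothetical): stdout comes back as a string, stderr is echoed as the code above shows, and a non-zero exit status surfaces as OSError.

    try:
        toolchain = env.backtick('cc --version').splitlines()[0]
    except OSError:
        toolchain = 'unknown'   # command failed or exited non-zero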
def AddMethod(self, function, name=None): def AddMethod(self, function, name=None) -> None:
""" """
Adds the specified function as a method of this construction Adds the specified function as a method of this construction
environment with the specified name. If the name is omitted, environment with the specified name. If the name is omitted,
@ -808,7 +816,7 @@ class SubstitutionEnvironment:
method = MethodWrapper(self, function, name) method = MethodWrapper(self, function, name)
self.added_methods.append(method) self.added_methods.append(method)
def RemoveMethod(self, function): def RemoveMethod(self, function) -> None:
""" """
Removes the specified function's MethodWrapper from the Removes the specified function's MethodWrapper from the
added_methods list, so we don't re-bind it when making a clone. added_methods list, so we don't re-bind it when making a clone.
@ -873,7 +881,7 @@ class SubstitutionEnvironment:
'RPATH' : [], 'RPATH' : [],
} }
def do_parse(arg): def do_parse(arg) -> None:
# if arg is a sequence, recurse with each element # if arg is a sequence, recurse with each element
if not arg: if not arg:
return return
@ -887,7 +895,7 @@ class SubstitutionEnvironment:
arg = self.backtick(arg[1:]) arg = self.backtick(arg[1:])
# utility function to deal with -D option # utility function to deal with -D option
def append_define(name, mapping=mapping): def append_define(name, mapping=mapping) -> None:
t = name.split('=') t = name.split('=')
if len(t) == 1: if len(t) == 1:
mapping['CPPDEFINES'].append(name) mapping['CPPDEFINES'].append(name)
@ -1035,7 +1043,7 @@ class SubstitutionEnvironment:
do_parse(arg) do_parse(arg)
return mapping return mapping
def MergeFlags(self, args, unique=True) -> None: def MergeFlags(self, args, unique: bool=True) -> None:
"""Merge flags into construction variables. """Merge flags into construction variables.
Merges the flags from *args* into this construction environent. Merges the flags from *args* into this construction environent.
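A hypothetical example of the round trip: ParseFlags (whose internals appear above) sorts raw options into a mapping, and MergeFlags folds that mapping into the environment.

    flags = env.ParseFlags('-I/opt/foo/include -DNDEBUG -L/opt/foo/lib -lfoo')
    # flags now contains e.g. {'CPPPATH': ['/opt/foo/include'], 'CPPDEFINES': ['NDEBUG'],
    #                          'LIBPATH': ['/opt/foo/lib'], 'LIBS': ['foo'], ...}
    env.MergeFlags(flags)
    env.MergeFlags('-pthread')   # strings are parsed first, then merged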
@ -1166,7 +1174,7 @@ class Base(SubstitutionEnvironment):
variables=None, variables=None,
parse_flags=None, parse_flags=None,
**kw **kw
): ) -> None:
"""Initialization of a basic SCons construction environment. """Initialization of a basic SCons construction environment.
Sets up special construction variables like BUILDER, Sets up special construction variables like BUILDER,
@ -1304,7 +1312,7 @@ class Base(SubstitutionEnvironment):
self._last_CacheDir = cd self._last_CacheDir = cd
return cd return cd
def get_factory(self, factory, default='File'): def get_factory(self, factory, default: str='File'):
"""Return a factory function for creating Nodes for this """Return a factory function for creating Nodes for this
construction environment. construction environment.
""" """
@ -1373,7 +1381,7 @@ class Base(SubstitutionEnvironment):
skey = skey.lower() skey = skey.lower()
return self._gsm().get(skey) return self._gsm().get(skey)
def scanner_map_delete(self, kw=None): def scanner_map_delete(self, kw=None) -> None:
"""Delete the cached scanner map (if we need to). """Delete the cached scanner map (if we need to).
""" """
try: try:
@ -1381,14 +1389,14 @@ class Base(SubstitutionEnvironment):
except KeyError: except KeyError:
pass pass
def _update(self, other): def _update(self, other) -> None:
"""Private method to update an environment's consvar dict directly. """Private method to update an environment's consvar dict directly.
Bypasses the normal checks that occur when users try to set items. Bypasses the normal checks that occur when users try to set items.
""" """
self._dict.update(other) self._dict.update(other)
def _update_onlynew(self, other): def _update_onlynew(self, other) -> None:
"""Private method to add new items to an environment's consvar dict. """Private method to add new items to an environment's consvar dict.
Only adds items from `other` whose keys do not already appear in Only adds items from `other` whose keys do not already appear in
@ -1399,23 +1407,6 @@ class Base(SubstitutionEnvironment):
if k not in self._dict: if k not in self._dict:
self._dict[k] = v self._dict[k] = v
def get_src_sig_type(self):
try:
return self.src_sig_type
except AttributeError:
t = SCons.Defaults.DefaultEnvironment().src_sig_type
self.src_sig_type = t
return t
def get_tgt_sig_type(self):
try:
return self.tgt_sig_type
except AttributeError:
t = SCons.Defaults.DefaultEnvironment().tgt_sig_type
self.tgt_sig_type = t
return t
####################################################################### #######################################################################
# Public methods for manipulating an Environment. These begin with # Public methods for manipulating an Environment. These begin with
# upper-case letters. The essential characteristic of methods in # upper-case letters. The essential characteristic of methods in
@ -1425,7 +1416,7 @@ class Base(SubstitutionEnvironment):
# an Environment's construction variables. # an Environment's construction variables.
####################################################################### #######################################################################
def Append(self, **kw): def Append(self, **kw) -> None:
"""Append values to construction variables in an Environment. """Append values to construction variables in an Environment.
The variable is created if it is not already present. The variable is created if it is not already present.
@ -1505,8 +1496,8 @@ class Base(SubstitutionEnvironment):
path = str(self.fs.Dir(path)) path = str(self.fs.Dir(path))
return path return path
def AppendENVPath(self, name, newpath, envname='ENV', def AppendENVPath(self, name, newpath, envname: str='ENV',
sep=os.pathsep, delete_existing=False): sep=os.pathsep, delete_existing: bool=False) -> None:
"""Append path elements to the path *name* in the *envname* """Append path elements to the path *name* in the *envname*
dictionary for this environment. Will only add any particular dictionary for this environment. Will only add any particular
path once, and will normpath and normcase all paths to help path once, and will normpath and normcase all paths to help
@ -1528,7 +1519,7 @@ class Base(SubstitutionEnvironment):
self._dict[envname][name] = nv self._dict[envname][name] = nv
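Illustrative calls (paths invented); note this edits the execution environment under env['ENV'], not a construction variable:

    env.AppendENVPath('PATH', '/opt/crosstool/bin')
    env.AppendENVPath('LD_LIBRARY_PATH', ['/opt/crosstool/lib', '/usr/local/lib'])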
def AppendUnique(self, delete_existing=False, **kw): def AppendUnique(self, delete_existing: bool=False, **kw) -> None:
"""Append values to existing construction variables """Append values to existing construction variables
in an Environment, if they're not already there. in an Environment, if they're not already there.
If delete_existing is True, removes existing values first, so If delete_existing is True, removes existing values first, so
@ -1619,29 +1610,21 @@ class Base(SubstitutionEnvironment):
if SCons.Debug.track_instances: logInstanceCreation(self, 'Environment.EnvironmentClone') if SCons.Debug.track_instances: logInstanceCreation(self, 'Environment.EnvironmentClone')
return clone return clone
def _changed_build(self, dependency, target, prev_ni, repo_node=None): def _changed_build(self, dependency, target, prev_ni, repo_node=None) -> bool:
if dependency.changed_state(target, prev_ni, repo_node): if dependency.changed_state(target, prev_ni, repo_node):
return 1 return True
return self.decide_source(dependency, target, prev_ni, repo_node) return self.decide_source(dependency, target, prev_ni, repo_node)
def _changed_content(self, dependency, target, prev_ni, repo_node=None): def _changed_content(self, dependency, target, prev_ni, repo_node=None) -> bool:
return dependency.changed_content(target, prev_ni, repo_node) return dependency.changed_content(target, prev_ni, repo_node)
def _changed_source(self, dependency, target, prev_ni, repo_node=None):
target_env = dependency.get_build_env()
type = target_env.get_tgt_sig_type()
if type == 'source':
return target_env.decide_source(dependency, target, prev_ni, repo_node)
else:
return target_env.decide_target(dependency, target, prev_ni, repo_node)
def _changed_timestamp_then_content(self, dependency, target, prev_ni, repo_node=None):
return dependency.changed_timestamp_then_content(target, prev_ni, repo_node)
def _changed_timestamp_then_content(self, dependency, target, prev_ni, repo_node=None) -> bool:
return dependency.changed_timestamp_then_content(target, prev_ni, repo_node)
def _changed_timestamp_newer(self, dependency, target, prev_ni, repo_node=None): def _changed_timestamp_newer(self, dependency, target, prev_ni, repo_node=None) -> bool:
return dependency.changed_timestamp_newer(target, prev_ni, repo_node) return dependency.changed_timestamp_newer(target, prev_ni, repo_node)
def _changed_timestamp_match(self, dependency, target, prev_ni, repo_node=None): def _changed_timestamp_match(self, dependency, target, prev_ni, repo_node=None) -> bool:
return dependency.changed_timestamp_match(target, prev_ni, repo_node) return dependency.changed_timestamp_match(target, prev_ni, repo_node)
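These _changed_* helpers are the policies that the Decider method (defined just below) selects by name; a hedged usage sketch, where the custom function is hypothetical:

    env.Decider('timestamp-match')   # classic make-style comparison
    env.Decider('content')           # the default installed by DefaultEnvironment above

    def decide_if_changed(dependency, target, prev_ni, repo_node=None) -> bool:
        # custom policy: delegate to a plain content comparison
        return dependency.changed_content(target, prev_ni, repo_node)
    env.Decider(decide_if_changed)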
def Decider(self, function): def Decider(self, function):
@ -1704,7 +1687,7 @@ class Base(SubstitutionEnvironment):
return dlist return dlist
def Dump(self, key=None, format='pretty'): def Dump(self, key=None, format: str='pretty'):
""" Return construction variables serialized to a string. """ Return construction variables serialized to a string.
Args: Args:
@ -1737,7 +1720,7 @@ class Base(SubstitutionEnvironment):
elif fmt == 'json': elif fmt == 'json':
import json import json
def non_serializable(obj): def non_serializable(obj):
return str(type(obj).__qualname__) return '<<non-serializable: %s>>' % type(obj).__qualname__
return json.dumps(cvars, indent=4, default=non_serializable) return json.dumps(cvars, indent=4, default=non_serializable)
else: else:
raise ValueError("Unsupported serialization format: %s." % fmt) raise ValueError("Unsupported serialization format: %s." % fmt)
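Illustrative Dump calls (output file name invented):

    print(env.Dump('CCCOM'))              # pretty-print a single variable
    with open('env.json', 'w') as f:
        f.write(env.Dump(format='json'))  # non-serializable values now render as
                                          # '<<non-serializable: TypeName>>'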
@ -1764,7 +1747,7 @@ class Base(SubstitutionEnvironment):
return path return path
def ParseConfig(self, command, function=None, unique=True): def ParseConfig(self, command, function=None, unique: bool=True):
"""Parse the result of running a command to update construction vars. """Parse the result of running a command to update construction vars.
Use ``function`` to parse the output of running ``command`` Use ``function`` to parse the output of running ``command``
@ -1790,7 +1773,7 @@ class Base(SubstitutionEnvironment):
return function(self, self.backtick(command), unique) return function(self, self.backtick(command), unique)
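The canonical use is with *-config style helper tools; a hypothetical example, roughly equivalent to merging the backtick output by hand:

    env.ParseConfig('pkg-config --cflags --libs libpng')
    # roughly: env.MergeFlags(env.backtick('pkg-config --cflags --libs libpng'))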
def ParseDepends(self, filename, must_exist=None, only_one=False): def ParseDepends(self, filename, must_exist=None, only_one: bool=False):
""" """
Parse a mkdep-style file for explicit dependencies. This is Parse a mkdep-style file for explicit dependencies. This is
completely abusable, and should be unnecessary in the "normal" completely abusable, and should be unnecessary in the "normal"
@ -1802,9 +1785,9 @@ class Base(SubstitutionEnvironment):
""" """
filename = self.subst(filename) filename = self.subst(filename)
try: try:
with open(filename, 'r') as fp: with open(filename) as fp:
lines = LogicalLines(fp).readlines() lines = LogicalLines(fp).readlines()
except IOError: except OSError:
if must_exist: if must_exist:
raise raise
return return
@ -1834,7 +1817,7 @@ class Base(SubstitutionEnvironment):
platform = self.subst(platform) platform = self.subst(platform)
return SCons.Platform.Platform(platform)(self) return SCons.Platform.Platform(platform)(self)
def Prepend(self, **kw): def Prepend(self, **kw) -> None:
"""Prepend values to construction variables in an Environment. """Prepend values to construction variables in an Environment.
The variable is created if it is not already present. The variable is created if it is not already present.
@ -1902,8 +1885,8 @@ class Base(SubstitutionEnvironment):
self.scanner_map_delete(kw) self.scanner_map_delete(kw)
def PrependENVPath(self, name, newpath, envname='ENV', def PrependENVPath(self, name, newpath, envname: str='ENV',
sep=os.pathsep, delete_existing=True): sep=os.pathsep, delete_existing: bool=True) -> None:
"""Prepend path elements to the path *name* in the *envname* """Prepend path elements to the path *name* in the *envname*
dictionary for this environment. Will only add any particular dictionary for this environment. Will only add any particular
path once, and will normpath and normcase all paths to help path once, and will normpath and normcase all paths to help
@ -1926,7 +1909,7 @@ class Base(SubstitutionEnvironment):
self._dict[envname][name] = nv self._dict[envname][name] = nv
def PrependUnique(self, delete_existing=False, **kw): def PrependUnique(self, delete_existing: bool=False, **kw) -> None:
"""Prepend values to existing construction variables """Prepend values to existing construction variables
in an Environment, if they're not already there. in an Environment, if they're not already there.
If delete_existing is True, removes existing values first, so If delete_existing is True, removes existing values first, so
@ -1969,7 +1952,7 @@ class Base(SubstitutionEnvironment):
self._dict[key] = val + dk self._dict[key] = val + dk
self.scanner_map_delete(kw) self.scanner_map_delete(kw)
def Replace(self, **kw): def Replace(self, **kw) -> None:
"""Replace existing construction variables in an Environment """Replace existing construction variables in an Environment
with new construction variables and/or values. with new construction variables and/or values.
""" """
@ -2009,7 +1992,7 @@ class Base(SubstitutionEnvironment):
name = name[:-len(old_suffix)] name = name[:-len(old_suffix)]
return os.path.join(dir, new_prefix+name+new_suffix) return os.path.join(dir, new_prefix+name+new_suffix)
def SetDefault(self, **kw): def SetDefault(self, **kw) -> None:
for k in list(kw.keys()): for k in list(kw.keys()):
if k in self._dict: if k in self._dict:
del kw[k] del kw[k]
@ -2159,7 +2142,7 @@ class Base(SubstitutionEnvironment):
nkw = self.subst_kw(kw) nkw = self.subst_kw(kw)
return SCons.Builder.Builder(**nkw) return SCons.Builder.Builder(**nkw)
def CacheDir(self, path, custom_class=None): def CacheDir(self, path, custom_class=None) -> None:
if path is not None: if path is not None:
path = self.subst(path) path = self.subst(path)
self._CacheDir_path = path self._CacheDir_path = path
@ -2174,7 +2157,7 @@ class Base(SubstitutionEnvironment):
# multiple threads, but initializing it before the task walk starts # multiple threads, but initializing it before the task walk starts
self.get_CacheDir() self.get_CacheDir()
def Clean(self, targets, files): def Clean(self, targets, files) -> None:
global CleanTargets global CleanTargets
tlist = self.arg2nodes(targets, self.fs.Entry) tlist = self.arg2nodes(targets, self.fs.Entry)
flist = self.arg2nodes(files, self.fs.Entry) flist = self.arg2nodes(files, self.fs.Entry)
@ -2341,7 +2324,7 @@ class Base(SubstitutionEnvironment):
else: else:
return result[0] return result[0]
def Glob(self, pattern, ondisk=True, source=False, strings=False, exclude=None): def Glob(self, pattern, ondisk: bool=True, source: bool=False, strings: bool=False, exclude=None):
return self.fs.Glob(self.subst(pattern), ondisk, source, strings, exclude) return self.fs.Glob(self.subst(pattern), ondisk, source, strings, exclude)
def Ignore(self, target, dependency): def Ignore(self, target, dependency):
@ -2368,6 +2351,7 @@ class Base(SubstitutionEnvironment):
return ret return ret
def Precious(self, *targets): def Precious(self, *targets):
"""Mark *targets* as precious: do not delete before building."""
tlist = [] tlist = []
for t in targets: for t in targets:
tlist.extend(self.arg2nodes(t, self.fs.Entry)) tlist.extend(self.arg2nodes(t, self.fs.Entry))
@ -2376,6 +2360,7 @@ class Base(SubstitutionEnvironment):
return tlist return tlist
def Pseudo(self, *targets): def Pseudo(self, *targets):
"""Mark *targets* as pseudo: must not exist."""
tlist = [] tlist = []
for t in targets: for t in targets:
tlist.extend(self.arg2nodes(t, self.fs.Entry)) tlist.extend(self.arg2nodes(t, self.fs.Entry))
@ -2383,14 +2368,18 @@ class Base(SubstitutionEnvironment):
t.set_pseudo() t.set_pseudo()
return tlist return tlist
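Hypothetical examples of the two markings ('run-tests' is an invented command):

    env = Environment()
    lib = env.StaticLibrary('mylib', ['a.c', 'b.c'])
    env.Precious(lib)                # keep the old archive; don't delete it before rebuilding

    stamp = env.Command('check', lib, 'run-tests $SOURCES')
    env.Pseudo(stamp)                # 'check' is never expected to appear on disk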
def Repository(self, *dirs, **kw): def Repository(self, *dirs, **kw) -> None:
"""Specify Repository directories to search."""
dirs = self.arg2nodes(list(dirs), self.fs.Dir) dirs = self.arg2nodes(list(dirs), self.fs.Dir)
self.fs.Repository(*dirs, **kw) self.fs.Repository(*dirs, **kw)
def Requires(self, target, prerequisite): def Requires(self, target, prerequisite):
"""Specify that 'prerequisite' must be built before 'target', """Specify that *prerequisite* must be built before *target*.
(but 'target' does not actually depend on 'prerequisite'
and need not be rebuilt if it changes).""" Creates an order-only relationship, not a full dependency.
*prerequisite* must exist before *target* can be built, but
a change to *prerequisite* does not trigger a rebuild of *target*.
"""
tlist = self.arg2nodes(target, self.fs.Entry) tlist = self.arg2nodes(target, self.fs.Entry)
plist = self.arg2nodes(prerequisite, self.fs.Entry) plist = self.arg2nodes(prerequisite, self.fs.Entry)
for t in tlist: for t in tlist:
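A hypothetical order-only example ('do-setup' is an invented command):

    env = Environment()
    setup = env.Command('setup.done', [], 'do-setup && date > $TARGET')
    lib = env.SharedLibrary('work', ['work.c'])
    env.Requires(lib, setup)   # setup.done is built first, but later changes to it
                               # never force 'work' to relink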
@ -2406,7 +2395,7 @@ class Base(SubstitutionEnvironment):
nkw = self.subst_kw(kw) nkw = self.subst_kw(kw)
return SCons.Scanner.ScannerBase(*nargs, **nkw) return SCons.Scanner.ScannerBase(*nargs, **nkw)
def SConsignFile(self, name=SCons.SConsign.current_sconsign_filename(), dbm_module=None): def SConsignFile(self, name=SCons.SConsign.current_sconsign_filename(), dbm_module=None) -> None:
if name is not None: if name is not None:
name = self.subst(name) name = self.subst(name)
if not os.path.isabs(name): if not os.path.isabs(name):
@ -2469,17 +2458,17 @@ class Base(SubstitutionEnvironment):
""" """
return SCons.Node.Python.ValueWithMemo(value, built_value, name) return SCons.Node.Python.ValueWithMemo(value, built_value, name)
def VariantDir(self, variant_dir, src_dir, duplicate=1): def VariantDir(self, variant_dir, src_dir, duplicate: int=1) -> None:
variant_dir = self.arg2nodes(variant_dir, self.fs.Dir)[0] variant_dir = self.arg2nodes(variant_dir, self.fs.Dir)[0]
src_dir = self.arg2nodes(src_dir, self.fs.Dir)[0] src_dir = self.arg2nodes(src_dir, self.fs.Dir)[0]
self.fs.VariantDir(variant_dir, src_dir, duplicate) self.fs.VariantDir(variant_dir, src_dir, duplicate)
def FindSourceFiles(self, node='.') -> list: def FindSourceFiles(self, node: str='.') -> list:
"""Return a list of all source files.""" """Return a list of all source files."""
node = self.arg2nodes(node, self.fs.Entry)[0] node = self.arg2nodes(node, self.fs.Entry)[0]
sources = [] sources = []
def build_source(ss): def build_source(ss) -> None:
for s in ss: for s in ss:
if isinstance(s, SCons.Node.FS.Dir): if isinstance(s, SCons.Node.FS.Dir):
build_source(s.all_children()) build_source(s.all_children())
@ -2527,7 +2516,7 @@ class OverrideEnvironment(Base):
values from the overrides dictionary. values from the overrides dictionary.
""" """
def __init__(self, subject, overrides=None): def __init__(self, subject, overrides=None) -> None:
if SCons.Debug.track_instances: logInstanceCreation(self, 'Environment.OverrideEnvironment') if SCons.Debug.track_instances: logInstanceCreation(self, 'Environment.OverrideEnvironment')
self.__dict__['__subject'] = subject self.__dict__['__subject'] = subject
if overrides is None: if overrides is None:
@ -2551,7 +2540,7 @@ class OverrideEnvironment(Base):
else: else:
return attr return attr
def __setattr__(self, name, value): def __setattr__(self, name, value) -> None:
setattr(self.__dict__['__subject'], name, value) setattr(self.__dict__['__subject'], name, value)
# Methods that make this class act like a dictionary. # Methods that make this class act like a dictionary.
@ -2588,7 +2577,7 @@ class OverrideEnvironment(Base):
except KeyError: except KeyError:
return self.__dict__['__subject'].get(key, default) return self.__dict__['__subject'].get(key, default)
def __contains__(self, key): def __contains__(self, key) -> bool:
if key in self.__dict__['overrides']: if key in self.__dict__['overrides']:
return True return True
return key in self.__dict__['__subject'] return key in self.__dict__['__subject']
@ -2624,10 +2613,10 @@ class OverrideEnvironment(Base):
return default return default
# Overridden private construction environment methods. # Overridden private construction environment methods.
def _update(self, other): def _update(self, other) -> None:
self.__dict__['overrides'].update(other) self.__dict__['overrides'].update(other)
def _update_onlynew(self, other): def _update_onlynew(self, other) -> None:
"""Update a dict with new keys. """Update a dict with new keys.
Unlike the .update method, if the key is already present, Unlike the .update method, if the key is already present,
@ -2646,7 +2635,7 @@ class OverrideEnvironment(Base):
return lvars return lvars
# Overridden public construction environment methods. # Overridden public construction environment methods.
def Replace(self, **kw): def Replace(self, **kw) -> None:
kw = copy_non_reserved_keywords(kw) kw = copy_non_reserved_keywords(kw)
self.__dict__['overrides'].update(semi_deepcopy(kw)) self.__dict__['overrides'].update(semi_deepcopy(kw))
@ -2675,7 +2664,7 @@ def NoSubstitutionProxy(subject):
might have assigned to SCons.Environment.Environment. might have assigned to SCons.Environment.Environment.
""" """
class _NoSubstitutionProxy(Environment): class _NoSubstitutionProxy(Environment):
def __init__(self, subject): def __init__(self, subject) -> None:
self.__dict__['__subject'] = subject self.__dict__['__subject'] = subject
def __getattr__(self, name): def __getattr__(self, name):
@ -2684,14 +2673,14 @@ def NoSubstitutionProxy(subject):
def __setattr__(self, name, value): def __setattr__(self, name, value):
return setattr(self.__dict__['__subject'], name, value) return setattr(self.__dict__['__subject'], name, value)
def executor_to_lvars(self, kwdict): def executor_to_lvars(self, kwdict) -> None:
if 'executor' in kwdict: if 'executor' in kwdict:
kwdict['lvars'] = kwdict['executor'].get_lvars() kwdict['lvars'] = kwdict['executor'].get_lvars()
del kwdict['executor'] del kwdict['executor']
else: else:
kwdict['lvars'] = {} kwdict['lvars'] = {}
def raw_to_mode(self, mapping): def raw_to_mode(self, mapping) -> None:
try: try:
raw = mapping['raw'] raw = mapping['raw']
except KeyError: except KeyError:
@ -72,7 +72,7 @@ class EnvironmentValue:
Hold a single value. We're going to cache parsed version of the file Hold a single value. We're going to cache parsed version of the file
We're going to keep track of variables which feed into this values evaluation We're going to keep track of variables which feed into this values evaluation
""" """
def __init__(self, value): def __init__(self, value) -> None:
self.value = value self.value = value
self.var_type = ValueTypes.UNKNOWN self.var_type = ValueTypes.UNKNOWN
@ -82,7 +82,7 @@ class EnvironmentValue:
self.parse_value() self.parse_value()
def parse_value(self): def parse_value(self) -> None:
""" """
Scan the string and break into component values Scan the string and break into component values
""" """
@ -99,7 +99,7 @@ class EnvironmentValue:
# likely callable? either way we don't parse # likely callable? either way we don't parse
self._parsed = self.value self._parsed = self.value
def parse_trial(self): def parse_trial(self) -> None:
""" """
Try alternate parsing methods. Try alternate parsing methods.
:return: :return:
@ -113,7 +113,7 @@ class EnvironmentValues:
""" """
A class to hold all the environment variables A class to hold all the environment variables
""" """
def __init__(self, **kw): def __init__(self, **kw) -> None:
self._dict = {} self._dict = {}
for k in kw: for k in kw:
self._dict[k] = EnvironmentValue(kw[k]) self._dict[k] = EnvironmentValue(kw[k])
@ -26,7 +26,7 @@ import unittest
from SCons.EnvironmentValues import EnvironmentValues from SCons.EnvironmentValues import EnvironmentValues
class MyTestCase(unittest.TestCase): class MyTestCase(unittest.TestCase):
def test_simple_environmentValues(self): def test_simple_environmentValues(self) -> None:
"""Test comparing SubstitutionEnvironments """Test comparing SubstitutionEnvironments
""" """

View file

@ -27,7 +27,10 @@ Used to handle internal and user errors in SCons.
""" """
import shutil import shutil
import SCons.Util from typing import Optional
from SCons.Util.sctypes import to_String, is_String
from SCons.Util.sctyping import ExecutorType
# Note that not all Errors are defined here, some are at the point of use # Note that not all Errors are defined here, some are at the point of use
@ -71,13 +74,13 @@ class BuildError(Exception):
""" """
def __init__(self, def __init__(self,
node=None, errstr="Unknown error", status=2, exitstatus=2, node=None, errstr: str="Unknown error", status: int=2, exitstatus: int=2,
filename=None, executor=None, action=None, command=None, filename=None, executor: Optional[ExecutorType] = None, action=None, command=None,
exc_info=(None, None, None)): exc_info=(None, None, None)) -> None:
# py3: errstr should be string and not bytes. # py3: errstr should be string and not bytes.
self.errstr = SCons.Util.to_String(errstr) self.errstr = to_String(errstr)
self.status = status self.status = status
self.exitstatus = exitstatus self.exitstatus = exitstatus
self.filename = filename self.filename = filename
@ -91,7 +94,7 @@ class BuildError(Exception):
super().__init__(node, errstr, status, exitstatus, filename, super().__init__(node, errstr, status, exitstatus, filename,
executor, action, command, exc_info) executor, action, command, exc_info)
def __str__(self): def __str__(self) -> str:
if self.filename: if self.filename:
return self.filename + ': ' + self.errstr return self.filename + ': ' + self.errstr
else: else:
@ -113,7 +116,7 @@ class MSVCError(IOError):
pass pass
class ExplicitExit(Exception): class ExplicitExit(Exception):
def __init__(self, node=None, status=None, *args): def __init__(self, node=None, status=None, *args) -> None:
self.node = node self.node = node
self.status = status self.status = status
self.exitstatus = status self.exitstatus = status
@ -189,7 +192,7 @@ def convert_to_BuildError(status, exc_info=None):
status=2, status=2,
exitstatus=2, exitstatus=2,
exc_info=exc_info) exc_info=exc_info)
elif SCons.Util.is_String(status): elif is_String(status):
buildError = BuildError( buildError = BuildError(
errstr=status, errstr=status,
status=2, status=2,
@ -24,6 +24,7 @@
"""Execute actions with specific lists of target and source Nodes.""" """Execute actions with specific lists of target and source Nodes."""
import collections import collections
from typing import Dict
import SCons.Errors import SCons.Errors
import SCons.Memoize import SCons.Memoize
@ -31,6 +32,7 @@ import SCons.Util
from SCons.compat import NoSlotsPyPy from SCons.compat import NoSlotsPyPy
import SCons.Debug import SCons.Debug
from SCons.Debug import logInstanceCreation from SCons.Debug import logInstanceCreation
from SCons.Util.sctyping import ExecutorType
class Batch: class Batch:
"""Remembers exact association between targets """Remembers exact association between targets
@ -39,7 +41,7 @@ class Batch:
__slots__ = ('targets', __slots__ = ('targets',
'sources') 'sources')
def __init__(self, targets=[], sources=[]): def __init__(self, targets=[], sources=[]) -> None:
self.targets = targets self.targets = targets
self.sources = sources self.sources = sources
@ -55,7 +57,7 @@ class TSList(collections.UserList):
a list during variable expansion. We're not really using any a list during variable expansion. We're not really using any
collections.UserList methods in practice. collections.UserList methods in practice.
""" """
def __init__(self, func): def __init__(self, func) -> None:
self.func = func self.func = func
def __getattr__(self, attr): def __getattr__(self, attr):
nl = self.func() nl = self.func()
@ -63,10 +65,10 @@ class TSList(collections.UserList):
def __getitem__(self, i): def __getitem__(self, i):
nl = self.func() nl = self.func()
return nl[i] return nl[i]
def __str__(self): def __str__(self) -> str:
nl = self.func() nl = self.func()
return str(nl) return str(nl)
def __repr__(self): def __repr__(self) -> str:
nl = self.func() nl = self.func()
return repr(nl) return repr(nl)
@ -74,17 +76,17 @@ class TSObject:
"""A class that implements $TARGET or $SOURCE expansions by wrapping """A class that implements $TARGET or $SOURCE expansions by wrapping
an Executor method. an Executor method.
""" """
def __init__(self, func): def __init__(self, func) -> None:
self.func = func self.func = func
def __getattr__(self, attr): def __getattr__(self, attr):
n = self.func() n = self.func()
return getattr(n, attr) return getattr(n, attr)
def __str__(self): def __str__(self) -> str:
n = self.func() n = self.func()
if n: if n:
return str(n) return str(n)
return '' return ''
def __repr__(self): def __repr__(self) -> str:
n = self.func() n = self.func()
if n: if n:
return repr(n) return repr(n)
@ -104,7 +106,7 @@ def rfile(node):
return rfile() return rfile()
def execute_nothing(obj, target, kw): def execute_nothing(obj, target, kw) -> int:
return 0 return 0
def execute_action_list(obj, target, kw): def execute_action_list(obj, target, kw):
@ -138,14 +140,14 @@ def execute_actions_str(obj):
env) env)
for action in obj.get_action_list()]) for action in obj.get_action_list()])
def execute_null_str(obj): def execute_null_str(obj) -> str:
return '' return ''
_execute_str_map = {0 : execute_null_str, _execute_str_map = {0 : execute_null_str,
1 : execute_actions_str} 1 : execute_actions_str}
class Executor(object, metaclass=NoSlotsPyPy): class Executor(metaclass=NoSlotsPyPy):
"""A class for controlling instances of executing an action. """A class for controlling instances of executing an action.
This largely exists to hold a single association of an action, This largely exists to hold a single association of an action,
@ -170,7 +172,7 @@ class Executor(object, metaclass=NoSlotsPyPy):
'_execute_str') '_execute_str')
def __init__(self, action, env=None, overridelist=[{}], def __init__(self, action, env=None, overridelist=[{}],
targets=[], sources=[], builder_kw={}): targets=[], sources=[], builder_kw={}) -> None:
if SCons.Debug.track_instances: logInstanceCreation(self, 'Executor.Executor') if SCons.Debug.track_instances: logInstanceCreation(self, 'Executor.Executor')
self.set_action_list(action) self.set_action_list(action)
self.pre_actions = [] self.pre_actions = []
@ -202,7 +204,7 @@ class Executor(object, metaclass=NoSlotsPyPy):
} }
return self.lvars return self.lvars
def _get_changes(self): def _get_changes(self) -> None:
cs = [] cs = []
ct = [] ct = []
us = [] us = []
@ -383,10 +385,10 @@ class Executor(object, metaclass=NoSlotsPyPy):
def __call__(self, target, **kw): def __call__(self, target, **kw):
return _do_execute_map[self._do_execute](self, target, kw) return _do_execute_map[self._do_execute](self, target, kw)
def cleanup(self): def cleanup(self) -> None:
self._memo = {} self._memo = {}
def add_sources(self, sources): def add_sources(self, sources) -> None:
"""Add source files to this Executor's list. This is necessary """Add source files to this Executor's list. This is necessary
for "multi" Builders that can be called repeatedly to build up for "multi" Builders that can be called repeatedly to build up
a source file list for a given target.""" a source file list for a given target."""
@ -399,7 +401,7 @@ class Executor(object, metaclass=NoSlotsPyPy):
def get_sources(self): def get_sources(self):
return self.batches[0].sources return self.batches[0].sources
def add_batch(self, targets, sources): def add_batch(self, targets, sources) -> None:
"""Add pair of associated target and source to this Executor's list. """Add pair of associated target and source to this Executor's list.
This is necessary for "batch" Builders that can be called repeatedly This is necessary for "batch" Builders that can be called repeatedly
to build up a list of matching target and source files that will be to build up a list of matching target and source files that will be
@ -417,18 +419,18 @@ class Executor(object, metaclass=NoSlotsPyPy):
msg = "Source `%s' not found, needed by target `%s'." msg = "Source `%s' not found, needed by target `%s'."
raise SCons.Errors.StopError(msg % (s, self.batches[0].targets[0])) raise SCons.Errors.StopError(msg % (s, self.batches[0].targets[0]))
def add_pre_action(self, action): def add_pre_action(self, action) -> None:
self.pre_actions.append(action) self.pre_actions.append(action)
def add_post_action(self, action): def add_post_action(self, action) -> None:
self.post_actions.append(action) self.post_actions.append(action)
# another extra indirection for new-style objects and nullify... # another extra indirection for new-style objects and nullify...
def __str__(self): def __str__(self) -> str:
return _execute_str_map[self._execute_str](self) return _execute_str_map[self._execute_str](self)
def nullify(self): def nullify(self) -> None:
self.cleanup() self.cleanup()
self._do_execute = 0 self._do_execute = 0
self._execute_str = 0 self._execute_str = 0
@ -459,23 +461,23 @@ class Executor(object, metaclass=NoSlotsPyPy):
self._memo['get_contents'] = result self._memo['get_contents'] = result
return result return result
def get_timestamp(self): def get_timestamp(self) -> int:
"""Fetch a time stamp for this Executor. We don't have one, of """Fetch a time stamp for this Executor. We don't have one, of
course (only files do), but this is the interface used by the course (only files do), but this is the interface used by the
timestamp module. timestamp module.
""" """
return 0 return 0
def scan_targets(self, scanner): def scan_targets(self, scanner) -> None:
# TODO(batch): scan by batches # TODO(batch): scan by batches
self.scan(scanner, self.get_all_targets()) self.scan(scanner, self.get_all_targets())
def scan_sources(self, scanner): def scan_sources(self, scanner) -> None:
# TODO(batch): scan by batches # TODO(batch): scan by batches
if self.batches[0].sources: if self.batches[0].sources:
self.scan(scanner, self.get_all_sources()) self.scan(scanner, self.get_all_sources())
def scan(self, scanner, node_list): def scan(self, scanner, node_list) -> None:
"""Scan a list of this Executor's files (targets or sources) for """Scan a list of this Executor's files (targets or sources) for
implicit dependencies and update all of the targets with them. implicit dependencies and update all of the targets with them.
This essentially short-circuits an N*M scan of the sources for This essentially short-circuits an N*M scan of the sources for
@ -548,12 +550,12 @@ class Executor(object, metaclass=NoSlotsPyPy):
_batch_executors = {} _batch_executors: Dict[str, ExecutorType] = {}
def GetBatchExecutor(key): def GetBatchExecutor(key: str) -> ExecutorType:
return _batch_executors[key] return _batch_executors[key]
def AddBatchExecutor(key, executor): def AddBatchExecutor(key: str, executor: ExecutorType) -> None:
assert key not in _batch_executors assert key not in _batch_executors
_batch_executors[key] = executor _batch_executors[key] = executor
@ -576,7 +578,7 @@ def get_NullEnvironment():
nullenv = NullEnvironment() nullenv = NullEnvironment()
return nullenv return nullenv
class Null(object, metaclass=NoSlotsPyPy): class Null(metaclass=NoSlotsPyPy):
"""A null Executor, with a null build Environment, that does """A null Executor, with a null build Environment, that does
nothing when the rest of the methods call it. nothing when the rest of the methods call it.
@ -601,7 +603,7 @@ class Null(object, metaclass=NoSlotsPyPy):
'_do_execute', '_do_execute',
'_execute_str') '_execute_str')
def __init__(self, *args, **kw): def __init__(self, *args, **kw) -> None:
if SCons.Debug.track_instances: if SCons.Debug.track_instances:
logInstanceCreation(self, 'Executor.Null') logInstanceCreation(self, 'Executor.Null')
self.batches = [Batch(kw['targets'][:], [])] self.batches = [Batch(kw['targets'][:], [])]
@ -609,9 +611,9 @@ class Null(object, metaclass=NoSlotsPyPy):
return get_NullEnvironment() return get_NullEnvironment()
def get_build_scanner_path(self): def get_build_scanner_path(self):
return None return None
def cleanup(self): def cleanup(self) -> None:
pass pass
def prepare(self): def prepare(self) -> None:
pass pass
def get_unignored_sources(self, *args, **kw): def get_unignored_sources(self, *args, **kw):
return tuple(()) return tuple(())
@ -629,11 +631,11 @@ class Null(object, metaclass=NoSlotsPyPy):
return [] return []
def get_action_side_effects(self): def get_action_side_effects(self):
return [] return []
def __call__(self, *args, **kw): def __call__(self, *args, **kw) -> int:
return 0 return 0
def get_contents(self): def get_contents(self) -> str:
return '' return ''
def _morph(self): def _morph(self) -> None:
"""Morph this Null executor to a real Executor object.""" """Morph this Null executor to a real Executor object."""
batches = self.batches batches = self.batches
self.__class__ = Executor self.__class__ = Executor
@ -643,13 +645,13 @@ class Null(object, metaclass=NoSlotsPyPy):
# The following methods require morphing this Null Executor to a # The following methods require morphing this Null Executor to a
# real Executor object. # real Executor object.
def add_pre_action(self, action): def add_pre_action(self, action) -> None:
self._morph() self._morph()
self.add_pre_action(action) self.add_pre_action(action)
def add_post_action(self, action): def add_post_action(self, action) -> None:
self._morph() self._morph()
self.add_post_action(action) self.add_post_action(action)
def set_action_list(self, action): def set_action_list(self, action) -> None:
self._morph() self._morph()
self.set_action_list(action) self.set_action_list(action)
@ -111,7 +111,7 @@ class Counter:
fill in the correct class name and method name that represents fill in the correct class name and method name that represents
the name of the function being counted. the name of the function being counted.
""" """
def __init__(self, cls_name, method_name): def __init__(self, cls_name, method_name) -> None:
""" """
""" """
self.cls_name = cls_name self.cls_name = cls_name
@ -120,8 +120,8 @@ class Counter:
self.miss = 0 self.miss = 0
def key(self): def key(self):
return self.cls_name+'.'+self.method_name return self.cls_name+'.'+self.method_name
def display(self): def display(self) -> None:
print(" {:7d} hits {:7d} misses {}()".format(self.hit, self.miss, self.key())) print(f" {self.hit:7d} hits {self.miss:7d} misses {self.key()}()")
def __eq__(self, other): def __eq__(self, other):
try: try:
return self.key() == other.key() return self.key() == other.key()
@ -136,7 +136,7 @@ class CountValue(Counter):
the class's methods that memoizes its return value by simply storing the class's methods that memoizes its return value by simply storing
the return value in its _memo dictionary. the return value in its _memo dictionary.
""" """
def count(self, *args, **kw): def count(self, *args, **kw) -> None:
""" Counts whether the memoized value has already been """ Counts whether the memoized value has already been
set (a hit) or not (a miss). set (a hit) or not (a miss).
""" """
@ -156,12 +156,12 @@ class CountDict(Counter):
indexed by some key that can be computed from one or more of indexed by some key that can be computed from one or more of
its input arguments. its input arguments.
""" """
def __init__(self, cls_name, method_name, keymaker): def __init__(self, cls_name, method_name, keymaker) -> None:
""" """
""" """
super().__init__(cls_name, method_name) super().__init__(cls_name, method_name)
self.keymaker = keymaker self.keymaker = keymaker
def count(self, *args, **kw): def count(self, *args, **kw) -> None:
""" Counts whether the computed key value is already present """ Counts whether the computed key value is already present
in the memoization dictionary (a hit) or not (a miss). in the memoization dictionary (a hit) or not (a miss).
""" """
@ -177,7 +177,7 @@ class CountDict(Counter):
else: else:
self.miss = self.miss + 1 self.miss = self.miss + 1
def Dump(title=None): def Dump(title=None) -> None:
""" Dump the hit/miss count for all the counters """ Dump the hit/miss count for all the counters
collected so far. collected so far.
""" """
@ -186,7 +186,7 @@ def Dump(title=None):
for counter in sorted(CounterList): for counter in sorted(CounterList):
CounterList[counter].display() CounterList[counter].display()
def EnableMemoization(): def EnableMemoization() -> None:
global use_memoizer global use_memoizer
use_memoizer = 1 use_memoizer = 1
@ -78,7 +78,7 @@ class AliasNodeInfo(SCons.Node.NodeInfoBase):
return state return state
def __setstate__(self, state): def __setstate__(self, state) -> None:
""" """
Restore the attributes from a pickled state. Restore the attributes from a pickled state.
""" """
@ -98,7 +98,7 @@ class Alias(SCons.Node.Node):
NodeInfo = AliasNodeInfo NodeInfo = AliasNodeInfo
BuildInfo = AliasBuildInfo BuildInfo = AliasBuildInfo
def __init__(self, name): def __init__(self, name) -> None:
super().__init__() super().__init__()
self.name = name self.name = name
self.changed_since_last_build = 1 self.changed_since_last_build = 1
@ -107,20 +107,20 @@ class Alias(SCons.Node.Node):
def str_for_display(self): def str_for_display(self):
return '"' + self.__str__() + '"' return '"' + self.__str__() + '"'
def __str__(self): def __str__(self) -> str:
return self.name return self.name
def make_ready(self): def make_ready(self) -> None:
self.get_csig() self.get_csig()
really_build = SCons.Node.Node.build really_build = SCons.Node.Node.build
is_up_to_date = SCons.Node.Node.children_are_up_to_date is_up_to_date = SCons.Node.Node.children_are_up_to_date
def is_under(self, dir): def is_under(self, dir) -> bool:
# Make Alias nodes get built regardless of # Make Alias nodes get built regardless of
# what directory scons was run from. Alias nodes # what directory scons was run from. Alias nodes
# are outside the filesystem: # are outside the filesystem:
return 1 return True
def get_contents(self): def get_contents(self):
"""The contents of an alias is the concatenation """The contents of an alias is the concatenation
@ -128,7 +128,7 @@ class Alias(SCons.Node.Node):
childsigs = [n.get_csig() for n in self.children()] childsigs = [n.get_csig() for n in self.children()]
return ''.join(childsigs) return ''.join(childsigs)
def sconsign(self): def sconsign(self) -> None:
"""An Alias is not recorded in .sconsign files""" """An Alias is not recorded in .sconsign files"""
pass pass
@ -136,11 +136,11 @@ class Alias(SCons.Node.Node):
# #
# #
def build(self): def build(self) -> None:
"""A "builder" for aliases.""" """A "builder" for aliases."""
pass pass
def convert(self): def convert(self) -> None:
try: del self.builder try: del self.builder
except AttributeError: pass except AttributeError: pass
self.reset_executor() self.reset_executor()
@ -81,11 +81,11 @@ class EntryProxyAttributeError(AttributeError):
An AttributeError subclass for recording and displaying the name An AttributeError subclass for recording and displaying the name
of the underlying Entry involved in an AttributeError exception. of the underlying Entry involved in an AttributeError exception.
""" """
def __init__(self, entry_proxy, attribute): def __init__(self, entry_proxy, attribute) -> None:
super().__init__() super().__init__()
self.entry_proxy = entry_proxy self.entry_proxy = entry_proxy
self.attribute = attribute self.attribute = attribute
def __str__(self): def __str__(self) -> str:
entry = self.entry_proxy.get() entry = self.entry_proxy.get()
fmt = "%s instance %s has no attribute %s" fmt = "%s instance %s has no attribute %s"
return fmt % (entry.__class__.__name__, return fmt % (entry.__class__.__name__,
@ -116,7 +116,7 @@ default_max_drift = 2*24*60*60
# #
Save_Strings = None Save_Strings = None
def save_strings(val): def save_strings(val) -> None:
global Save_Strings global Save_Strings
Save_Strings = val Save_Strings = val
@ -130,7 +130,7 @@ def save_strings(val):
do_splitdrive = None do_splitdrive = None
_my_splitdrive = None _my_splitdrive = None
def initialize_do_splitdrive(): def initialize_do_splitdrive() -> None:
global do_splitdrive global do_splitdrive
global has_unc global has_unc
drive, path = os.path.splitdrive('X:/foo') drive, path = os.path.splitdrive('X:/foo')
@ -231,7 +231,7 @@ needs_normpath_match = needs_normpath_check.match
# TODO: See if there's a reasonable way to enable using links on win32/64 # TODO: See if there's a reasonable way to enable using links on win32/64
if hasattr(os, 'link') and sys.platform != 'win32': if hasattr(os, 'link') and sys.platform != 'win32':
def _hardlink_func(fs, src, dst): def _hardlink_func(fs, src, dst) -> None:
# If the source is a symlink, we can't just hard-link to it # If the source is a symlink, we can't just hard-link to it
# because a relative symlink may point somewhere completely # because a relative symlink may point somewhere completely
# different. We must disambiguate the symlink and then # different. We must disambiguate the symlink and then
@ -247,12 +247,12 @@ else:
_hardlink_func = None _hardlink_func = None
if hasattr(os, 'symlink') and sys.platform != 'win32': if hasattr(os, 'symlink') and sys.platform != 'win32':
def _softlink_func(fs, src, dst): def _softlink_func(fs, src, dst) -> None:
fs.symlink(src, dst) fs.symlink(src, dst)
else: else:
_softlink_func = None _softlink_func = None
def _copy_func(fs, src, dest): def _copy_func(fs, src, dest) -> None:
shutil.copy2(src, dest) shutil.copy2(src, dest)
st = fs.stat(src) st = fs.stat(src)
fs.chmod(dest, stat.S_IMODE(st.st_mode) | stat.S_IWRITE) fs.chmod(dest, stat.S_IMODE(st.st_mode) | stat.S_IWRITE)
@ -286,7 +286,7 @@ def set_duplicate(duplicate):
if link_dict[func]: if link_dict[func]:
Link_Funcs.append(link_dict[func]) Link_Funcs.append(link_dict[func])
def LinkFunc(target, source, env): def LinkFunc(target, source, env) -> int:
""" """
Relative paths cause problems with symbolic links, so Relative paths cause problems with symbolic links, so
we use absolute paths, which may be a problem for people we use absolute paths, which may be a problem for people
@ -308,7 +308,7 @@ def LinkFunc(target, source, env):
try: try:
func(fs, src, dest) func(fs, src, dest)
break break
except (IOError, OSError): except OSError:
# An OSError indicates something happened like a permissions # An OSError indicates something happened like a permissions
# problem or an attempt to symlink across file-system # problem or an attempt to symlink across file-system
# boundaries. An IOError indicates something like the file # boundaries. An IOError indicates something like the file
@ -321,27 +321,36 @@ def LinkFunc(target, source, env):
return 0 return 0
Link = SCons.Action.Action(LinkFunc, None) Link = SCons.Action.Action(LinkFunc, None)
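LinkFunc above tries each configured duplication function in turn and falls back to the next one when an OSError is raised (since Python 3.3, IOError is only an alias of OSError, which is why the two-member except clause could be collapsed). A minimal sketch of that fallback pattern, with hypothetical stand-ins for the hard-link, soft-link and copy functions:

    import os
    import shutil

    def _hardlink(src, dst): os.link(src, dst)
    def _softlink(src, dst): os.symlink(src, dst)
    def _copy(src, dst): shutil.copy2(src, dst)

    def duplicate(src, dst, funcs=(_hardlink, _softlink, _copy)):
        """Try each duplication strategy in order; fall back on OSError."""
        for func in funcs:
            try:
                func(src, dst)
                return
            except OSError:
                continue  # e.g. cross-device link or permission problem
        raise OSError(f"could not duplicate {src} to {dst}")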
def LocalString(target, source, env): def LocalString(target, source, env) -> str:
return 'Local copy of %s from %s' % (target[0], source[0]) return 'Local copy of %s from %s' % (target[0], source[0])
LocalCopy = SCons.Action.Action(LinkFunc, LocalString) LocalCopy = SCons.Action.Action(LinkFunc, LocalString)
def UnlinkFunc(target, source, env): def UnlinkFunc(target, source, env) -> int:
t = target[0] t = target[0]
t.fs.unlink(t.get_abspath()) t.fs.unlink(t.get_abspath())
return 0 return 0
Unlink = SCons.Action.Action(UnlinkFunc, None) Unlink = SCons.Action.Action(UnlinkFunc, None)
def MkdirFunc(target, source, env): def MkdirFunc(target, source, env) -> int:
t = target[0] t = target[0]
# This os.path.exists test looks redundant, but it's possible # - It's possible when using Install() to install multiple
# when using Install() to install multiple dirs outside the # dirs outside the source tree to get a case where t.exists()
# source tree to get a case where t.exists() is true but # is false but the path does already exist.
# the path does already exist, so this prevents spurious # - It's also possible for multiple SCons processes to try to create
# build failures in that case. See test/Install/multi-dir. # multiple build directories when processing SConscript files with
if not t.exists() and not os.path.exists(t.get_abspath()): # variant dirs.
t.fs.mkdir(t.get_abspath()) # Catching OS exceptions and ensuring directory existence prevents
# build failures in these cases. See test/Install/multi-dir.
if not t.exists():
abs_path = t.get_abspath()
try:
t.fs.mkdir(abs_path)
except FileExistsError:
pass
return 0 return 0
Mkdir = SCons.Action.Action(MkdirFunc, None, presub=None) Mkdir = SCons.Action.Action(MkdirFunc, None, presub=None)
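The rewritten MkdirFunc above tolerates a directory appearing between the exists() check and the mkdir() call, for example when several SCons processes create the same variant directory at once; catching FileExistsError makes the creation idempotent. A minimal sketch of the same pattern outside SCons:

    import os

    def ensure_dir(path: str) -> None:
        """Create path if missing; tolerate a concurrent creator."""
        if not os.path.isdir(path):
            try:
                os.mkdir(path)
            except FileExistsError:
                pass  # another process won the race; the directory exists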
@ -385,7 +394,7 @@ class DiskChecker:
This Class will hold functions to determine what this particular disk This Class will hold functions to determine what this particular disk
checking implementation should do when enabled or disabled. checking implementation should do when enabled or disabled.
""" """
def __init__(self, disk_check_type, do_check_function, ignore_check_function): def __init__(self, disk_check_type, do_check_function, ignore_check_function) -> None:
self.disk_check_type = disk_check_type self.disk_check_type = disk_check_type
self.do_check_function = do_check_function self.do_check_function = do_check_function
self.ignore_check_function = ignore_check_function self.ignore_check_function = ignore_check_function
@ -394,7 +403,7 @@ class DiskChecker:
def __call__(self, *args, **kw): def __call__(self, *args, **kw):
return self.func(*args, **kw) return self.func(*args, **kw)
def enable(self, disk_check_type_list): def enable(self, disk_check_type_list) -> None:
""" """
If the current object's disk_check_type matches any in the list passed If the current object's disk_check_type matches any in the list passed
:param disk_check_type_list: List of disk checks to enable :param disk_check_type_list: List of disk checks to enable
@ -423,7 +432,7 @@ def do_diskcheck_match(node, predicate, errorfmt):
raise TypeError(errorfmt % node.get_abspath()) raise TypeError(errorfmt % node.get_abspath())
def ignore_diskcheck_match(node, predicate, errorfmt): def ignore_diskcheck_match(node, predicate, errorfmt) -> None:
pass pass
@ -434,7 +443,7 @@ diskcheckers = [
] ]
def set_diskcheck(enabled_checkers): def set_diskcheck(enabled_checkers) -> None:
for dc in diskcheckers: for dc in diskcheckers:
dc.enable(enabled_checkers) dc.enable(enabled_checkers)
@ -584,7 +593,7 @@ class Base(SCons.Node.Node):
'_proxy', '_proxy',
'_func_sconsign'] '_func_sconsign']
def __init__(self, name, directory, fs): def __init__(self, name, directory, fs) -> None:
"""Initialize a generic Node.FS.Base object. """Initialize a generic Node.FS.Base object.
Call the superclass initialization, take care of setting up Call the superclass initialization, take care of setting up
@ -663,7 +672,7 @@ class Base(SCons.Node.Node):
raise AttributeError("%r object has no attribute %r" % raise AttributeError("%r object has no attribute %r" %
(self.__class__, attr)) (self.__class__, attr))
def __str__(self): def __str__(self) -> str:
"""A Node.FS.Base object's string representation is its path """A Node.FS.Base object's string representation is its path
name.""" name."""
global Save_Strings global Save_Strings
@ -762,30 +771,30 @@ class Base(SCons.Node.Node):
else: else:
return None return None
def isdir(self): def isdir(self) -> bool:
st = self.stat() st = self.stat()
return st is not None and stat.S_ISDIR(st.st_mode) return st is not None and stat.S_ISDIR(st.st_mode)
def isfile(self): def isfile(self) -> bool:
st = self.stat() st = self.stat()
return st is not None and stat.S_ISREG(st.st_mode) return st is not None and stat.S_ISREG(st.st_mode)
if hasattr(os, 'symlink'): if hasattr(os, 'symlink'):
def islink(self): def islink(self) -> bool:
st = self.lstat() st = self.lstat()
return st is not None and stat.S_ISLNK(st.st_mode) return st is not None and stat.S_ISLNK(st.st_mode)
else: else:
def islink(self): def islink(self) -> bool:
return False # no symlinks return False # no symlinks
def is_under(self, dir): def is_under(self, dir) -> bool:
if self is dir: if self is dir:
return 1 return True
else: else:
return self.dir.is_under(dir) return self.dir.is_under(dir)
def set_local(self): def set_local(self) -> None:
self._local = 1 self._local = True
def srcnode(self): def srcnode(self):
"""If this node is in a build path, return the node """If this node is in a build path, return the node
@ -817,7 +826,7 @@ class Base(SCons.Node.Node):
pathname += p.dirname pathname += p.dirname
return pathname + path_elems[-1].name return pathname + path_elems[-1].name
def set_src_builder(self, builder): def set_src_builder(self, builder) -> None:
"""Set the source code builder for this node.""" """Set the source code builder for this node."""
self.sbuilder = builder self.sbuilder = builder
if not self.has_builder(): if not self.has_builder():
@ -951,7 +960,7 @@ class Base(SCons.Node.Node):
self._memo['rentry'] = result self._memo['rentry'] = result
return result return result
def _glob1(self, pattern, ondisk=True, source=False, strings=False): def _glob1(self, pattern, ondisk: bool=True, source: bool=False, strings: bool=False):
return [] return []
# Dict that provides a simple backward compatibility # Dict that provides a simple backward compatibility
@ -989,12 +998,12 @@ class Entry(Base):
'released_target_info', 'released_target_info',
'contentsig'] 'contentsig']
def __init__(self, name, directory, fs): def __init__(self, name, directory, fs) -> None:
super().__init__(name, directory, fs) super().__init__(name, directory, fs)
self._func_exists = 3 self._func_exists = 3
self._func_get_contents = 1 self._func_get_contents = 1
def diskcheck_match(self): def diskcheck_match(self) -> None:
pass pass
def disambiguate(self, must_exist=None): def disambiguate(self, must_exist=None):
@ -1048,7 +1057,7 @@ class Entry(Base):
contents of the file.""" contents of the file."""
return SCons.Node._get_contents_map[self._func_get_contents](self) return SCons.Node._get_contents_map[self._func_get_contents](self)
def get_text_contents(self): def get_text_contents(self) -> str:
"""Fetch the decoded text contents of a Unicode encoded Entry. """Fetch the decoded text contents of a Unicode encoded Entry.
Since this should return the text contents from the file Since this should return the text contents from the file
@ -1064,9 +1073,10 @@ class Entry(Base):
# hand or catch the exception. # hand or catch the exception.
return '' return ''
else: else:
# now we're a different node type, call its method to get the text.
return self.get_text_contents() return self.get_text_contents()
def must_be_same(self, klass): def must_be_same(self, klass) -> None:
"""Called to make sure a Node is a Dir. Since we're an """Called to make sure a Node is a Dir. Since we're an
Entry, we can morph into one.""" Entry, we can morph into one."""
if self.__class__ is not klass: if self.__class__ is not klass:
@ -1097,7 +1107,7 @@ class Entry(Base):
def new_ninfo(self): def new_ninfo(self):
return self.disambiguate().new_ninfo() return self.disambiguate().new_ninfo()
def _glob1(self, pattern, ondisk=True, source=False, strings=False): def _glob1(self, pattern, ondisk: bool=True, source: bool=False, strings: bool=False):
return self.disambiguate()._glob1(pattern, ondisk, source, strings) return self.disambiguate()._glob1(pattern, ondisk, source, strings)
def get_subst_proxy(self): def get_subst_proxy(self):
@ -1144,10 +1154,10 @@ class LocalFS:
def getsize(self, path): def getsize(self, path):
return os.path.getsize(path) return os.path.getsize(path)
def isdir(self, path): def isdir(self, path) -> bool:
return os.path.isdir(path) return os.path.isdir(path)
def isfile(self, path): def isfile(self, path) -> bool:
return os.path.isfile(path) return os.path.isfile(path)
def link(self, src, dst): def link(self, src, dst):
@ -1162,10 +1172,10 @@ class LocalFS:
def scandir(self, path): def scandir(self, path):
return os.scandir(path) return os.scandir(path)
def makedirs(self, path, mode=0o777, exist_ok=False): def makedirs(self, path, mode: int=0o777, exist_ok: bool=False):
return os.makedirs(path, mode=mode, exist_ok=exist_ok) return os.makedirs(path, mode=mode, exist_ok=exist_ok)
def mkdir(self, path, mode=0o777): def mkdir(self, path, mode: int=0o777):
return os.mkdir(path, mode=mode) return os.mkdir(path, mode=mode)
def rename(self, old, new): def rename(self, old, new):
@ -1185,28 +1195,28 @@ class LocalFS:
if hasattr(os, 'symlink'): if hasattr(os, 'symlink'):
def islink(self, path): def islink(self, path) -> bool:
return os.path.islink(path) return os.path.islink(path)
else: else:
def islink(self, path): def islink(self, path) -> bool:
return False # no symlinks return False # no symlinks
if hasattr(os, 'readlink'): if hasattr(os, 'readlink'):
def readlink(self, file): def readlink(self, file) -> str:
return os.readlink(file) return os.readlink(file)
else: else:
def readlink(self, file): def readlink(self, file) -> str:
return '' return ''
class FS(LocalFS): class FS(LocalFS):
def __init__(self, path = None): def __init__(self, path = None) -> None:
"""Initialize the Node.FS subsystem. """Initialize the Node.FS subsystem.
The supplied path is the top of the source tree, where we The supplied path is the top of the source tree, where we
@ -1238,13 +1248,13 @@ class FS(LocalFS):
DirNodeInfo.fs = self DirNodeInfo.fs = self
FileNodeInfo.fs = self FileNodeInfo.fs = self
def set_SConstruct_dir(self, dir): def set_SConstruct_dir(self, dir) -> None:
self.SConstruct_dir = dir self.SConstruct_dir = dir
def get_max_drift(self): def get_max_drift(self):
return self.max_drift return self.max_drift
def set_max_drift(self, max_drift): def set_max_drift(self, max_drift) -> None:
self.max_drift = max_drift self.max_drift = max_drift
def getcwd(self): def getcwd(self):
@ -1253,7 +1263,7 @@ class FS(LocalFS):
else: else:
return "<no cwd>" return "<no cwd>"
def chdir(self, dir, change_os_dir=False): def chdir(self, dir, change_os_dir: bool=False):
"""Change the current working directory for lookups. """Change the current working directory for lookups.
If change_os_dir is true, we will also change the "real" cwd If change_os_dir is true, we will also change the "real" cwd
to match. to match.
@ -1285,7 +1295,7 @@ class FS(LocalFS):
self.Root[''] = root self.Root[''] = root
return root return root
def _lookup(self, p, directory, fsclass, create=1): def _lookup(self, p, directory, fsclass, create: bool = True):
""" """
The generic entry point for Node lookup with user-supplied data. The generic entry point for Node lookup with user-supplied data.
@ -1421,7 +1431,7 @@ class FS(LocalFS):
return root._lookup_abs(p, fsclass, create) return root._lookup_abs(p, fsclass, create)
def Entry(self, name, directory = None, create = 1): def Entry(self, name, directory = None, create: bool = True):
"""Look up or create a generic Entry node with the specified name. """Look up or create a generic Entry node with the specified name.
If the name is a relative path (begins with ./, ../, or a file If the name is a relative path (begins with ./, ../, or a file
name), then it is looked up relative to the supplied directory name), then it is looked up relative to the supplied directory
@ -1430,7 +1440,7 @@ class FS(LocalFS):
""" """
return self._lookup(name, directory, Entry, create) return self._lookup(name, directory, Entry, create)
def File(self, name, directory = None, create = 1): def File(self, name, directory = None, create: bool = True):
"""Look up or create a File node with the specified name. If """Look up or create a File node with the specified name. If
the name is a relative path (begins with ./, ../, or a file name), the name is a relative path (begins with ./, ../, or a file name),
then it is looked up relative to the supplied directory node, then it is looked up relative to the supplied directory node,
@ -1442,7 +1452,7 @@ class FS(LocalFS):
""" """
return self._lookup(name, directory, File, create) return self._lookup(name, directory, File, create)
def Dir(self, name, directory = None, create = True): def Dir(self, name, directory = None, create: bool = True):
"""Look up or create a Dir node with the specified name. If """Look up or create a Dir node with the specified name. If
the name is a relative path (begins with ./, ../, or a file name), the name is a relative path (begins with ./, ../, or a file name),
then it is looked up relative to the supplied directory node, then it is looked up relative to the supplied directory node,
@ -1454,7 +1464,7 @@ class FS(LocalFS):
""" """
return self._lookup(name, directory, Dir, create) return self._lookup(name, directory, Dir, create)
def VariantDir(self, variant_dir, src_dir, duplicate=1): def VariantDir(self, variant_dir, src_dir, duplicate: int=1):
"""Link the supplied variant directory to the source directory """Link the supplied variant directory to the source directory
for purposes of building files.""" for purposes of building files."""
@ -1470,28 +1480,31 @@ class FS(LocalFS):
raise SCons.Errors.UserError("'%s' already has a source directory: '%s'."%(variant_dir, variant_dir.srcdir)) raise SCons.Errors.UserError("'%s' already has a source directory: '%s'."%(variant_dir, variant_dir.srcdir))
variant_dir.link(src_dir, duplicate) variant_dir.link(src_dir, duplicate)
def Repository(self, *dirs): def Repository(self, *dirs) -> None:
"""Specify Repository directories to search.""" """Specify Repository directories to search."""
for d in dirs: for d in dirs:
if not isinstance(d, SCons.Node.Node): if not isinstance(d, SCons.Node.Node):
d = self.Dir(d) d = self.Dir(d)
self.Top.addRepository(d) self.Top.addRepository(d)
def PyPackageDir(self, modulename): def PyPackageDir(self, modulename) -> Optional[Dir]:
r"""Locate the directory of a given python module name r"""Locate the directory of Python module *modulename*.
For example scons might resolve to For example 'SCons' might resolve to
Windows: C:\Python27\Lib\site-packages\scons-2.5.1 Windows: C:\Python311\Lib\site-packages\SCons
Linux: /usr/lib/scons Linux: /usr/lib64/python3.11/site-packages/SCons
This can be useful when we want to determine a toolpath based on a python module name""" Can be used to determine a toolpath based on a Python module name.
dirpath = '' This is the backend called by the public API function
:meth:`~Environment.Base.PyPackageDir`.
# Python3 Code """
modspec = importlib.util.find_spec(modulename) modspec = importlib.util.find_spec(modulename)
dirpath = os.path.dirname(modspec.origin) if modspec:
return self._lookup(dirpath, None, Dir, True) origin = os.path.dirname(modspec.origin)
return self._lookup(origin, directory=None, fsclass=Dir, create=True)
else:
return None
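The new PyPackageDir body above resolves a module's directory through importlib.util.find_spec and returns None when the module cannot be found; the real method then wraps the path in a Dir node via self._lookup. A standalone sketch of the lookup itself (returning the path string instead of an SCons node):

    import importlib.util
    import os

    def package_dir(modulename: str):
        """Return the directory containing modulename, or None."""
        spec = importlib.util.find_spec(modulename)
        if spec is None or spec.origin is None:
            return None
        return os.path.dirname(spec.origin)

    print(package_dir("email"))  # e.g. .../lib/python3.11/email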
def variant_dir_target_climb(self, orig, dir, tail): def variant_dir_target_climb(self, orig, dir, tail):
@ -1521,7 +1534,7 @@ class FS(LocalFS):
message = fmt % ' '.join(map(str, targets)) message = fmt % ' '.join(map(str, targets))
return targets, message return targets, message
def Glob(self, pathname, ondisk=True, source=True, strings=False, exclude=None, cwd=None): def Glob(self, pathname, ondisk: bool=True, source: bool=True, strings: bool=False, exclude=None, cwd=None):
""" """
Globs Globs
@ -1555,7 +1568,7 @@ class DirBuildInfo(SCons.Node.BuildInfoBase):
glob_magic_check = re.compile('[*?[]') glob_magic_check = re.compile('[*?[]')
def has_glob_magic(s): def has_glob_magic(s) -> bool:
return glob_magic_check.search(s) is not None return glob_magic_check.search(s) is not None
class Dir(Base): class Dir(Base):
@ -1580,12 +1593,12 @@ class Dir(Base):
NodeInfo = DirNodeInfo NodeInfo = DirNodeInfo
BuildInfo = DirBuildInfo BuildInfo = DirBuildInfo
def __init__(self, name, directory, fs): def __init__(self, name, directory, fs) -> None:
if SCons.Debug.track_instances: logInstanceCreation(self, 'Node.FS.Dir') if SCons.Debug.track_instances: logInstanceCreation(self, 'Node.FS.Dir')
super().__init__(name, directory, fs) super().__init__(name, directory, fs)
self._morph() self._morph()
def _morph(self): def _morph(self) -> None:
"""Turn a file system Node (either a freshly initialized directory """Turn a file system Node (either a freshly initialized directory
object or a separate Entry object) into a proper directory object. object or a separate Entry object) into a proper directory object.
@ -1649,11 +1662,11 @@ class Dir(Base):
l.insert(0, a) l.insert(0, a)
self.get_executor().set_action_list(l) self.get_executor().set_action_list(l)
def diskcheck_match(self): def diskcheck_match(self) -> None:
diskcheck_match(self, self.isfile, diskcheck_match(self, self.isfile,
"File %s found where directory expected.") "File %s found where directory expected.")
def __clearRepositoryCache(self, duplicate=None): def __clearRepositoryCache(self, duplicate=None) -> None:
"""Called when we change the repository(ies) for a directory. """Called when we change the repository(ies) for a directory.
This clears any cached information that is invalidated by changing This clears any cached information that is invalidated by changing
the repository.""" the repository."""
@ -1671,7 +1684,7 @@ class Dir(Base):
if duplicate is not None: if duplicate is not None:
node.duplicate = duplicate node.duplicate = duplicate
def __resetDuplicate(self, node): def __resetDuplicate(self, node) -> None:
if node != self: if node != self:
node.duplicate = node.get_dir().duplicate node.duplicate = node.get_dir().duplicate
@ -1682,7 +1695,7 @@ class Dir(Base):
""" """
return self.fs.Entry(name, self) return self.fs.Entry(name, self)
def Dir(self, name, create=True): def Dir(self, name, create: bool=True):
""" """
Looks up or creates a directory node named 'name' relative to Looks up or creates a directory node named 'name' relative to
this directory. this directory.
@ -1696,7 +1709,7 @@ class Dir(Base):
""" """
return self.fs.File(name, self) return self.fs.File(name, self)
def link(self, srcdir, duplicate): def link(self, srcdir, duplicate) -> None:
"""Set this directory as the variant directory for the """Set this directory as the variant directory for the
supplied source directory.""" supplied source directory."""
self.srcdir = srcdir self.srcdir = srcdir
@ -1734,7 +1747,7 @@ class Dir(Base):
return result return result
def addRepository(self, dir): def addRepository(self, dir) -> None:
if dir != self and dir not in self.repositories: if dir != self and dir not in self.repositories:
self.repositories.append(dir) self.repositories.append(dir)
dir._tpath = '.' dir._tpath = '.'
@ -1834,10 +1847,10 @@ class Dir(Base):
# Taskmaster interface subsystem # Taskmaster interface subsystem
# #
def prepare(self): def prepare(self) -> None:
pass pass
def build(self, **kw): def build(self, **kw) -> None:
"""A null "builder" for directories.""" """A null "builder" for directories."""
global MkdirBuilder global MkdirBuilder
if self.builder is not MkdirBuilder: if self.builder is not MkdirBuilder:
@ -1911,19 +1924,18 @@ class Dir(Base):
contents = self.get_contents() contents = self.get_contents()
return hash_signature(contents) return hash_signature(contents)
def do_duplicate(self, src): def do_duplicate(self, src) -> None:
pass pass
def is_up_to_date(self): def is_up_to_date(self) -> bool:
"""If any child is not up-to-date, then this directory isn't, """If any child is not up-to-date, then this directory isn't, either."""
either."""
if self.builder is not MkdirBuilder and not self.exists(): if self.builder is not MkdirBuilder and not self.exists():
return 0 return False
up_to_date = SCons.Node.up_to_date up_to_date = SCons.Node.up_to_date
for kid in self.children(): for kid in self.children():
if kid.get_state() > up_to_date: if kid.get_state() > up_to_date:
return 0 return False
return 1 return True
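Dir.is_up_to_date above now returns real booleans: a directory is current only if it exists (unless it is an implicit MkdirBuilder target) and every child is current. A simplified sketch of the "all children up to date" check, ignoring the MkdirBuilder special case and the cached state comparison the real method uses:

    def dir_is_current(directory) -> bool:
        """A directory is up to date only if every child is up to date."""
        return all(child.is_up_to_date() for child in directory.children())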
def rdir(self): def rdir(self):
if not self.exists(): if not self.exists():
@ -2145,7 +2157,7 @@ class Dir(Base):
return None return None
return node return node
def walk(self, func, arg): def walk(self, func, arg) -> None:
""" """
Walk this directory tree by calling the specified function Walk this directory tree by calling the specified function
for each directory in the tree. for each directory in the tree.
@ -2171,7 +2183,7 @@ class Dir(Base):
for dirname in [n for n in names if isinstance(entries[n], Dir)]: for dirname in [n for n in names if isinstance(entries[n], Dir)]:
entries[dirname].walk(func, arg) entries[dirname].walk(func, arg)
def glob(self, pathname, ondisk=True, source=False, strings=False, exclude=None) -> list: def glob(self, pathname, ondisk: bool=True, source: bool=False, strings: bool=False, exclude=None) -> list:
"""Returns a list of Nodes (or strings) matching a pathname pattern. """Returns a list of Nodes (or strings) matching a pathname pattern.
Pathname patterns follow POSIX shell syntax:: Pathname patterns follow POSIX shell syntax::
@ -2234,7 +2246,7 @@ class Dir(Base):
result = [x for x in result if not any(fnmatch.fnmatch(str(x), str(e)) for e in SCons.Util.flatten(excludes))] result = [x for x in result if not any(fnmatch.fnmatch(str(x), str(e)) for e in SCons.Util.flatten(excludes))]
return sorted(result, key=lambda a: str(a)) return sorted(result, key=lambda a: str(a))
def _glob1(self, pattern, ondisk=True, source=False, strings=False): def _glob1(self, pattern, ondisk: bool=True, source: bool=False, strings: bool=False):
""" """
Globs for and returns a list of entry names matching a single Globs for and returns a list of entry names matching a single
pattern in this directory. pattern in this directory.
@ -2313,7 +2325,7 @@ class RootDir(Dir):
__slots__ = ('_lookupDict', 'abspath', 'path') __slots__ = ('_lookupDict', 'abspath', 'path')
def __init__(self, drive, fs): def __init__(self, drive, fs) -> None:
if SCons.Debug.track_instances: logInstanceCreation(self, 'Node.FS.RootDir') if SCons.Debug.track_instances: logInstanceCreation(self, 'Node.FS.RootDir')
SCons.Node.Node.__init__(self) SCons.Node.Node.__init__(self)
@ -2371,7 +2383,7 @@ class RootDir(Dir):
if not has_unc: if not has_unc:
self._lookupDict['//'] = self self._lookupDict['//'] = self
def _morph(self): def _morph(self) -> None:
"""Turn a file system Node (either a freshly initialized directory """Turn a file system Node (either a freshly initialized directory
object or a separate Entry object) into a proper directory object. object or a separate Entry object) into a proper directory object.
@ -2412,12 +2424,12 @@ class RootDir(Dir):
self.get_executor().set_action_list(l) self.get_executor().set_action_list(l)
def must_be_same(self, klass): def must_be_same(self, klass) -> None:
if klass is Dir: if klass is Dir:
return return
Base.must_be_same(self, klass) Base.must_be_same(self, klass)
def _lookup_abs(self, p, klass, create=True): def _lookup_abs(self, p, klass, create: bool=True):
""" """
Fast (?) lookup of a *normalized* absolute path. Fast (?) lookup of a *normalized* absolute path.
@ -2459,7 +2471,7 @@ class RootDir(Dir):
result.must_be_same(klass) result.must_be_same(klass)
return result return result
def __str__(self): def __str__(self) -> str:
return self._abspath return self._abspath
def entry_abspath(self, name): def entry_abspath(self, name):
@ -2474,11 +2486,8 @@ class RootDir(Dir):
def entry_tpath(self, name): def entry_tpath(self, name):
return self._tpath + name return self._tpath + name
def is_under(self, dir): def is_under(self, dir) -> bool:
if self is dir: return True if self is dir else False
return 1
else:
return 0
def up(self): def up(self):
return None return None
@ -2531,7 +2540,7 @@ class FileNodeInfo(SCons.Node.NodeInfoBase):
return state return state
def __setstate__(self, state): def __setstate__(self, state) -> None:
""" """
Restore the attributes from a pickled state. Restore the attributes from a pickled state.
""" """
@ -2544,7 +2553,7 @@ class FileNodeInfo(SCons.Node.NodeInfoBase):
def __eq__(self, other): def __eq__(self, other):
return self.csig == other.csig and self.timestamp == other.timestamp and self.size == other.size return self.csig == other.csig and self.timestamp == other.timestamp and self.size == other.size
def __ne__(self, other): def __ne__(self, other) -> bool:
return not self.__eq__(other) return not self.__eq__(other)
@ -2577,7 +2586,7 @@ class FileBuildInfo(SCons.Node.BuildInfoBase):
return super().__setattr__(key, value) return super().__setattr__(key, value)
def convert_to_sconsign(self): def convert_to_sconsign(self) -> None:
""" """
Converts this FileBuildInfo object for writing to a .sconsign file Converts this FileBuildInfo object for writing to a .sconsign file
@ -2604,7 +2613,7 @@ class FileBuildInfo(SCons.Node.BuildInfoBase):
else: else:
setattr(self, attr, list(map(node_to_str, val))) setattr(self, attr, list(map(node_to_str, val)))
def convert_from_sconsign(self, dir, name): def convert_from_sconsign(self, dir, name) -> None:
""" """
Converts a newly-read FileBuildInfo object for in-SCons use Converts a newly-read FileBuildInfo object for in-SCons use
@ -2613,7 +2622,7 @@ class FileBuildInfo(SCons.Node.BuildInfoBase):
""" """
pass pass
def prepare_dependencies(self): def prepare_dependencies(self) -> None:
""" """
Prepares a FileBuildInfo object for explaining what changed Prepares a FileBuildInfo object for explaining what changed
@ -2642,7 +2651,7 @@ class FileBuildInfo(SCons.Node.BuildInfoBase):
nodes.append(s) nodes.append(s)
setattr(self, nattr, nodes) setattr(self, nattr, nodes)
def format(self, names=0): def format(self, names: int=0):
result = [] result = []
bkids = self.bsources + self.bdepends + self.bimplicit bkids = self.bsources + self.bdepends + self.bimplicit
bkidsigs = self.bsourcesigs + self.bdependsigs + self.bimplicitsigs bkidsigs = self.bsourcesigs + self.bdependsigs + self.bimplicitsigs
@ -2680,11 +2689,11 @@ class File(Base):
# Although the command-line argument is in kilobytes, this is in bytes. # Although the command-line argument is in kilobytes, this is in bytes.
hash_chunksize = 65536 hash_chunksize = 65536
def diskcheck_match(self): def diskcheck_match(self) -> None:
diskcheck_match(self, self.isdir, diskcheck_match(self, self.isdir,
"Directory %s found where file expected.") "Directory %s found where file expected.")
def __init__(self, name, directory, fs): def __init__(self, name, directory, fs) -> None:
if SCons.Debug.track_instances: logInstanceCreation(self, 'Node.FS.File') if SCons.Debug.track_instances: logInstanceCreation(self, 'Node.FS.File')
super().__init__(name, directory, fs) super().__init__(name, directory, fs)
self._morph() self._morph()
@ -2694,7 +2703,7 @@ class File(Base):
the directory of this file.""" the directory of this file."""
return self.dir.Entry(name) return self.dir.Entry(name)
def Dir(self, name, create=True): def Dir(self, name, create: bool=True):
"""Create a directory node named 'name' relative to """Create a directory node named 'name' relative to
the directory of this file.""" the directory of this file."""
return self.dir.Dir(name, create=create) return self.dir.Dir(name, create=create)
@ -2709,11 +2718,11 @@ class File(Base):
the directory of this file.""" the directory of this file."""
return self.dir.File(name) return self.dir.File(name)
def _morph(self): def _morph(self) -> None:
"""Turn a file system node into a File object.""" """Turn a file system node into a File object."""
self.scanner_paths = {} self.scanner_paths = {}
if not hasattr(self, '_local'): if not hasattr(self, '_local'):
self._local = 0 self._local = False
if not hasattr(self, 'released_target_info'): if not hasattr(self, 'released_target_info'):
self.released_target_info = False self.released_target_info = False
@ -2746,43 +2755,18 @@ class File(Base):
return SCons.Node._get_contents_map[self._func_get_contents](self) return SCons.Node._get_contents_map[self._func_get_contents](self)
def get_text_contents(self) -> str: def get_text_contents(self) -> str:
"""Return the contents of the file in text form. """Return the contents of the file as text."""
return SCons.Util.to_Text(self.get_contents())
This attempts to figure out what the encoding of the text is
based upon the BOM bytes, and then decodes the contents so that
it's a valid python string.
"""
contents = self.get_contents()
# The behavior of various decode() methods and functions
# w.r.t. the initial BOM bytes is different for different
# encodings and/or Python versions. ('utf-8' does not strip
# them, but has a 'utf-8-sig' which does; 'utf-16' seems to
# strip them; etc.) Just sidestep all the complication by
# explicitly stripping the BOM before we decode().
if contents[:len(codecs.BOM_UTF8)] == codecs.BOM_UTF8:
return contents[len(codecs.BOM_UTF8):].decode('utf-8')
if contents[:len(codecs.BOM_UTF16_LE)] == codecs.BOM_UTF16_LE:
return contents[len(codecs.BOM_UTF16_LE):].decode('utf-16-le')
if contents[:len(codecs.BOM_UTF16_BE)] == codecs.BOM_UTF16_BE:
return contents[len(codecs.BOM_UTF16_BE):].decode('utf-16-be')
try:
return contents.decode('utf-8')
except UnicodeDecodeError as e:
try:
return contents.decode('latin-1')
except UnicodeDecodeError as e:
return contents.decode('utf-8', errors='backslashreplace')
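The removed get_text_contents body on the left shows the BOM-sniffing decode logic this commit replaces with a single call to SCons.Util.to_Text, which presumably centralizes roughly the same steps. A simplified sketch of such a decoder, based on the removed code (the original also tried a latin-1 fallback before backslash-escaping):

    import codecs

    def to_text(data: bytes) -> str:
        """Decode bytes to str, honouring a UTF-8/UTF-16 BOM if present."""
        if data.startswith(codecs.BOM_UTF8):
            return data[len(codecs.BOM_UTF8):].decode('utf-8')
        if data.startswith(codecs.BOM_UTF16_LE):
            return data[len(codecs.BOM_UTF16_LE):].decode('utf-16-le')
        if data.startswith(codecs.BOM_UTF16_BE):
            return data[len(codecs.BOM_UTF16_BE):].decode('utf-16-be')
        try:
            return data.decode('utf-8')
        except UnicodeDecodeError:
            return data.decode('utf-8', errors='backslashreplace')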
def get_content_hash(self) -> str: def get_content_hash(self) -> str:
""" """Compute and return the hash for this file."""
Compute and return the hash for this file.
"""
if not self.rexists(): if not self.rexists():
# special marker to help distinguish from empty file
return hash_signature(SCons.Util.NOFILE) return hash_signature(SCons.Util.NOFILE)
fname = self.rfile().get_abspath() fname = self.rfile().get_abspath()
try: try:
cs = hash_file_signature(fname, chunksize=File.hash_chunksize) cs = hash_file_signature(fname, chunksize=File.hash_chunksize)
except EnvironmentError as e: except OSError as e:
if not e.filename: if not e.filename:
e.filename = fname e.filename = fname
raise raise
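get_content_hash above delegates to hash_file_signature with File.hash_chunksize (65536 bytes, per the comment higher up), so large files are hashed incrementally instead of being read into memory in one piece. A minimal sketch of chunked hashing with hashlib; the hash algorithm here is chosen for illustration only:

    import hashlib

    def hash_file(path: str, chunksize: int = 65536) -> str:
        """Hash a file incrementally, chunksize bytes at a time."""
        h = hashlib.sha256()
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(chunksize), b''):
                h.update(chunk)
        return h.hexdigest()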
@ -2939,7 +2923,7 @@ class File(Base):
try: try:
sconsign_entry = self.dir.sconsign().get_entry(self.name) sconsign_entry = self.dir.sconsign().get_entry(self.name)
except (KeyError, EnvironmentError): except (KeyError, OSError):
import SCons.SConsign import SCons.SConsign
sconsign_entry = SCons.SConsign.SConsignEntry() sconsign_entry = SCons.SConsign.SConsignEntry()
sconsign_entry.binfo = self.new_binfo() sconsign_entry.binfo = self.new_binfo()
@ -2997,12 +2981,12 @@ class File(Base):
return result return result
def _createDir(self): def _createDir(self) -> None:
# ensure that the directories for this node are # ensure that the directories for this node are
# created. # created.
self.dir._create() self.dir._create()
def push_to_cache(self): def push_to_cache(self) -> None:
"""Try to push the node into a cache """Try to push the node into a cache
""" """
# This should get called before the Nodes' .built() method is # This should get called before the Nodes' .built() method is
@ -3018,22 +3002,22 @@ class File(Base):
if self.exists(): if self.exists():
self.get_build_env().get_CacheDir().push(self) self.get_build_env().get_CacheDir().push(self)
def retrieve_from_cache(self): def retrieve_from_cache(self) -> bool:
"""Try to retrieve the node's content from a cache """Try to retrieve the node's content from a cache
This method is called from multiple threads in a parallel build, This method is called from multiple threads in a parallel build,
so only do thread safe stuff here. Do thread unsafe stuff in so only do thread safe stuff here. Do thread unsafe stuff in
built(). built().
Returns true if the node was successfully retrieved. Returns True if the node was successfully retrieved.
""" """
if self.nocache: if self.nocache:
return None return False
if not self.is_derived(): if not self.is_derived():
return None return False
return self.get_build_env().get_CacheDir().retrieve(self) return self.get_build_env().get_CacheDir().retrieve(self)
def visited(self): def visited(self) -> None:
if self.exists() and self.executor is not None: if self.exists() and self.executor is not None:
self.get_build_env().get_CacheDir().push_if_forced(self) self.get_build_env().get_CacheDir().push_if_forced(self)
@ -3056,7 +3040,7 @@ class File(Base):
SCons.Node.store_info_map[self.store_info](self) SCons.Node.store_info_map[self.store_info](self)
def release_target_info(self): def release_target_info(self) -> None:
"""Called just after this node has been marked """Called just after this node has been marked
up-to-date or was built completely. up-to-date or was built completely.
@ -3123,7 +3107,7 @@ class File(Base):
self.builder_set(scb) self.builder_set(scb)
return scb return scb
def has_src_builder(self): def has_src_builder(self) -> bool:
"""Return whether this Node has a source builder or not. """Return whether this Node has a source builder or not.
If this Node doesn't have an explicit source code builder, this If this Node doesn't have an explicit source code builder, this
@ -3150,7 +3134,7 @@ class File(Base):
def _rmv_existing(self): def _rmv_existing(self):
self.clear_memoized_values() self.clear_memoized_values()
if SCons.Node.print_duplicate: if SCons.Node.print_duplicate:
print("dup: removing existing target {}".format(self)) print(f"dup: removing existing target {self}")
e = Unlink(self, [], None) e = Unlink(self, [], None)
if isinstance(e, SCons.Errors.BuildError): if isinstance(e, SCons.Errors.BuildError):
raise e raise e
@ -3159,7 +3143,7 @@ class File(Base):
# Taskmaster interface subsystem # Taskmaster interface subsystem
# #
def make_ready(self): def make_ready(self) -> None:
self.has_src_builder() self.has_src_builder()
self.get_binfo() self.get_binfo()
@ -3178,7 +3162,7 @@ class File(Base):
try: try:
self._createDir() self._createDir()
except SCons.Errors.StopError as drive: except SCons.Errors.StopError as drive:
raise SCons.Errors.StopError("No drive `{}' for target `{}'.".format(drive, self)) raise SCons.Errors.StopError(f"No drive `{drive}' for target `{self}'.")
# #
# #
@ -3194,11 +3178,11 @@ class File(Base):
def do_duplicate(self, src): def do_duplicate(self, src):
self._createDir() self._createDir()
if SCons.Node.print_duplicate: if SCons.Node.print_duplicate:
print("dup: relinking variant '{}' from '{}'".format(self, src)) print(f"dup: relinking variant '{self}' from '{src}'")
Unlink(self, None, None) Unlink(self, None, None)
e = Link(self, src, None) e = Link(self, src, None)
if isinstance(e, SCons.Errors.BuildError): if isinstance(e, SCons.Errors.BuildError):
raise SCons.Errors.StopError("Cannot duplicate `{}' in `{}': {}.".format(src.get_internal_path(), self.dir._path, e.errstr)) raise SCons.Errors.StopError(f"Cannot duplicate `{src.get_internal_path()}' in `{self.dir._path}': {e.errstr}.")
self.linked = 1 self.linked = 1
# The Link() action may or may not have actually # The Link() action may or may not have actually
# created the file, depending on whether the -n # created the file, depending on whether the -n
@ -3264,7 +3248,7 @@ class File(Base):
contents = self.get_contents() contents = self.get_contents()
else: else:
csig = self.get_content_hash() csig = self.get_content_hash()
except IOError: except OSError:
# This can happen if there's actually a directory on-disk, # This can happen if there's actually a directory on-disk,
# which can be the case if they've disabled disk checks, # which can be the case if they've disabled disk checks,
# or if an action with a File target actually happens to # or if an action with a File target actually happens to
@ -3282,11 +3266,11 @@ class File(Base):
# DECISION SUBSYSTEM # DECISION SUBSYSTEM
# #
def builder_set(self, builder): def builder_set(self, builder) -> None:
SCons.Node.Node.builder_set(self, builder) SCons.Node.Node.builder_set(self, builder)
self.changed_since_last_build = 5 self.changed_since_last_build = 5
def built(self): def built(self) -> None:
"""Called just after this File node is successfully built. """Called just after this File node is successfully built.
Just like for 'release_target_info' we try to release Just like for 'release_target_info' we try to release
@ -3310,7 +3294,7 @@ class File(Base):
self.scanner_paths = None self.scanner_paths = None
def changed(self, node=None, allowcache=False): def changed(self, node=None, allowcache: bool=False) -> bool:
""" """
Returns if the node is up-to-date with respect to the BuildInfo Returns if the node is up-to-date with respect to the BuildInfo
stored last time it was built. stored last time it was built.
@ -3332,14 +3316,14 @@ class File(Base):
self._memo['changed'] = has_changed self._memo['changed'] = has_changed
return has_changed return has_changed
def changed_content(self, target, prev_ni, repo_node=None): def changed_content(self, target, prev_ni, repo_node=None) -> bool:
cur_csig = self.get_csig() cur_csig = self.get_csig()
try: try:
return cur_csig != prev_ni.csig return cur_csig != prev_ni.csig
except AttributeError: except AttributeError:
return 1 return True
def changed_state(self, target, prev_ni, repo_node=None): def changed_state(self, target, prev_ni, repo_node=None) -> bool:
return self.state != SCons.Node.up_to_date return self.state != SCons.Node.up_to_date
@ -3466,7 +3450,7 @@ class File(Base):
return df return df
def changed_timestamp_then_content(self, target, prev_ni, node=None): def changed_timestamp_then_content(self, target, prev_ni, node=None) -> bool:
""" """
Used when decider for file is Timestamp-MD5 Used when decider for file is Timestamp-MD5
@ -3527,13 +3511,13 @@ class File(Base):
return False return False
return self.changed_content(target, new_prev_ni) return self.changed_content(target, new_prev_ni)
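changed_timestamp_then_content implements the "Timestamp-MD5" decider the docstring mentions: if the stored timestamp still matches, the file is assumed unchanged without rehashing, and only when the timestamp differs is the content signature compared. A simplified sketch of that two-stage decision (the field names mirror the NodeInfo attributes used above, but the class itself is hypothetical):

    from dataclasses import dataclass

    @dataclass
    class PrevInfo:
        timestamp: int
        csig: str

    def changed_timestamp_then_content(cur_timestamp: int, cur_csig: str,
                                       prev: PrevInfo) -> bool:
        """Cheap timestamp check first; fall back to content comparison."""
        if cur_timestamp == prev.timestamp:
            return False                # same mtime: assume unchanged
        return cur_csig != prev.csig    # mtime moved: compare content hashes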
def changed_timestamp_newer(self, target, prev_ni, repo_node=None): def changed_timestamp_newer(self, target, prev_ni, repo_node=None) -> bool:
try: try:
return self.get_timestamp() > target.get_timestamp() return self.get_timestamp() > target.get_timestamp()
except AttributeError: except AttributeError:
return 1 return True
def changed_timestamp_match(self, target, prev_ni, repo_node=None): def changed_timestamp_match(self, target, prev_ni, repo_node=None) -> bool:
""" """
Return True if the timestamps don't match or if there is no previous timestamp Return True if the timestamps don't match or if there is no previous timestamp
:param target: :param target:
@ -3543,13 +3527,13 @@ class File(Base):
try: try:
return self.get_timestamp() != prev_ni.timestamp return self.get_timestamp() != prev_ni.timestamp
except AttributeError: except AttributeError:
return 1 return True
def is_up_to_date(self): def is_up_to_date(self) -> bool:
"""Check for whether the Node is current """Check for whether the Node is current.
In all cases self is the target we're checking to see if it's up to date In all cases self is the target we're checking to see if it's up to date
""" """
T = 0 T = 0
if T: Trace('is_up_to_date(%s):' % self) if T: Trace('is_up_to_date(%s):' % self)
if not self.exists(): if not self.exists():
@ -3570,10 +3554,10 @@ class File(Base):
raise e raise e
SCons.Node.store_info_map[self.store_info](self) SCons.Node.store_info_map[self.store_info](self)
if T: Trace(' 1\n') if T: Trace(' 1\n')
return 1 return True
self.changed() self.changed()
if T: Trace(' None\n') if T: Trace(' None\n')
return None return False
else: else:
r = self.changed() r = self.changed()
if T: Trace(' self.exists(): %s\n' % r) if T: Trace(' self.exists(): %s\n' % r)
@ -3728,7 +3712,7 @@ class FileFinder:
""" """
""" """
def __init__(self): def __init__(self) -> None:
self._memo = {} self._memo = {}
def filedir_lookup(self, p, fd=None): def filedir_lookup(self, p, fd=None):
@ -3826,7 +3810,7 @@ class FileFinder:
find_file = FileFinder().find_file find_file = FileFinder().find_file
def invalidate_node_memos(targets): def invalidate_node_memos(targets) -> None:
""" """
Invalidate the memoized values of all Nodes (files or directories) Invalidate the memoized values of all Nodes (files or directories)
that are associated with the given entries. Has been added to that are associated with the given entries. Has been added to
@ -58,7 +58,7 @@ class ValueNodeInfo(SCons.Node.NodeInfoBase):
return state return state
def __setstate__(self, state): def __setstate__(self, state) -> None:
""" """
Restore the attributes from a pickled state. Restore the attributes from a pickled state.
""" """
@ -87,7 +87,7 @@ class Value(SCons.Node.Node):
NodeInfo = ValueNodeInfo NodeInfo = ValueNodeInfo
BuildInfo = ValueBuildInfo BuildInfo = ValueBuildInfo
def __init__(self, value, built_value=None, name=None): def __init__(self, value, built_value=None, name=None) -> None:
super().__init__() super().__init__()
self.value = value self.value = value
self.changed_since_last_build = 6 self.changed_since_last_build = 6
@ -105,25 +105,25 @@ class Value(SCons.Node.Node):
def str_for_display(self): def str_for_display(self):
return repr(self.value) return repr(self.value)
def __str__(self): def __str__(self) -> str:
return str(self.value) return str(self.value)
def make_ready(self): def make_ready(self) -> None:
self.get_csig() self.get_csig()
def build(self, **kw): def build(self, **kw) -> None:
if not hasattr(self, 'built_value'): if not hasattr(self, 'built_value'):
SCons.Node.Node.build(self, **kw) SCons.Node.Node.build(self, **kw)
is_up_to_date = SCons.Node.Node.children_are_up_to_date is_up_to_date = SCons.Node.Node.children_are_up_to_date
def is_under(self, dir): def is_under(self, dir) -> bool:
# Make Value nodes get built regardless of # Make Value nodes get built regardless of
# what directory scons was run from. Value nodes # what directory scons was run from. Value nodes
# are outside the filesystem: # are outside the filesystem:
return 1 return True
def write(self, built_value): def write(self, built_value) -> None:
"""Set the value of the node.""" """Set the value of the node."""
self.built_value = built_value self.built_value = built_value
@ -43,6 +43,7 @@ be able to depend on any other type of "thing."
import collections import collections
import copy import copy
from itertools import chain, zip_longest from itertools import chain, zip_longest
from typing import Optional
import SCons.Debug import SCons.Debug
import SCons.Executor import SCons.Executor
@ -50,6 +51,7 @@ import SCons.Memoize
from SCons.compat import NoSlotsPyPy from SCons.compat import NoSlotsPyPy
from SCons.Debug import logInstanceCreation, Trace from SCons.Debug import logInstanceCreation, Trace
from SCons.Util import hash_signature, is_List, UniqueList, render_tree from SCons.Util import hash_signature, is_List, UniqueList, render_tree
from SCons.Util.sctyping import ExecutorType
print_duplicate = 0 print_duplicate = 0
@ -94,7 +96,7 @@ implicit_deps_changed = 0
# A variable that can be set to an interface-specific function be called # A variable that can be set to an interface-specific function be called
# to annotate a Node with information about its creation. # to annotate a Node with information about its creation.
def do_nothing_node(node): pass def do_nothing_node(node) -> None: pass
Annotate = do_nothing_node Annotate = do_nothing_node
@ -109,7 +111,7 @@ interactive = False
def is_derived_none(node): def is_derived_none(node):
raise NotImplementedError raise NotImplementedError
def is_derived_node(node): def is_derived_node(node) -> bool:
""" """
Returns true if this node is derived (i.e. built). Returns true if this node is derived (i.e. built).
""" """
@ -118,16 +120,16 @@ def is_derived_node(node):
_is_derived_map = {0 : is_derived_none, _is_derived_map = {0 : is_derived_none,
1 : is_derived_node} 1 : is_derived_node}
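is_derived_none/is_derived_node and the _is_derived_map above (like _exists_map and _decider_map used elsewhere in this module) show how per-node behaviour is dispatched: the node stores a small integer and a module-level map translates it into the function to call, presumably so the choice can travel as plain data rather than as a function reference. A toy sketch of the pattern, with purely illustrative names:

    import os

    def exists_never(node) -> bool:
        return False

    def exists_on_disk(node) -> bool:
        return node.stat() is not None

    _exists_map = {0: exists_never, 1: exists_on_disk}

    class TinyNode:
        __slots__ = ('_func_exists', 'path')

        def __init__(self, path, func_exists=1):
            self.path = path
            self._func_exists = func_exists

        def stat(self):
            try:
                return os.stat(self.path)
            except OSError:
                return None

        def exists(self) -> bool:
            # Look up the behaviour by the stored integer key.
            return _exists_map[self._func_exists](self)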
def exists_none(node): def exists_none(node) -> bool:
raise NotImplementedError raise NotImplementedError
def exists_always(node): def exists_always(node) -> bool:
return 1 return True
def exists_base(node): def exists_base(node) -> bool:
return node.stat() is not None return node.stat() is not None
def exists_entry(node): def exists_entry(node) -> bool:
"""Return if the Entry exists. Check the file system to see """Return if the Entry exists. Check the file system to see
what we should turn into first. Assume a file if there's no what we should turn into first. Assume a file if there's no
directory.""" directory."""
@ -135,7 +137,7 @@ def exists_entry(node):
return _exists_map[node._func_exists](node) return _exists_map[node._func_exists](node)
def exists_file(node): def exists_file(node) -> bool:
# Duplicate from source path if we are set up to do this. # Duplicate from source path if we are set up to do this.
if node.duplicate and not node.is_derived() and not node.linked: if node.duplicate and not node.is_derived() and not node.linked:
src = node.srcnode() src = node.srcnode()
@ -212,7 +214,7 @@ def get_contents_file(node):
try: try:
with open(fname, "rb") as fp: with open(fname, "rb") as fp:
contents = fp.read() contents = fp.read()
except EnvironmentError as e: except OSError as e:
if not e.filename: if not e.filename:
e.filename = fname e.filename = fname
raise raise
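get_contents_file above reads the node's bytes and, when the read fails, fills in e.filename before re-raising so the error names the offending file; the only change in this hunk is catching OSError instead of the legacy EnvironmentError alias. A minimal sketch of the same annotate-and-reraise pattern:

    def read_bytes(fname: str) -> bytes:
        """Read a file's bytes, making sure any OSError names the file."""
        try:
            with open(fname, "rb") as fp:
                return fp.read()
        except OSError as e:
            if not e.filename:
                e.filename = fname  # make the traceback name the file
            raise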
@ -245,7 +247,7 @@ _target_from_source_map = {0 : target_from_source_none,
# #
# First, the single decider functions # First, the single decider functions
# #
def changed_since_last_build_node(node, target, prev_ni, repo_node=None): def changed_since_last_build_node(node, target, prev_ni, repo_node=None) -> bool:
""" """
Must be overridden in a specific subclass to return True if this Must be overridden in a specific subclass to return True if this
@ -266,37 +268,37 @@ def changed_since_last_build_node(node, target, prev_ni, repo_node=None):
raise NotImplementedError raise NotImplementedError
def changed_since_last_build_alias(node, target, prev_ni, repo_node=None): def changed_since_last_build_alias(node, target, prev_ni, repo_node=None) -> bool:
cur_csig = node.get_csig() cur_csig = node.get_csig()
try: try:
return cur_csig != prev_ni.csig return cur_csig != prev_ni.csig
except AttributeError: except AttributeError:
return 1 return True
def changed_since_last_build_entry(node, target, prev_ni, repo_node=None): def changed_since_last_build_entry(node, target, prev_ni, repo_node=None) -> bool:
node.disambiguate() node.disambiguate()
return _decider_map[node.changed_since_last_build](node, target, prev_ni, repo_node) return _decider_map[node.changed_since_last_build](node, target, prev_ni, repo_node)
def changed_since_last_build_state_changed(node, target, prev_ni, repo_node=None): def changed_since_last_build_state_changed(node, target, prev_ni, repo_node=None) -> bool:
return node.state != SCons.Node.up_to_date return node.state != SCons.Node.up_to_date
def decide_source(node, target, prev_ni, repo_node=None): def decide_source(node, target, prev_ni, repo_node=None) -> bool:
return target.get_build_env().decide_source(node, target, prev_ni, repo_node) return target.get_build_env().decide_source(node, target, prev_ni, repo_node)
def decide_target(node, target, prev_ni, repo_node=None): def decide_target(node, target, prev_ni, repo_node=None) -> bool:
return target.get_build_env().decide_target(node, target, prev_ni, repo_node) return target.get_build_env().decide_target(node, target, prev_ni, repo_node)
def changed_since_last_build_python(node, target, prev_ni, repo_node=None): def changed_since_last_build_python(node, target, prev_ni, repo_node=None) -> bool:
cur_csig = node.get_csig() cur_csig = node.get_csig()
try: try:
return cur_csig != prev_ni.csig return cur_csig != prev_ni.csig
except AttributeError: except AttributeError:
return 1 return True
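The decider functions above all share the (node, target, prev_ni, repo_node) signature and are dispatched through _decider_map; the user-facing way to install one is Decider() on an environment (or a single node). A minimal SConstruct sketch, hedged: the decider name and the 'hello' target are illustrative, not part of this change.

def csig_decider(dependency, target, prev_ni, repo_node=None) -> bool:
    # Rebuild when the dependency's content signature differs from the one
    # recorded the last time 'target' was built.
    try:
        return dependency.get_csig() != prev_ni.csig
    except AttributeError:
        # No previous build info recorded yet: treat the dependency as changed.
        return True

env = Environment()
env.Decider(csig_decider)          # built-in string names such as "content" also work
env.Program('hello', 'hello.c')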
# #
@ -326,10 +328,10 @@ do_store_info = True
# First, the single info functions # First, the single info functions
# #
def store_info_pass(node): def store_info_pass(node) -> None:
pass pass
def store_info_file(node): def store_info_file(node) -> None:
# Merge our build information into the already-stored entry. # Merge our build information into the already-stored entry.
# This accommodates "chained builds" where a file that's a target # This accommodates "chained builds" where a file that's a target
# in one build (SConstruct file) is a source in a different build. # in one build (SConstruct file) is a source in a different build.
@ -353,7 +355,7 @@ class NodeInfoBase:
__slots__ = ('__weakref__',) __slots__ = ('__weakref__',)
current_version_id = 2 current_version_id = 2
def update(self, node): def update(self, node) -> None:
try: try:
field_list = self.field_list field_list = self.field_list
except AttributeError: except AttributeError:
@ -370,10 +372,10 @@ class NodeInfoBase:
else: else:
setattr(self, f, func()) setattr(self, f, func())
def convert(self, node, val): def convert(self, node, val) -> None:
pass pass
def merge(self, other): def merge(self, other) -> None:
""" """
Merge the fields of another object into this object. Already existing Merge the fields of another object into this object. Already existing
information is overwritten by the other instance's data. information is overwritten by the other instance's data.
@ -383,7 +385,7 @@ class NodeInfoBase:
state = other.__getstate__() state = other.__getstate__()
self.__setstate__(state) self.__setstate__(state)
def format(self, field_list=None, names=0): def format(self, field_list=None, names: int=0):
if field_list is None: if field_list is None:
try: try:
field_list = self.field_list field_list = self.field_list
@ -426,7 +428,7 @@ class NodeInfoBase:
pass pass
return state return state
def __setstate__(self, state): def __setstate__(self, state) -> None:
""" """
Restore the attributes from a pickled state. The version is discarded. Restore the attributes from a pickled state. The version is discarded.
""" """
@ -452,7 +454,7 @@ class BuildInfoBase:
"bsources", "bdepends", "bact", "bimplicit", "__weakref__") "bsources", "bdepends", "bact", "bimplicit", "__weakref__")
current_version_id = 2 current_version_id = 2
def __init__(self): def __init__(self) -> None:
# Create an object attribute from the class attribute so it ends up # Create an object attribute from the class attribute so it ends up
# in the pickled data in the .sconsign file. # in the pickled data in the .sconsign file.
self.bsourcesigs = [] self.bsourcesigs = []
@ -460,7 +462,7 @@ class BuildInfoBase:
self.bimplicitsigs = [] self.bimplicitsigs = []
self.bactsig = None self.bactsig = None
def merge(self, other): def merge(self, other) -> None:
""" """
Merge the fields of another object into this object. Already existing Merge the fields of another object into this object. Already existing
information is overwritten by the other instance's data. information is overwritten by the other instance's data.
@ -490,7 +492,7 @@ class BuildInfoBase:
pass pass
return state return state
def __setstate__(self, state): def __setstate__(self, state) -> None:
""" """
Restore the attributes from a pickled state. Restore the attributes from a pickled state.
""" """
@ -501,7 +503,7 @@ class BuildInfoBase:
setattr(self, key, value) setattr(self, key, value)
class Node(object, metaclass=NoSlotsPyPy): class Node(metaclass=NoSlotsPyPy):
"""The base Node class, for entities that we know how to """The base Node class, for entities that we know how to
build, or use to build other Nodes. build, or use to build other Nodes.
""" """
@ -553,7 +555,7 @@ class Node(object, metaclass=NoSlotsPyPy):
__slots__ = ('shared', '__dict__') __slots__ = ('shared', '__dict__')
def __init__(self): def __init__(self) -> None:
if SCons.Debug.track_instances: logInstanceCreation(self, 'Node.Node') if SCons.Debug.track_instances: logInstanceCreation(self, 'Node.Node')
# Note that we no longer explicitly initialize a self.builder # Note that we no longer explicitly initialize a self.builder
# attribute to None here. That's because the self.builder # attribute to None here. That's because the self.builder
@ -615,7 +617,7 @@ class Node(object, metaclass=NoSlotsPyPy):
def disambiguate(self, must_exist=None): def disambiguate(self, must_exist=None):
return self return self
def get_suffix(self): def get_suffix(self) -> str:
return '' return ''
@SCons.Memoize.CountMethodCall @SCons.Memoize.CountMethodCall
@ -634,11 +636,11 @@ class Node(object, metaclass=NoSlotsPyPy):
"""Fetch the appropriate scanner path for this node.""" """Fetch the appropriate scanner path for this node."""
return self.get_executor().get_build_scanner_path(scanner) return self.get_executor().get_build_scanner_path(scanner)
def set_executor(self, executor): def set_executor(self, executor: ExecutorType) -> None:
"""Set the action executor for this node.""" """Set the action executor for this node."""
self.executor = executor self.executor = executor
def get_executor(self, create=1): def get_executor(self, create: int=1) -> ExecutorType:
"""Fetch the action executor for this node. Create one if """Fetch the action executor for this node. Create one if
there isn't already one, and requested to do so.""" there isn't already one, and requested to do so."""
try: try:
@ -649,7 +651,7 @@ class Node(object, metaclass=NoSlotsPyPy):
try: try:
act = self.builder.action act = self.builder.action
except AttributeError: except AttributeError:
executor = SCons.Executor.Null(targets=[self]) executor = SCons.Executor.Null(targets=[self]) # type: ignore
else: else:
executor = SCons.Executor.Executor(act, executor = SCons.Executor.Executor(act,
self.env or self.builder.env, self.env or self.builder.env,
@ -659,7 +661,7 @@ class Node(object, metaclass=NoSlotsPyPy):
self.executor = executor self.executor = executor
return executor return executor
def executor_cleanup(self): def executor_cleanup(self) -> None:
"""Let the executor clean up any cached information.""" """Let the executor clean up any cached information."""
try: try:
executor = self.get_executor(create=None) executor = self.get_executor(create=None)
@ -669,19 +671,19 @@ class Node(object, metaclass=NoSlotsPyPy):
if executor is not None: if executor is not None:
executor.cleanup() executor.cleanup()
def reset_executor(self): def reset_executor(self) -> None:
"""Remove cached executor; forces recompute when needed.""" """Remove cached executor; forces recompute when needed."""
try: try:
delattr(self, 'executor') delattr(self, 'executor')
except AttributeError: except AttributeError:
pass pass
def push_to_cache(self): def push_to_cache(self) -> None:
"""Try to push a node into a cache """Try to push a node into a cache
""" """
pass pass
def retrieve_from_cache(self): def retrieve_from_cache(self) -> bool:
"""Try to retrieve the node's content from a cache """Try to retrieve the node's content from a cache
This method is called from multiple threads in a parallel build, This method is called from multiple threads in a parallel build,
@ -690,13 +692,13 @@ class Node(object, metaclass=NoSlotsPyPy):
Returns true if the node was successfully retrieved. Returns true if the node was successfully retrieved.
""" """
return 0 return False
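push_to_cache() and retrieve_from_cache() are the Node-side hooks behind the user-visible derived-file cache. A hedged SConstruct sketch of the feature they serve; the cache path and target are illustrative:

env = Environment()
env.CacheDir('/tmp/scons_cache')      # built files are pushed into this directory
obj = env.Object('util.o', 'util.c')  # a later clean build may retrieve util.o from the cache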
# #
# Taskmaster interface subsystem # Taskmaster interface subsystem
# #
def make_ready(self): def make_ready(self) -> None:
"""Get a Node ready for evaluation. """Get a Node ready for evaluation.
This is called before the Taskmaster decides if the Node is This is called before the Taskmaster decides if the Node is
@ -757,7 +759,7 @@ class Node(object, metaclass=NoSlotsPyPy):
e.node = self e.node = self
raise raise
def built(self): def built(self) -> None:
"""Called just after this node is successfully built.""" """Called just after this node is successfully built."""
# Clear the implicit dependency caches of any Nodes # Clear the implicit dependency caches of any Nodes
@ -783,7 +785,6 @@ class Node(object, metaclass=NoSlotsPyPy):
except AttributeError: except AttributeError:
pass pass
self.clear() self.clear()
if self.pseudo: if self.pseudo:
@ -795,7 +796,7 @@ class Node(object, metaclass=NoSlotsPyPy):
"Cannot find target " + str(self) + " after building") "Cannot find target " + str(self) + " after building")
self.ninfo.update(self) self.ninfo.update(self)
def visited(self): def visited(self) -> None:
"""Called just after this node has been visited (with or """Called just after this node has been visited (with or
without a build).""" without a build)."""
try: try:
@ -808,7 +809,7 @@ class Node(object, metaclass=NoSlotsPyPy):
self.ninfo.update(self) self.ninfo.update(self)
SCons.Node.store_info_map[self.store_info](self) SCons.Node.store_info_map[self.store_info](self)
def release_target_info(self): def release_target_info(self) -> None:
"""Called just after this node has been marked """Called just after this node has been marked
up-to-date or was built completely. up-to-date or was built completely.
@ -825,10 +826,10 @@ class Node(object, metaclass=NoSlotsPyPy):
""" """
pass pass
def add_to_waiting_s_e(self, node): def add_to_waiting_s_e(self, node) -> None:
self.waiting_s_e.add(node) self.waiting_s_e.add(node)
def add_to_waiting_parents(self, node): def add_to_waiting_parents(self, node) -> int:
""" """
Returns the number of nodes added to our waiting parents list: Returns the number of nodes added to our waiting parents list:
1 if we add a unique waiting parent, 0 if not. (Note that the 1 if we add a unique waiting parent, 0 if not. (Note that the
@ -842,13 +843,13 @@ class Node(object, metaclass=NoSlotsPyPy):
wp.add(node) wp.add(node)
return 1 return 1
def postprocess(self): def postprocess(self) -> None:
"""Clean up anything we don't need to hang onto after we've """Clean up anything we don't need to hang onto after we've
been built.""" been built."""
self.executor_cleanup() self.executor_cleanup()
self.waiting_parents = set() self.waiting_parents = set()
def clear(self): def clear(self) -> None:
"""Completely clear a Node of all its cached state (so that it """Completely clear a Node of all its cached state (so that it
can be re-evaluated by interfaces that do continuous integration can be re-evaluated by interfaces that do continuous integration
builds). builds).
@ -868,17 +869,17 @@ class Node(object, metaclass=NoSlotsPyPy):
self.cached = 0 self.cached = 0
self.includes = None self.includes = None
def clear_memoized_values(self): def clear_memoized_values(self) -> None:
self._memo = {} self._memo = {}
def builder_set(self, builder): def builder_set(self, builder) -> None:
self.builder = builder self.builder = builder
try: try:
del self.executor del self.executor
except AttributeError: except AttributeError:
pass pass
def has_builder(self): def has_builder(self) -> bool:
"""Return whether this Node has a builder or not. """Return whether this Node has a builder or not.
In Boolean tests, this turns out to be a *lot* more efficient In Boolean tests, this turns out to be a *lot* more efficient
@ -897,11 +898,11 @@ class Node(object, metaclass=NoSlotsPyPy):
b = self.builder = None b = self.builder = None
return b is not None return b is not None
def set_explicit(self, is_explicit): def set_explicit(self, is_explicit) -> None:
self.is_explicit = is_explicit self.is_explicit = is_explicit
def has_explicit_builder(self): def has_explicit_builder(self) -> bool:
"""Return whether this Node has an explicit builder """Return whether this Node has an explicit builder.
This allows an internal Builder created by SCons to be marked This allows an internal Builder created by SCons to be marked
non-explicit, so that it can be overridden by an explicit non-explicit, so that it can be overridden by an explicit
@ -910,8 +911,8 @@ class Node(object, metaclass=NoSlotsPyPy):
try: try:
return self.is_explicit return self.is_explicit
except AttributeError: except AttributeError:
self.is_explicit = None self.is_explicit = False
return self.is_explicit return False
def get_builder(self, default_builder=None): def get_builder(self, default_builder=None):
"""Return the set builder, or a specified default value""" """Return the set builder, or a specified default value"""
@ -922,7 +923,7 @@ class Node(object, metaclass=NoSlotsPyPy):
multiple_side_effect_has_builder = has_builder multiple_side_effect_has_builder = has_builder
def is_derived(self): def is_derived(self) -> bool:
""" """
Returns true if this node is derived (i.e. built). Returns true if this node is derived (i.e. built).
@ -934,11 +935,11 @@ class Node(object, metaclass=NoSlotsPyPy):
""" """
return _is_derived_map[self._func_is_derived](self) return _is_derived_map[self._func_is_derived](self)
def is_sconscript(self): def is_sconscript(self) -> bool:
""" Returns true if this node is an sconscript """ """ Returns true if this node is an sconscript """
return self in SConscriptNodes return self in SConscriptNodes
def is_conftest(self): def is_conftest(self) -> bool:
""" Returns true if this node is an conftest node""" """ Returns true if this node is an conftest node"""
try: try:
self.attributes.conftest_node self.attributes.conftest_node
@ -1050,14 +1051,14 @@ class Node(object, metaclass=NoSlotsPyPy):
scanner = scanner.select(node) scanner = scanner.select(node)
return scanner return scanner
def add_to_implicit(self, deps): def add_to_implicit(self, deps) -> None:
if not hasattr(self, 'implicit') or self.implicit is None: if not hasattr(self, 'implicit') or self.implicit is None:
self.implicit = [] self.implicit = []
self.implicit_set = set() self.implicit_set = set()
self._children_reset() self._children_reset()
self._add_child(self.implicit, self.implicit_set, deps) self._add_child(self.implicit, self.implicit_set, deps)
def scan(self): def scan(self) -> None:
"""Scan this node's dependents for implicit dependencies.""" """Scan this node's dependents for implicit dependencies."""
# Don't bother scanning non-derived files, because we don't # Don't bother scanning non-derived files, because we don't
# care what their dependencies are. # care what their dependencies are.
@ -1119,7 +1120,7 @@ class Node(object, metaclass=NoSlotsPyPy):
""" """
return scanner.select(self) return scanner.select(self)
def env_set(self, env, safe=0): def env_set(self, env, safe: bool=False) -> None:
if safe and self.env: if safe and self.env:
return return
self.env = env self.env = env
@ -1197,7 +1198,7 @@ class Node(object, metaclass=NoSlotsPyPy):
return binfo return binfo
def del_binfo(self): def del_binfo(self) -> None:
"""Delete the build info from this node.""" """Delete the build info from this node."""
try: try:
delattr(self, 'binfo') delattr(self, 'binfo')
@ -1226,32 +1227,32 @@ class Node(object, metaclass=NoSlotsPyPy):
# #
# #
def set_precious(self, precious = 1): def set_precious(self, precious: int = 1) -> None:
"""Set the Node's precious value.""" """Set the Node's precious value."""
self.precious = precious self.precious = precious
def set_pseudo(self, pseudo = True): def set_pseudo(self, pseudo: bool = True) -> None:
"""Set the Node's precious value.""" """Set the Node's pseudo value."""
self.pseudo = pseudo self.pseudo = pseudo
def set_noclean(self, noclean = 1): def set_noclean(self, noclean: int = 1) -> None:
"""Set the Node's noclean value.""" """Set the Node's noclean value."""
# Make sure noclean is an integer so the --debug=stree # Make sure noclean is an integer so the --debug=stree
# output in Util.py can use it as an index. # output in Util.py can use it as an index.
self.noclean = noclean and 1 or 0 self.noclean = noclean and 1 or 0
def set_nocache(self, nocache = 1): def set_nocache(self, nocache: int = 1) -> None:
"""Set the Node's nocache value.""" """Set the Node's nocache value."""
# Make sure nocache is an integer so the --debug=stree # Make sure nocache is an integer so the --debug=stree
# output in Util.py can use it as an index. # output in Util.py can use it as an index.
self.nocache = nocache and 1 or 0 self.nocache = nocache and 1 or 0
def set_always_build(self, always_build = 1): def set_always_build(self, always_build: int = 1) -> None:
"""Set the Node's always_build value.""" """Set the Node's always_build value."""
self.always_build = always_build self.always_build = always_build
def exists(self): def exists(self) -> bool:
"""Does this node exists?""" """Reports whether node exists."""
return _exists_map[self._func_exists](self) return _exists_map[self._func_exists](self)
def rexists(self): def rexists(self):
@ -1263,7 +1264,7 @@ class Node(object, metaclass=NoSlotsPyPy):
"""Fetch the contents of the entry.""" """Fetch the contents of the entry."""
return _get_contents_map[self._func_get_contents](self) return _get_contents_map[self._func_get_contents](self)
def missing(self): def missing(self) -> bool:
return not self.is_derived() and \ return not self.is_derived() and \
not self.linked and \ not self.linked and \
not self.rexists() not self.rexists()
@ -1284,7 +1285,7 @@ class Node(object, metaclass=NoSlotsPyPy):
s = str(e) s = str(e)
raise SCons.Errors.UserError("attempted to add a non-Node dependency to %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e))) raise SCons.Errors.UserError("attempted to add a non-Node dependency to %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e)))
def add_prerequisite(self, prerequisite): def add_prerequisite(self, prerequisite) -> None:
"""Adds prerequisites""" """Adds prerequisites"""
if self.prerequisites is None: if self.prerequisites is None:
self.prerequisites = UniqueList() self.prerequisites = UniqueList()
@ -1317,7 +1318,7 @@ class Node(object, metaclass=NoSlotsPyPy):
s = str(e) s = str(e)
raise SCons.Errors.UserError("attempted to add a non-Node as source of %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e))) raise SCons.Errors.UserError("attempted to add a non-Node as source of %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e)))
def _add_child(self, collection, set, child): def _add_child(self, collection, set, child) -> None:
"""Adds 'child' to 'collection', first checking 'set' to see if it's """Adds 'child' to 'collection', first checking 'set' to see if it's
already present.""" already present."""
added = None added = None
@ -1329,16 +1330,16 @@ class Node(object, metaclass=NoSlotsPyPy):
if added: if added:
self._children_reset() self._children_reset()
def set_specific_source(self, source): def set_specific_source(self, source) -> None:
self.add_source(source) self.add_source(source)
self._specific_sources = True self._specific_sources = True
def add_wkid(self, wkid): def add_wkid(self, wkid) -> None:
"""Add a node to the list of kids waiting to be evaluated""" """Add a node to the list of kids waiting to be evaluated"""
if self.wkids is not None: if self.wkids is not None:
self.wkids.append(wkid) self.wkids.append(wkid)
def _children_reset(self): def _children_reset(self) -> None:
self.clear_memoized_values() self.clear_memoized_values()
# We need to let the Executor clear out any calculated # We need to let the Executor clear out any calculated
# build info that it's cached so we can re-calculate it. # build info that it's cached so we can re-calculate it.
@ -1381,7 +1382,7 @@ class Node(object, metaclass=NoSlotsPyPy):
self._memo['_children_get'] = children self._memo['_children_get'] = children
return children return children
def all_children(self, scan=1): def all_children(self, scan: int=1):
"""Return a list of all the node's direct children.""" """Return a list of all the node's direct children."""
if scan: if scan:
self.scan() self.scan()
@ -1405,14 +1406,14 @@ class Node(object, metaclass=NoSlotsPyPy):
# internally anyway...) # internally anyway...)
return list(chain.from_iterable([_f for _f in [self.sources, self.depends, self.implicit] if _f])) return list(chain.from_iterable([_f for _f in [self.sources, self.depends, self.implicit] if _f]))
def children(self, scan=1): def children(self, scan: int=1):
"""Return a list of the node's direct children, minus those """Return a list of the node's direct children, minus those
that are ignored by this node.""" that are ignored by this node."""
if scan: if scan:
self.scan() self.scan()
return self._children_get() return self._children_get()
def set_state(self, state): def set_state(self, state) -> None:
self.state = state self.state = state
def get_state(self): def get_state(self):
@ -1425,7 +1426,7 @@ class Node(object, metaclass=NoSlotsPyPy):
env = SCons.Defaults.DefaultEnvironment() env = SCons.Defaults.DefaultEnvironment()
return env return env
def Decider(self, function): def Decider(self, function) -> None:
foundkey = None foundkey = None
for k, v in _decider_map.items(): for k, v in _decider_map.items():
if v == function: if v == function:
@ -1436,7 +1437,7 @@ class Node(object, metaclass=NoSlotsPyPy):
_decider_map[foundkey] = function _decider_map[foundkey] = function
self.changed_since_last_build = foundkey self.changed_since_last_build = foundkey
def Tag(self, key, value): def Tag(self, key, value) -> None:
""" Add a user-defined tag. """ """ Add a user-defined tag. """
if not self._tags: if not self._tags:
self._tags = {} self._tags = {}
@ -1448,7 +1449,7 @@ class Node(object, metaclass=NoSlotsPyPy):
return None return None
return self._tags.get(key, None) return self._tags.get(key, None)
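Tag() and the accessor whose tail is shown above (GetTag() in the SCons sources) attach arbitrary user metadata to a node; the packaging tools read such tags. A hedged sketch with an illustrative key and target:

env = Environment()
lib = env.StaticLibrary('util', ['util.c'])[0]   # builders return a NodeList; index it to get the Node
lib.Tag('role', 'support-library')
print(lib.GetTag('role'))      # -> 'support-library'
print(lib.GetTag('other'))     # -> None when the key (or any tag) is missing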
def changed(self, node=None, allowcache=False): def changed(self, node=None, allowcache: bool=False):
""" """
Returns if the node is up-to-date with respect to the BuildInfo Returns if the node is up-to-date with respect to the BuildInfo
stored last time it was built. The default behavior is to compare stored last time it was built. The default behavior is to compare
@ -1512,12 +1513,12 @@ class Node(object, metaclass=NoSlotsPyPy):
return result return result
def is_up_to_date(self): def is_up_to_date(self) -> bool:
"""Default check for whether the Node is current: unknown Node """Default check for whether the Node is current: unknown Node
subtypes are always out of date, so they will always get built.""" subtypes are always out of date, so they will always get built."""
return None return False
def children_are_up_to_date(self): def children_are_up_to_date(self) -> bool:
"""Alternate check for whether the Node is current: If all of """Alternate check for whether the Node is current: If all of
our children were up-to-date, then this Node was up-to-date, too. our children were up-to-date, then this Node was up-to-date, too.
@ -1526,7 +1527,7 @@ class Node(object, metaclass=NoSlotsPyPy):
# Allow the children to calculate their signatures. # Allow the children to calculate their signatures.
self.binfo = self.get_binfo() self.binfo = self.get_binfo()
if self.always_build: if self.always_build:
return None return False
state = 0 state = 0
for kid in self.children(None): for kid in self.children(None):
s = kid.get_state() s = kid.get_state()
@ -1534,10 +1535,10 @@ class Node(object, metaclass=NoSlotsPyPy):
state = s state = s
return (state == 0 or state == SCons.Node.up_to_date) return (state == 0 or state == SCons.Node.up_to_date)
def is_literal(self): def is_literal(self) -> bool:
"""Always pass the string representation of a Node to """Always pass the string representation of a Node to
the command interpreter literally.""" the command interpreter literally."""
return 1 return True
def render_include_tree(self): def render_include_tree(self):
""" """
@ -1710,12 +1711,12 @@ class Node(object, metaclass=NoSlotsPyPy):
return ( ' '*11).join(lines) return ( ' '*11).join(lines)
class NodeList(collections.UserList): class NodeList(collections.UserList):
def __str__(self): def __str__(self) -> str:
return str(list(map(str, self.data))) return str(list(map(str, self.data)))
def get_children(node, parent): return node.children() def get_children(node, parent): return node.children()
def ignore_cycle(node, stack): pass def ignore_cycle(node, stack) -> None: pass
def do_nothing(node, parent): pass def do_nothing(node, parent) -> None: pass
class Walker: class Walker:
"""An iterator for walking a Node tree. """An iterator for walking a Node tree.
@ -1732,7 +1733,7 @@ class Walker:
""" """
def __init__(self, node, kids_func=get_children, def __init__(self, node, kids_func=get_children,
cycle_func=ignore_cycle, cycle_func=ignore_cycle,
eval_func=do_nothing): eval_func=do_nothing) -> None:
self.kids_func = kids_func self.kids_func = kids_func
self.cycle_func = cycle_func self.cycle_func = cycle_func
self.eval_func = eval_func self.eval_func = eval_func
@ -1771,7 +1772,7 @@ class Walker:
return node return node
return None return None
def is_done(self): def is_done(self) -> bool:
return not self.stack return not self.stack
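Walker performs a depth-first walk of a node's dependency graph, handing back children before the nodes that depend on them. A hedged sketch of driving it directly; importing SCons.Node from an SConstruct and the 'hello' target are illustrative choices, not something this change requires:

import SCons.Node

env = Environment()
prog = env.Program('hello', 'hello.c')[0]

walker = SCons.Node.Walker(prog)   # default kids_func/cycle_func/eval_func as defined above
node = walker.get_next()
while node is not None:
    print(node)                    # leaves are returned before their parents
    node = walker.get_next()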
View file
@ -64,10 +64,9 @@ def node_conv(obj):
return result return result
class _PathList: class _PathList:
""" """An actual PathList object."""
An actual PathList object.
""" def __init__(self, pathlist, split=True) -> None:
def __init__(self, pathlist):
""" """
Initializes a PathList object, canonicalizing the input and Initializes a PathList object, canonicalizing the input and
pre-processing it for quicker substitution later. pre-processing it for quicker substitution later.
@ -94,7 +93,10 @@ class _PathList:
over and over for each target. over and over for each target.
""" """
if SCons.Util.is_String(pathlist): if SCons.Util.is_String(pathlist):
if split:
pathlist = pathlist.split(os.pathsep) pathlist = pathlist.split(os.pathsep)
else: # no splitting, but still need a list
pathlist = [pathlist]
elif not SCons.Util.is_Sequence(pathlist): elif not SCons.Util.is_Sequence(pathlist):
pathlist = [pathlist] pathlist = [pathlist]
@ -113,7 +115,7 @@ class _PathList:
self.pathlist = tuple(pl) self.pathlist = tuple(pl)
def __len__(self): return len(self.pathlist) def __len__(self) -> int: return len(self.pathlist)
def __getitem__(self, i): return self.pathlist[i] def __getitem__(self, i): return self.pathlist[i]
@ -141,8 +143,7 @@ class _PathList:
class PathListCache: class PathListCache:
""" """A class to handle caching of PathList lookups.
A class to handle caching of PathList lookups.
This class gets instantiated once and then deleted from the namespace, This class gets instantiated once and then deleted from the namespace,
so it's used as a Singleton (although we don't enforce that in the so it's used as a Singleton (although we don't enforce that in the
@ -168,7 +169,7 @@ class PathListCache:
cheaply avoid re-parsing both values of CPPPATH by using the cheaply avoid re-parsing both values of CPPPATH by using the
common value from this cache. common value from this cache.
""" """
def __init__(self): def __init__(self) -> None:
self._memo = {} self._memo = {}
def _PathList_key(self, pathlist): def _PathList_key(self, pathlist):
@ -189,7 +190,7 @@ class PathListCache:
return pathlist return pathlist
@SCons.Memoize.CountDictCall(_PathList_key) @SCons.Memoize.CountDictCall(_PathList_key)
def PathList(self, pathlist): def PathList(self, pathlist, split=True):
""" """
Returns the cached _PathList object for the specified pathlist, Returns the cached _PathList object for the specified pathlist,
creating and caching a new object as necessary. creating and caching a new object as necessary.
@ -206,7 +207,7 @@ class PathListCache:
except KeyError: except KeyError:
pass pass
result = _PathList(pathlist) result = _PathList(pathlist, split)
memo_dict[pathlist] = result memo_dict[pathlist] = result
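The new split flag controls whether a string is broken on os.pathsep before canonicalization. From the user side this is what lets a path-valued variable be given either as a list or as a single separator-joined string; a hedged sketch (paths are illustrative, and the equivalence rests on the split-on-os.pathsep behaviour shown above):

import os

env = Environment()
env.Append(CPPPATH=os.pathsep.join(['include', '/opt/foo/include']))
# effectively the same search path as:
# env.Append(CPPPATH=['include', '/opt/foo/include'])
env.Program('hello', 'hello.c')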
View file
@ -130,14 +130,14 @@ def DefaultToolList(platform, env):
class PlatformSpec: class PlatformSpec:
def __init__(self, name, generate): def __init__(self, name, generate) -> None:
self.name = name self.name = name
self.generate = generate self.generate = generate
def __call__(self, *args, **kw): def __call__(self, *args, **kw):
return self.generate(*args, **kw) return self.generate(*args, **kw)
def __str__(self): def __str__(self) -> str:
return self.name return self.name
@ -192,7 +192,7 @@ class TempFileMunge:
env["TEMPFILEARGESCFUNC"] = tempfile_arg_esc_func env["TEMPFILEARGESCFUNC"] = tempfile_arg_esc_func
""" """
def __init__(self, cmd, cmdstr = None): def __init__(self, cmd, cmdstr = None) -> None:
self.cmd = cmd self.cmd = cmd
self.cmdstr = cmdstr self.cmdstr = cmdstr
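TempFileMunge is installed as $TEMPFILE by the platform modules and rewrites a command through a temporary response file once its expanded length exceeds $MAXLINELENGTH. A hedged sketch of wiring it into a command variable; the LINKCOM value is illustrative:

from SCons.Platform import TempFileMunge

env = Environment()
env['TEMPFILE'] = TempFileMunge    # normally already set by the platform module
env['MAXLINELENGTH'] = 2048        # longer expanded commands go through a temp file
env['LINKCOM'] = '${TEMPFILE("$LINK $LINKFLAGS -o $TARGET $SOURCES", "$LINKCOMSTR")}'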
@ -323,7 +323,7 @@ class TempFileMunge:
return cmdlist return cmdlist
def _print_cmd_str(self, target, source, env, cmdstr): def _print_cmd_str(self, target, source, env, cmdstr) -> None:
# check if the user has specified a cmd line print function # check if the user has specified a cmd line print function
print_func = None print_func = None
try: try:
View file
@ -28,7 +28,7 @@ will usually be imported through the generic SCons.Platform.Platform()
selection method. selection method.
""" """
import subprocess from subprocess import PIPE
from . import posix from . import posix
@ -47,27 +47,26 @@ def get_xlc(env, xlc=None, packages=[]):
xlc = xlc[0] xlc = xlc[0]
for package in packages: for package in packages:
# find the installed filename, which may be a symlink as well # find the installed filename, which may be a symlink as well
pipe = SCons.Action._subproc(env, ['lslpp', '-fc', package], cp = SCons.Action.scons_subproc_run(
stdin = 'devnull', env, ['lslpp', '-fc', package], universal_newlines=True, stdout=PIPE
stderr = 'devnull', )
universal_newlines=True,
stdout = subprocess.PIPE)
# output of lslpp is something like this: # output of lslpp is something like this:
# #Path:Fileset:File # #Path:Fileset:File
# /usr/lib/objrepos:vac.C 6.0.0.0:/usr/vac/exe/xlCcpp # /usr/lib/objrepos:vac.C 6.0.0.0:/usr/vac/exe/xlCcpp
# /usr/lib/objrepos:vac.C 6.0.0.0:/usr/vac/bin/xlc_r -> /usr/vac/bin/xlc # /usr/lib/objrepos:vac.C 6.0.0.0:/usr/vac/bin/xlc_r -> /usr/vac/bin/xlc
for line in pipe.stdout: for line in cp.stdout.splitlines():
if xlcPath: if xlcPath:
continue # read everything to let lslpp terminate continue # read everything to let lslpp terminate
fileset, filename = line.split(':')[1:3] fileset, filename = line.split(':')[1:3]
filename = filename.split()[0] filename = filename.split()[0]
if ('/' in xlc and filename == xlc) \ if ('/' in xlc and filename == xlc) or (
or ('/' not in xlc and filename.endswith('/' + xlc)): '/' not in xlc and filename.endswith('/' + xlc)
):
xlcVersion = fileset.split()[1] xlcVersion = fileset.split()[1]
xlcPath, sep, xlc = filename.rpartition('/') xlcPath, sep, xlc = filename.rpartition('/')
return (xlcPath, xlc, xlcVersion) return (xlcPath, xlc, xlcVersion)
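get_xlc() now calls SCons.Action.scons_subproc_run(), the subprocess.run() wrapper, instead of the private _subproc() pipe helper. A hedged sketch of the same call pattern for another external tool; the command and the construction variable it fills are illustrative:

from subprocess import PIPE
import SCons.Action

def record_cc_banner(env) -> None:
    # Run the tool with the environment's execution ENV and capture stdout as text.
    cp = SCons.Action.scons_subproc_run(
        env, ['gcc', '--version'], universal_newlines=True, stdout=PIPE
    )
    env['CC_BANNER'] = cp.stdout.splitlines()[0] if cp.stdout else ''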
def generate(env): def generate(env) -> None:
posix.generate(env) posix.generate(env)
#Based on AIX 5.2: ARG_MAX=24576 - 3000 for environment expansion #Based on AIX 5.2: ARG_MAX=24576 - 3000 for environment expansion
env['MAXLINELENGTH'] = 21576 env['MAXLINELENGTH'] = 21576
View file
@ -40,15 +40,16 @@ if sys.platform == 'win32':
r'C:\cygwin\bin' r'C:\cygwin\bin'
] ]
def generate(env): def generate(env) -> None:
posix.generate(env) posix.generate(env)
env['PROGPREFIX'] = '' env['PROGPREFIX'] = ''
env['PROGSUFFIX'] = '.exe' env['PROGSUFFIX'] = '.exe'
env['SHLIBPREFIX'] = '' env['SHLIBPREFIX'] = ''
env['SHLIBSUFFIX'] = '.dll' env['SHLIBSUFFIX'] = '.dll'
env['LIBPREFIXES'] = [ '$LIBPREFIX', '$SHLIBPREFIX', '$IMPLIBPREFIX' ] env['LIBPREFIXES'] = ['$LIBPREFIX', '$SHLIBPREFIX', '$IMPLIBPREFIX']
env['LIBSUFFIXES'] = [ '$LIBSUFFIX', '$SHLIBSUFFIX', '$IMPLIBSUFFIX' ] env['LIBSUFFIXES'] = ['$LIBSUFFIX', '$SHLIBSUFFIX', '$IMPLIBSUFFIX']
env['LIBLITERALPREFIX'] = ':'
env['TEMPFILE'] = TempFileMunge env['TEMPFILE'] = TempFileMunge
env['TEMPFILEPREFIX'] = '@' env['TEMPFILEPREFIX'] = '@'
env['MAXLINELENGTH'] = 2048 env['MAXLINELENGTH'] = 2048
View file
@ -32,7 +32,7 @@ from . import posix
import os import os
def generate(env): def generate(env) -> None:
posix.generate(env) posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib' env['SHLIBSUFFIX'] = '.dylib'
env['HOST_OS'] = 'darwin' env['HOST_OS'] = 'darwin'
@ -54,7 +54,7 @@ def generate(env):
for file in filelist: for file in filelist:
if os.path.isfile(file): if os.path.isfile(file):
with open(file, 'r') as f: with open(file) as f:
lines = f.readlines() lines = f.readlines()
for line in lines: for line in lines:
if line: if line:
View file
@ -30,7 +30,7 @@ selection method.
from . import posix from . import posix
def generate(env): def generate(env) -> None:
posix.generate(env) posix.generate(env)
#Based on HP-UX11i: ARG_MAX=2048000 - 3000 for environment expansion #Based on HP-UX11i: ARG_MAX=2048000 - 3000 for environment expansion
env['MAXLINELENGTH'] = 2045000 env['MAXLINELENGTH'] = 2045000
View file
@ -30,7 +30,7 @@ selection method.
from . import posix from . import posix
def generate(env): def generate(env) -> None:
posix.generate(env) posix.generate(env)
env['HOST_OS'] = 'irix' env['HOST_OS'] = 'irix'
View file
@ -29,5 +29,7 @@ MINGW_DEFAULT_PATHS = []
if sys.platform == 'win32': if sys.platform == 'win32':
MINGW_DEFAULT_PATHS = [ MINGW_DEFAULT_PATHS = [
r'C:\msys64', r'C:\msys64',
r'C:\msys' r'C:\msys64\usr\bin',
r'C:\msys',
r'C:\msys\usr\bin'
] ]
View file
@ -30,7 +30,7 @@ selection method.
from . import win32 from . import win32
def generate(env): def generate(env) -> None:
if 'ENV' not in env: if 'ENV' not in env:
env['ENV'] = {} env['ENV'] = {}
env['OBJPREFIX'] = '' env['OBJPREFIX'] = ''
@ -43,8 +43,9 @@ def generate(env):
env['LIBSUFFIX'] = '.lib' env['LIBSUFFIX'] = '.lib'
env['SHLIBPREFIX'] = '' env['SHLIBPREFIX'] = ''
env['SHLIBSUFFIX'] = '.dll' env['SHLIBSUFFIX'] = '.dll'
env['LIBPREFIXES'] = '$LIBPREFIX' env['LIBPREFIXES'] = ['$LIBPREFIX']
env['LIBSUFFIXES'] = [ '$LIBSUFFIX', '$SHLIBSUFFIX' ] env['LIBSUFFIXES'] = ['$LIBSUFFIX', '$SHLIBSUFFIX']
env['LIBLITERALPREFIX'] = ''
env['HOST_OS'] = 'os2' env['HOST_OS'] = 'os2'
env['HOST_ARCH'] = win32.get_architecture().arch env['HOST_ARCH'] = win32.get_architecture().arch
View file
@ -74,7 +74,7 @@ def piped_env_spawn(sh, escape, cmd, args, env, stdout, stderr):
env, stdout, stderr) env, stdout, stderr)
def generate(env): def generate(env) -> None:
# Bearing in mind we have python 2.4 as a baseline, we can just do this: # Bearing in mind we have python 2.4 as a baseline, we can just do this:
spawn = subprocess_spawn spawn = subprocess_spawn
pspawn = piped_env_spawn pspawn = piped_env_spawn
@ -93,8 +93,9 @@ def generate(env):
env['LIBSUFFIX'] = '.a' env['LIBSUFFIX'] = '.a'
env['SHLIBPREFIX'] = '$LIBPREFIX' env['SHLIBPREFIX'] = '$LIBPREFIX'
env['SHLIBSUFFIX'] = '.so' env['SHLIBSUFFIX'] = '.so'
env['LIBPREFIXES'] = [ '$LIBPREFIX' ] env['LIBPREFIXES'] = ['$LIBPREFIX']
env['LIBSUFFIXES'] = [ '$LIBSUFFIX', '$SHLIBSUFFIX' ] env['LIBSUFFIXES'] = ['$LIBSUFFIX', '$SHLIBSUFFIX']
env['LIBLITERALPREFIX'] = ''
env['HOST_OS'] = 'posix' env['HOST_OS'] = 'posix'
env['HOST_ARCH'] = platform.machine() env['HOST_ARCH'] = platform.machine()
env['PSPAWN'] = pspawn env['PSPAWN'] = pspawn
View file
@ -30,7 +30,7 @@ selection method.
from . import posix from . import posix
def generate(env): def generate(env) -> None:
posix.generate(env) posix.generate(env)
# Based on sunSparc 8:32bit # Based on sunSparc 8:32bit
# ARG_MAX=1048320 - 3000 for environment expansion # ARG_MAX=1048320 - 3000 for environment expansion
View file
@ -51,7 +51,7 @@ def _running_in_virtualenv():
(hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix)) (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix))
def _is_path_in(path, base): def _is_path_in(path, base) -> bool:
"""Returns true if **path** is located under the **base** directory.""" """Returns true if **path** is located under the **base** directory."""
if not path or not base: # empty path may happen, base too if not path or not base: # empty path may happen, base too
return False return False
@ -59,7 +59,7 @@ def _is_path_in(path, base):
return (not rp.startswith(os.path.pardir)) and (not rp == os.path.curdir) return (not rp.startswith(os.path.pardir)) and (not rp == os.path.curdir)
def _inject_venv_variables(env): def _inject_venv_variables(env) -> None:
if 'ENV' not in env: if 'ENV' not in env:
env['ENV'] = {} env['ENV'] = {}
ENV = env['ENV'] ENV = env['ENV']
@ -69,7 +69,7 @@ def _inject_venv_variables(env):
except KeyError: except KeyError:
pass pass
def _inject_venv_path(env, path_list=None): def _inject_venv_path(env, path_list=None) -> None:
"""Modify environment such that SCons will take into account its virtualenv """Modify environment such that SCons will take into account its virtualenv
when running external tools.""" when running external tools."""
if path_list is None: if path_list is None:
@ -86,7 +86,7 @@ def select_paths_in_venv(path_list):
return [path for path in path_list if IsInVirtualenv(path)] return [path for path in path_list if IsInVirtualenv(path)]
def ImportVirtualenv(env): def ImportVirtualenv(env) -> None:
"""Copies virtualenv-related environment variables from OS environment """Copies virtualenv-related environment variables from OS environment
to ``env['ENV']`` and prepends virtualenv's PATH to ``env['ENV']['PATH']``. to ``env['ENV']`` and prepends virtualenv's PATH to ``env['ENV']['PATH']``.
""" """
View file
@ -58,7 +58,7 @@ if False:
shutil.copy2 = CopyFile shutil.copy2 = CopyFile
def win_api_copyfile(src,dst): def win_api_copyfile(src,dst) -> None:
CopyFile(src,dst) CopyFile(src,dst)
os.utime(dst) os.utime(dst)
@ -81,9 +81,8 @@ try:
# This locked version of spawnve works around a Windows # This locked version of spawnve works around a Windows
# MSVCRT bug, because its spawnve is not thread-safe. # MSVCRT bug, because its spawnve is not thread-safe.
# Without this, python can randomly crash while using -jN. # Without this, python can randomly crash while using -jN.
# See the python bug at http://bugs.python.org/issue6476 # See the python bug at https://github.com/python/cpython/issues/50725
# and SCons issue at # and SCons issue at https://github.com/SCons/scons/issues/2449
# https://github.com/SCons/scons/issues/2449
def spawnve(mode, file, args, env): def spawnve(mode, file, args, env):
spawn_lock.acquire() spawn_lock.acquire()
try: try:
@ -166,18 +165,20 @@ def piped_spawn(sh, escape, cmd, args, env, stdout, stderr):
# and do clean up stuff # and do clean up stuff
if stdout is not None and not stdoutRedirected: if stdout is not None and not stdoutRedirected:
try: try:
with open(tmpFileStdoutName, "r") as tmpFileStdout: with open(tmpFileStdoutName, "rb") as tmpFileStdout:
stdout.write(tmpFileStdout.read()) output = tmpFileStdout.read()
stdout.write(output.decode(stdout.encoding, "replace"))
os.remove(tmpFileStdoutName) os.remove(tmpFileStdoutName)
except (IOError, OSError): except OSError:
pass pass
if stderr is not None and not stderrRedirected: if stderr is not None and not stderrRedirected:
try: try:
with open(tmpFileStderrName, "r") as tmpFileStderr: with open(tmpFileStderrName, "rb") as tmpFileStderr:
stderr.write(tmpFileStderr.read()) errors = tmpFileStderr.read()
stderr.write(errors.decode(stderr.encoding, "replace"))
os.remove(tmpFileStderrName) os.remove(tmpFileStderrName)
except (IOError, OSError): except OSError:
pass pass
return ret return ret
@ -186,7 +187,7 @@ def piped_spawn(sh, escape, cmd, args, env, stdout, stderr):
def exec_spawn(l, env): def exec_spawn(l, env):
try: try:
result = spawnve(os.P_WAIT, l[0], l, env) result = spawnve(os.P_WAIT, l[0], l, env)
except (OSError, EnvironmentError) as e: except OSError as e:
try: try:
result = exitvalmap[e.errno] result = exitvalmap[e.errno]
sys.stderr.write("scons: %s: %s\n" % (l[0], e.strerror)) sys.stderr.write("scons: %s: %s\n" % (l[0], e.strerror))
@ -283,7 +284,7 @@ class ArchDefinition:
Determine which Windows CPU we're running on. Determine which Windows CPU we're running on.
A class for defining architecture-specific settings and logic. A class for defining architecture-specific settings and logic.
""" """
def __init__(self, arch, synonyms=[]): def __init__(self, arch, synonyms=[]) -> None:
self.arch = arch self.arch = arch
self.synonyms = synonyms self.synonyms = synonyms
@ -298,6 +299,11 @@ SupportedArchitectureList = [
['AMD64', 'amd64', 'em64t', 'EM64T', 'x86_64'], ['AMD64', 'amd64', 'em64t', 'EM64T', 'x86_64'],
), ),
ArchDefinition(
'arm64',
['ARM64', 'aarch64', 'AARCH64', 'AArch64'],
),
ArchDefinition( ArchDefinition(
'ia64', 'ia64',
['IA64'], ['IA64'],
@ -315,9 +321,20 @@ def get_architecture(arch=None):
"""Returns the definition for the specified architecture string. """Returns the definition for the specified architecture string.
If no string is specified, the system default is returned (as defined If no string is specified, the system default is returned (as defined
by the PROCESSOR_ARCHITEW6432 or PROCESSOR_ARCHITECTURE environment by the registry PROCESSOR_ARCHITECTURE value, PROCESSOR_ARCHITEW6432
variables). environment variable, PROCESSOR_ARCHITECTURE environment variable, or
the platform machine).
""" """
if arch is None:
if SCons.Util.can_read_reg:
try:
k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE,
'SYSTEM\\CurrentControlSet\\Control\\Session Manager\\Environment')
val, tok = SCons.Util.RegQueryValueEx(k, 'PROCESSOR_ARCHITECTURE')
except SCons.Util.RegError:
val = ''
if val and val in SupportedArchitectureMap:
arch = val
if arch is None: if arch is None:
arch = os.environ.get('PROCESSOR_ARCHITEW6432') arch = os.environ.get('PROCESSOR_ARCHITEW6432')
if not arch: if not arch:
@ -405,8 +422,9 @@ def generate(env):
env['LIBSUFFIX'] = '.lib' env['LIBSUFFIX'] = '.lib'
env['SHLIBPREFIX'] = '' env['SHLIBPREFIX'] = ''
env['SHLIBSUFFIX'] = '.dll' env['SHLIBSUFFIX'] = '.dll'
env['LIBPREFIXES'] = [ '$LIBPREFIX' ] env['LIBPREFIXES'] = ['$LIBPREFIX']
env['LIBSUFFIXES'] = [ '$LIBSUFFIX' ] env['LIBSUFFIXES'] = ['$LIBSUFFIX']
env['LIBLITERALPREFIX'] = ''
env['PSPAWN'] = piped_spawn env['PSPAWN'] = piped_spawn
env['SPAWN'] = spawn env['SPAWN'] = spawn
env['SHELL'] = cmd_interp env['SHELL'] = cmd_interp
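With the arm64 ArchDefinition and the registry lookup added above, get_architecture() recognizes aarch64/ARM64 synonyms for both host detection and explicit requests. A hedged sketch for a Windows host; the targets are illustrative and assume an MSVC toolchain that supports arm64:

env = Environment(TARGET_ARCH='arm64')   # 'ARM64' or 'aarch64' resolve to the same definition
print('host:', env.get('HOST_ARCH'), 'target:', env.get('TARGET_ARCH'))
env.Program('hello', 'hello.c')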
View file
@ -39,6 +39,7 @@ import os
import re import re
import sys import sys
import traceback import traceback
from typing import Tuple
import SCons.Action import SCons.Action
import SCons.Builder import SCons.Builder
@ -61,7 +62,7 @@ SCons.Conftest.LogErrorMessages = 0
build_type = None build_type = None
build_types = ['clean', 'help'] build_types = ['clean', 'help']
def SetBuildType(buildtype): def SetBuildType(buildtype) -> None:
global build_type global build_type
build_type = buildtype build_type = buildtype
@ -73,7 +74,7 @@ FORCE=1 # force all tests to be rebuilt
CACHE=2 # force all tests to be taken from cache (raise an error, if necessary) CACHE=2 # force all tests to be taken from cache (raise an error, if necessary)
cache_mode = AUTO cache_mode = AUTO
def _set_conftest_node(node): def _set_conftest_node(node) -> None:
node.attributes.conftest_node = 1 node.attributes.conftest_node = 1
def SetCacheMode(mode): def SetCacheMode(mode):
@ -90,7 +91,7 @@ def SetCacheMode(mode):
raise ValueError("SCons.SConf.SetCacheMode: Unknown mode " + mode) raise ValueError("SCons.SConf.SetCacheMode: Unknown mode " + mode)
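The AUTO/FORCE/CACHE values set through SetCacheMode() correspond directly to the --config command-line option; roughly:

# scons --config=auto    -> cache_mode = AUTO   (re-run a test only when its inputs changed)
# scons --config=force   -> cache_mode = FORCE  (re-run every configure test)
# scons --config=cache   -> cache_mode = CACHE  (take every result from cache; error if missing)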
progress_display = SCons.Util.display # will be overwritten by SCons.Script progress_display = SCons.Util.display # will be overwritten by SCons.Script
def SetProgressDisplay(display): def SetProgressDisplay(display) -> None:
"""Set the progress display to use (called from SCons.Script)""" """Set the progress display to use (called from SCons.Script)"""
global progress_display global progress_display
progress_display = display progress_display = display
@ -102,7 +103,7 @@ _ac_config_logs = {} # all config.log files created in this build
_ac_config_hs = {} # all config.h files created in this build _ac_config_hs = {} # all config.h files created in this build
sconf_global = None # current sconf object sconf_global = None # current sconf object
def _createConfigH(target, source, env): def _createConfigH(target, source, env) -> None:
t = open(str(target[0]), "w") t = open(str(target[0]), "w")
defname = re.sub('[^A-Za-z0-9_]', '_', str(target[0]).upper()) defname = re.sub('[^A-Za-z0-9_]', '_', str(target[0]).upper())
t.write("""#ifndef %(DEFNAME)s_SEEN t.write("""#ifndef %(DEFNAME)s_SEEN
@ -119,13 +120,13 @@ def _stringConfigH(target, source, env):
return "scons: Configure: creating " + str(target[0]) return "scons: Configure: creating " + str(target[0])
def NeedConfigHBuilder(): def NeedConfigHBuilder() -> bool:
if len(_ac_config_hs) == 0: if len(_ac_config_hs) == 0:
return False return False
else: else:
return True return True
def CreateConfigHBuilder(env): def CreateConfigHBuilder(env) -> None:
"""Called if necessary just before the building targets phase begins.""" """Called if necessary just before the building targets phase begins."""
action = SCons.Action.Action(_createConfigH, action = SCons.Action.Action(_createConfigH,
_stringConfigH) _stringConfigH)
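_createConfigH() and CreateConfigHBuilder() generate the header requested through the config_h argument of Configure(); its contents come from Define() calls made during the checks. A hedged SConstruct sketch with illustrative symbol names:

env = Environment()
conf = Configure(env, config_h='config.h')
conf.Define('PACKAGE_NAME', '"myproj"', 'Name of this package')
if conf.CheckHeader('unistd.h'):
    conf.Define('HAVE_UNISTD_H', 1)
env = conf.Finish()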
@ -141,13 +142,13 @@ SCons.Warnings.enableWarningClass(SConfWarning)
# some error definitions # some error definitions
class SConfError(SCons.Errors.UserError): class SConfError(SCons.Errors.UserError):
def __init__(self,msg): def __init__(self,msg) -> None:
super().__init__(msg) super().__init__(msg)
class ConfigureDryRunError(SConfError): class ConfigureDryRunError(SConfError):
"""Raised when a file or directory needs to be updated during a Configure """Raised when a file or directory needs to be updated during a Configure
process, but the user requested a dry-run""" process, but the user requested a dry-run"""
def __init__(self,target): def __init__(self,target) -> None:
if not isinstance(target, SCons.Node.FS.File): if not isinstance(target, SCons.Node.FS.File):
msg = 'Cannot create configure directory "%s" within a dry-run.' % str(target) msg = 'Cannot create configure directory "%s" within a dry-run.' % str(target)
else: else:
@ -157,12 +158,12 @@ class ConfigureDryRunError(SConfError):
class ConfigureCacheError(SConfError): class ConfigureCacheError(SConfError):
"""Raised when a use explicitely requested the cache feature, but the test """Raised when a use explicitely requested the cache feature, but the test
is run the first time.""" is run the first time."""
def __init__(self,target): def __init__(self,target) -> None:
super().__init__('"%s" is not yet built and cache is forced.' % str(target)) super().__init__('"%s" is not yet built and cache is forced.' % str(target))
# define actions for building text files # define actions for building text files
def _createSource(target, source, env): def _createSource(target, source, env) -> None:
fd = open(str(target[0]), "w") fd = open(str(target[0]), "w")
fd.write(source[0].get_contents().decode()) fd.write(source[0].get_contents().decode())
fd.close() fd.close()
@ -180,11 +181,11 @@ class SConfBuildInfo(SCons.Node.FS.FileBuildInfo):
""" """
__slots__ = ('result', 'string') __slots__ = ('result', 'string')
def __init__(self): def __init__(self) -> None:
self.result = None # -> 0/None -> no error, != 0 error self.result = None # -> 0/None -> no error, != 0 error
self.string = None # the stdout / stderr output when building the target self.string = None # the stdout / stderr output when building the target
def set_build_result(self, result, string): def set_build_result(self, result, string) -> None:
self.result = result self.result = result
self.string = string self.string = string
@ -193,11 +194,11 @@ class Streamer:
""" """
'Sniffer' for a file-like writable object. Similar to the unix tool tee. 'Sniffer' for a file-like writable object. Similar to the unix tool tee.
""" """
def __init__(self, orig): def __init__(self, orig) -> None:
self.orig = orig self.orig = orig
self.s = io.StringIO() self.s = io.StringIO()
def write(self, str): def write(self, str) -> None:
if self.orig: if self.orig:
self.orig.write(str) self.orig.write(str)
try: try:
@ -206,7 +207,7 @@ class Streamer:
# "unicode argument expected" bug in IOStream (python 2.x) # "unicode argument expected" bug in IOStream (python 2.x)
self.s.write(str.decode()) self.s.write(str.decode())
def writelines(self, lines): def writelines(self, lines) -> None:
for l in lines: for l in lines:
self.write(l + '\n') self.write(l + '\n')
@ -216,7 +217,7 @@ class Streamer:
""" """
return self.s.getvalue() return self.s.getvalue()
def flush(self): def flush(self) -> None:
if self.orig: if self.orig:
self.orig.flush() self.orig.flush()
self.s.flush() self.s.flush()
@ -229,11 +230,11 @@ class SConfBuildTask(SCons.Taskmaster.AlwaysTask):
""" """
non_sconf_nodes = set() non_sconf_nodes = set()
def display(self, message): def display(self, message) -> None:
if sconf_global.logstream: if sconf_global.logstream:
sconf_global.logstream.write("scons: Configure: " + message + "\n") sconf_global.logstream.write("scons: Configure: " + message + "\n")
def display_cached_string(self, bi): def display_cached_string(self, bi) -> None:
""" """
Logs the original builder messages, given the SConfBuildInfo instance Logs the original builder messages, given the SConfBuildInfo instance
bi. bi.
@ -250,10 +251,9 @@ class SConfBuildTask(SCons.Taskmaster.AlwaysTask):
def failed(self): def failed(self):
# check, if the reason was a ConfigureDryRunError or a # check, if the reason was a ConfigureDryRunError or a
# ConfigureCacheError and if yes, reraise the exception # ConfigureCacheError and if yes, reraise the exception
exc_type = self.exc_info()[0] exc_type, exc, _ = self.exc_info()
if issubclass(exc_type, SConfError): if issubclass(exc_type, SConfError):
# TODO pylint E0704: bare raise not inside except raise exc
raise
elif issubclass(exc_type, SCons.Errors.BuildError): elif issubclass(exc_type, SCons.Errors.BuildError):
# we ignore Build Errors (occurs, when a test doesn't pass) # we ignore Build Errors (occurs, when a test doesn't pass)
# Clear the exception to prevent the contained traceback # Clear the exception to prevent the contained traceback
@ -265,12 +265,12 @@ class SConfBuildTask(SCons.Taskmaster.AlwaysTask):
sys.excepthook(*self.exc_info()) sys.excepthook(*self.exc_info())
return SCons.Taskmaster.Task.failed(self) return SCons.Taskmaster.Task.failed(self)
def collect_node_states(self): def collect_node_states(self) -> Tuple[bool, bool, bool]:
# returns (is_up_to_date, cached_error, cachable) # returns (is_up_to_date, cached_error, cachable)
# where is_up_to_date is 1, if the node(s) are up_to_date # where is_up_to_date is True if the node(s) are up_to_date
# cached_error is 1, if the node(s) are up_to_date, but the # cached_error is True if the node(s) are up_to_date, but the
# build will fail # build will fail
# cachable is 0, if some nodes are not in our cache # cachable is False if some nodes are not in our cache
T = 0 T = 0
changed = False changed = False
cached_error = False cached_error = False
@ -311,7 +311,7 @@ class SConfBuildTask(SCons.Taskmaster.AlwaysTask):
if cache_mode == CACHE and not cachable: if cache_mode == CACHE and not cachable:
raise ConfigureCacheError(self.targets[0]) raise ConfigureCacheError(self.targets[0])
elif cache_mode == FORCE: elif cache_mode == FORCE:
is_up_to_date = 0 is_up_to_date = False
if cached_error and is_up_to_date: if cached_error and is_up_to_date:
self.display("Building \"%s\" failed in a previous run and all " self.display("Building \"%s\" failed in a previous run and all "
@ -378,7 +378,7 @@ class SConfBuildTask(SCons.Taskmaster.AlwaysTask):
sconsign.set_entry(t.name, sconsign_entry) sconsign.set_entry(t.name, sconsign_entry)
sconsign.merge() sconsign.merge()
def make_ready_current(self): def make_ready_current(self) -> None:
# We're overriding make_ready_current() call to add to the list # We're overriding make_ready_current() call to add to the list
# of nodes used by this task, filtering out any nodes created # of nodes used by this task, filtering out any nodes created
# by the checker for its own purpose. # by the checker for its own purpose.
@ -386,7 +386,7 @@ class SConfBuildTask(SCons.Taskmaster.AlwaysTask):
super().make_ready_current() super().make_ready_current()
make_ready = make_ready_current make_ready = make_ready_current
def postprocess(self): def postprocess(self) -> None:
# We're done executing this task, so now we'll go through all the # We're done executing this task, so now we'll go through all the
# nodes used by this task which aren't nodes created for # nodes used by this task which aren't nodes created for
# Configure checkers, but rather are existing or built files # Configure checkers, but rather are existing or built files
@ -410,8 +410,8 @@ class SConfBase:
SConf run, we need to explicitly cache this error. SConf run, we need to explicitly cache this error.
""" """
def __init__(self, env, custom_tests = {}, conf_dir='$CONFIGUREDIR', def __init__(self, env, custom_tests = {}, conf_dir: str='$CONFIGUREDIR',
log_file='$CONFIGURELOG', config_h = None, _depth = 0): log_file: str='$CONFIGURELOG', config_h = None, _depth: int = 0) -> None:
"""Constructor. Pass additional tests in the custom_tests-dictionary, """Constructor. Pass additional tests in the custom_tests-dictionary,
e.g. custom_tests={'CheckPrivate':MyPrivateTest}, where MyPrivateTest e.g. custom_tests={'CheckPrivate':MyPrivateTest}, where MyPrivateTest
defines a custom test. defines a custom test.
@ -432,7 +432,7 @@ class SConfBase:
# and keep the build state consistent. # and keep the build state consistent.
def force_build(dependency, target, prev_ni, def force_build(dependency, target, prev_ni,
repo_node=None, repo_node=None,
env_decider=env.decide_source): env_decider=env.decide_source) -> bool:
try: try:
env_decider(dependency, target, prev_ni, repo_node) env_decider(dependency, target, prev_ni, repo_node)
except Exception as e: except Exception as e:
@ -496,7 +496,7 @@ class SConfBase:
return self.env return self.env
def Define(self, name, value = None, comment = None): def Define(self, name, value = None, comment = None) -> None:
""" """
Define a pre processor symbol name, with the optional given value in the Define a pre processor symbol name, with the optional given value in the
current config header. current config header.
@ -602,7 +602,7 @@ class SConfBase:
""" """
return self.pspawn(sh, escape, cmd, args, env, self.logstream, self.logstream) return self.pspawn(sh, escape, cmd, args, env, self.logstream, self.logstream)
def TryBuild(self, builder, text=None, extension=""): def TryBuild(self, builder, text=None, extension: str=""):
"""Low level TryBuild implementation. Normally you don't need to """Low level TryBuild implementation. Normally you don't need to
call that - you can use TryCompile / TryLink / TryRun instead call that - you can use TryCompile / TryLink / TryRun instead
""" """
@ -673,7 +673,7 @@ class SConfBase:
return result return result
def TryAction(self, action, text = None, extension = ""): def TryAction(self, action, text = None, extension: str = ""):
"""Tries to execute the given action with optional source file """Tries to execute the given action with optional source file
contents <text> and optional source file extension <extension>, contents <text> and optional source file extension <extension>,
Returns the status (0 : failed, 1 : ok) and the contents of the Returns the status (0 : failed, 1 : ok) and the contents of the
@ -714,6 +714,12 @@ class SConfBase:
if ok: if ok:
prog = self.lastTarget prog = self.lastTarget
pname = prog.get_internal_path() pname = prog.get_internal_path()
if sys.platform == "win32" and os.sep == "/":
# msys might have a Python where os.sep='/' on Windows.
# That builds a path in the env.Command below which breaks
# if the SHELL used is cmd because 'pname' will always have
# an os.sep in it.
pname = pname.replace(os.sep, os.altsep)
output = self.confdir.File(os.path.basename(pname)+'.out') output = self.confdir.File(os.path.basename(pname)+'.out')
node = self.env.Command(output, prog, [ [ pname, ">", "${TARGET}"] ]) node = self.env.Command(output, prog, [ [ pname, ">", "${TARGET}"] ])
ok = self.BuildNodes(node) ok = self.BuildNodes(node)
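The hunk above adjusts path handling inside TryRun(), which compiles, links and executes a test program and returns its captured output. A hedged sketch of calling it from a configure block; the probed quantity and variable name are illustrative:

env = Environment()
conf = Configure(env)
ok, out = conf.TryRun("""
#include <stdio.h>
int main(void) { printf("%d", (int)sizeof(long)); return 0; }
""", '.c')
if ok:
    conf.env['SIZEOF_LONG'] = out
env = conf.Finish()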
@ -724,7 +730,7 @@ class SConfBase:
class TestWrapper: class TestWrapper:
"""A wrapper around Tests (to ensure sanity)""" """A wrapper around Tests (to ensure sanity)"""
def __init__(self, test, sconf): def __init__(self, test, sconf) -> None:
self.test = test self.test = test
self.sconf = sconf self.sconf = sconf
def __call__(self, *args, **kw): def __call__(self, *args, **kw):
@ -737,12 +743,12 @@ class SConfBase:
context.Result("error: no result") context.Result("error: no result")
return ret return ret
def AddTest(self, test_name, test_instance): def AddTest(self, test_name, test_instance) -> None:
"""Adds test_class to this SConf instance. It can be called with """Adds test_class to this SConf instance. It can be called with
self.test_name(...)""" self.test_name(...)"""
setattr(self, test_name, SConfBase.TestWrapper(test_instance, self)) setattr(self, test_name, SConfBase.TestWrapper(test_instance, self))
def AddTests(self, tests): def AddTests(self, tests) -> None:
"""Adds all the tests given in the tests dictionary to this SConf """Adds all the tests given in the tests dictionary to this SConf
instance instance
""" """
@ -758,7 +764,7 @@ class SConfBase:
if not os.path.isdir( dirName ): if not os.path.isdir( dirName ):
os.makedirs( dirName ) os.makedirs( dirName )
def _startup(self): def _startup(self) -> None:
"""Private method. Set up logstream, and set the environment """Private method. Set up logstream, and set the environment
variables necessary for a piped build variables necessary for a piped build
""" """
@ -781,7 +787,7 @@ class SConfBase:
log_mode = "w" log_mode = "w"
fp = open(str(self.logfile), log_mode) fp = open(str(self.logfile), log_mode)
def conflog_cleanup(logf): def conflog_cleanup(logf) -> None:
logf.close() logf.close()
atexit.register(conflog_cleanup, fp) atexit.register(conflog_cleanup, fp)
@ -855,7 +861,7 @@ class CheckContext:
changed. changed.
""" """
def __init__(self, sconf): def __init__(self, sconf) -> None:
"""Constructor. Pass the corresponding SConf instance.""" """Constructor. Pass the corresponding SConf instance."""
self.sconf = sconf self.sconf = sconf
self.did_show_result = 0 self.did_show_result = 0
@ -873,7 +879,7 @@ class CheckContext:
# correctly. Note that we can't use Conftest.py's support for config.h, # correctly. Note that we can't use Conftest.py's support for config.h,
# cause we will need to specify a builder for the config.h file ... # cause we will need to specify a builder for the config.h file ...
def Message(self, text): def Message(self, text) -> None:
"""Inform about what we are doing right now, e.g. """Inform about what we are doing right now, e.g.
'Checking for SOMETHING ... ' 'Checking for SOMETHING ... '
""" """
@ -881,7 +887,7 @@ class CheckContext:
self.sconf.cached = 1 self.sconf.cached = 1
self.did_show_result = 0 self.did_show_result = 0
def Result(self, res): def Result(self, res) -> None:
"""Inform about the result of the test. If res is not a string, displays """Inform about the result of the test. If res is not a string, displays
'yes' or 'no' depending on whether res is evaluated as true or false. 'yes' or 'no' depending on whether res is evaluated as true or false.
The result is only displayed when self.did_show_result is not set. The result is only displayed when self.did_show_result is not set.
@ -923,17 +929,17 @@ class CheckContext:
#### Stuff used by Conftest.py (look there for explanations). #### Stuff used by Conftest.py (look there for explanations).
def BuildProg(self, text, ext): def BuildProg(self, text, ext) -> bool:
self.sconf.cached = 1 self.sconf.cached = 1
# TODO: should use self.vardict for $CC, $CPPFLAGS, etc. # TODO: should use self.vardict for $CC, $CPPFLAGS, etc.
return not self.TryBuild(self.env.Program, text, ext) return not self.TryBuild(self.env.Program, text, ext)
def CompileProg(self, text, ext): def CompileProg(self, text, ext) -> bool:
self.sconf.cached = 1 self.sconf.cached = 1
# TODO: should use self.vardict for $CC, $CPPFLAGS, etc. # TODO: should use self.vardict for $CC, $CPPFLAGS, etc.
return not self.TryBuild(self.env.Object, text, ext) return not self.TryBuild(self.env.Object, text, ext)
def CompileSharedObject(self, text, ext): def CompileSharedObject(self, text, ext) -> bool:
self.sconf.cached = 1 self.sconf.cached = 1
# TODO: should use self.vardict for $SHCC, $CPPFLAGS, etc. # TODO: should use self.vardict for $SHCC, $CPPFLAGS, etc.
return not self.TryBuild(self.env.SharedObject, text, ext) return not self.TryBuild(self.env.SharedObject, text, ext)
@ -944,7 +950,7 @@ class CheckContext:
st, out = self.TryRun(text, ext) st, out = self.TryRun(text, ext)
return not st, out return not st, out
def AppendLIBS(self, lib_name_list, unique=False): def AppendLIBS(self, lib_name_list, unique: bool=False):
oldLIBS = self.env.get( 'LIBS', [] ) oldLIBS = self.env.get( 'LIBS', [] )
if unique: if unique:
self.env.AppendUnique(LIBS = lib_name_list) self.env.AppendUnique(LIBS = lib_name_list)
@ -952,7 +958,7 @@ class CheckContext:
self.env.Append(LIBS = lib_name_list) self.env.Append(LIBS = lib_name_list)
return oldLIBS return oldLIBS
def PrependLIBS(self, lib_name_list, unique=False): def PrependLIBS(self, lib_name_list, unique: bool=False):
oldLIBS = self.env.get( 'LIBS', [] ) oldLIBS = self.env.get( 'LIBS', [] )
if unique: if unique:
self.env.PrependUnique(LIBS = lib_name_list) self.env.PrependUnique(LIBS = lib_name_list)
@ -965,7 +971,7 @@ class CheckContext:
self.env.Replace(LIBS = val) self.env.Replace(LIBS = val)
return oldLIBS return oldLIBS
def Display(self, msg): def Display(self, msg) -> None:
if self.sconf.cached: if self.sconf.cached:
# We assume that Display is called twice for each test here # We assume that Display is called twice for each test here
# once for the Checking for ... message and once for the result. # once for the Checking for ... message and once for the result.
@ -975,7 +981,7 @@ class CheckContext:
progress_display(msg, append_newline=0) progress_display(msg, append_newline=0)
self.Log("scons: Configure: " + msg + "\n") self.Log("scons: Configure: " + msg + "\n")
def Log(self, msg): def Log(self, msg) -> None:
if self.sconf.logstream is not None: if self.sconf.logstream is not None:
self.sconf.logstream.write(msg) self.sconf.logstream.write(msg)
@ -995,39 +1001,39 @@ def SConf(*args, **kw):
return SCons.Util.Null() return SCons.Util.Null()
def CheckFunc(context, function_name, header = None, language = None): def CheckFunc(context, function_name, header = None, language = None, funcargs = None) -> bool:
res = SCons.Conftest.CheckFunc(context, function_name, header = header, language = language) res = SCons.Conftest.CheckFunc(context, function_name, header = header, language = language, funcargs = funcargs)
context.did_show_result = 1 context.did_show_result = 1
return not res return not res
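The funcargs parameter threaded through CheckFunc here is new; it lets a check supply explicit argument text for the generated test call, which matters for C++ where calling a function with the wrong arity will not compile. A hedged SConstruct-level sketch (header, symbol and argument text are only examples):

    env = Environment()
    conf = Configure(env)
    # assumes funcargs is the literal argument text placed inside the test call
    if conf.CheckFunc('pow', header='#include <math.h>\n', language='C',
                      funcargs='2.0, 10.0'):
        env.Append(CPPDEFINES=['HAVE_POW'])
    env = conf.Finish()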
def CheckType(context, type_name, includes = "", language = None): def CheckType(context, type_name, includes: str = "", language = None) -> bool:
res = SCons.Conftest.CheckType(context, type_name, res = SCons.Conftest.CheckType(context, type_name,
header = includes, language = language) header = includes, language = language)
context.did_show_result = 1 context.did_show_result = 1
return not res return not res
def CheckTypeSize(context, type_name, includes = "", language = None, expect = None): def CheckTypeSize(context, type_name, includes: str = "", language = None, expect = None):
res = SCons.Conftest.CheckTypeSize(context, type_name, res = SCons.Conftest.CheckTypeSize(context, type_name,
header = includes, language = language, header = includes, language = language,
expect = expect) expect = expect)
context.did_show_result = 1 context.did_show_result = 1
return res return res
def CheckDeclaration(context, declaration, includes = "", language = None): def CheckDeclaration(context, declaration, includes: str = "", language = None) -> bool:
res = SCons.Conftest.CheckDeclaration(context, declaration, res = SCons.Conftest.CheckDeclaration(context, declaration,
includes = includes, includes = includes,
language = language) language = language)
context.did_show_result = 1 context.did_show_result = 1
return not res return not res
def CheckMember(context, aggregate_member, header = None, language = None): def CheckMember(context, aggregate_member, header = None, language = None) -> bool:
'''Returns the status (False : failed, True : ok).''' '''Returns the status (False : failed, True : ok).'''
res = SCons.Conftest.CheckMember(context, aggregate_member, header=header, language=language) res = SCons.Conftest.CheckMember(context, aggregate_member, header=header, language=language)
context.did_show_result = 1 context.did_show_result = 1
return not res return not res
def createIncludesFromHeaders(headers, leaveLast, include_quotes = '""'): def createIncludesFromHeaders(headers, leaveLast, include_quotes: str = '""'):
# used by CheckHeader and CheckLibWithHeader to produce C - #include # used by CheckHeader and CheckLibWithHeader to produce C - #include
# statements from the specified header (list) # statements from the specified header (list)
if not SCons.Util.is_List(headers): if not SCons.Util.is_List(headers):
@ -1043,7 +1049,7 @@ def createIncludesFromHeaders(headers, leaveLast, include_quotes = '""'):
% (include_quotes[0], s, include_quotes[1])) % (include_quotes[0], s, include_quotes[1]))
return ''.join(l), lastHeader return ''.join(l), lastHeader
def CheckHeader(context, header, include_quotes = '<>', language = None): def CheckHeader(context, header, include_quotes: str = '<>', language = None) -> bool:
""" """
A test for a C or C++ header file. A test for a C or C++ header file.
""" """
@ -1055,29 +1061,29 @@ def CheckHeader(context, header, include_quotes = '<>', language = None):
context.did_show_result = 1 context.did_show_result = 1
return not res return not res
def CheckCC(context): def CheckCC(context) -> bool:
res = SCons.Conftest.CheckCC(context) res = SCons.Conftest.CheckCC(context)
context.did_show_result = 1 context.did_show_result = 1
return not res return not res
def CheckCXX(context): def CheckCXX(context) -> bool:
res = SCons.Conftest.CheckCXX(context) res = SCons.Conftest.CheckCXX(context)
context.did_show_result = 1 context.did_show_result = 1
return not res return not res
def CheckSHCC(context): def CheckSHCC(context) -> bool:
res = SCons.Conftest.CheckSHCC(context) res = SCons.Conftest.CheckSHCC(context)
context.did_show_result = 1 context.did_show_result = 1
return not res return not res
def CheckSHCXX(context): def CheckSHCXX(context) -> bool:
res = SCons.Conftest.CheckSHCXX(context) res = SCons.Conftest.CheckSHCXX(context)
context.did_show_result = 1 context.did_show_result = 1
return not res return not res
# Bram: Make this function obsolete? CheckHeader() is more generic. # Bram: Make this function obsolete? CheckHeader() is more generic.
def CheckCHeader(context, header, include_quotes = '""'): def CheckCHeader(context, header, include_quotes: str = '""'):
""" """
A test for a C header file. A test for a C header file.
""" """
@ -1086,16 +1092,16 @@ def CheckCHeader(context, header, include_quotes = '""'):
# Bram: Make this function obsolete? CheckHeader() is more generic. # Bram: Make this function obsolete? CheckHeader() is more generic.
def CheckCXXHeader(context, header, include_quotes = '""'): def CheckCXXHeader(context, header, include_quotes: str = '""'):
""" """
A test for a C++ header file. A test for a C++ header file.
""" """
return CheckHeader(context, header, include_quotes, language = "C++") return CheckHeader(context, header, include_quotes, language = "C++")
def CheckLib(context, library = None, symbol = "main", def CheckLib(context, library = None, symbol: str = "main",
header = None, language = None, autoadd=True, header = None, language = None, autoadd: bool=True,
append=True, unique=False) -> bool: append: bool=True, unique: bool=False) -> bool:
""" """
A test for a library. See also CheckLibWithHeader. A test for a library. See also CheckLibWithHeader.
Note that library may also be None to test whether the given symbol Note that library may also be None to test whether the given symbol
@ -1119,7 +1125,7 @@ def CheckLib(context, library = None, symbol = "main",
# Bram: Can only include one header and can't use #ifdef HAVE_HEADER_H. # Bram: Can only include one header and can't use #ifdef HAVE_HEADER_H.
def CheckLibWithHeader(context, libs, header, language, def CheckLibWithHeader(context, libs, header, language,
call = None, autoadd=True, append=True, unique=False) -> bool: call = None, autoadd: bool=True, append: bool=True, unique: bool=False) -> bool:
# ToDo: accept path for library. Support system header files. # ToDo: accept path for library. Support system header files.
""" """
Another (more sophisticated) test for a library. Another (more sophisticated) test for a library.
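For context, the autoadd/append/unique flags being typed here surface at the SConscript level roughly as below (library, header and call text are just examples):

    env = Environment()
    conf = Configure(env)
    if conf.CheckLibWithHeader('z', 'zlib.h', language='C',
                               call='zlibVersion();', autoadd=True,
                               append=False, unique=True):
        # with autoadd, 'z' was prepended (append=False) to LIBS exactly once (unique=True)
        pass
    env = conf.Finish()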
View file
@ -23,7 +23,7 @@
"""Operations on signature database files (.sconsign). """ """Operations on signature database files (.sconsign). """
import SCons.compat import SCons.compat # pylint: disable=wrong-import-order
import os import os
import pickle import pickle
@ -35,7 +35,7 @@ from SCons.compat import PICKLE_PROTOCOL
from SCons.Util import print_time from SCons.Util import print_time
def corrupt_dblite_warning(filename): def corrupt_dblite_warning(filename) -> None:
SCons.Warnings.warn( SCons.Warnings.warn(
SCons.Warnings.CorruptSConsignWarning, SCons.Warnings.CorruptSConsignWarning,
"Ignoring corrupt .sconsign file: %s" % filename, "Ignoring corrupt .sconsign file: %s" % filename,
@ -69,11 +69,10 @@ def current_sconsign_filename():
# eg .sconsign_sha1, etc. # eg .sconsign_sha1, etc.
if hash_format is None and current_hash_algorithm == 'md5': if hash_format is None and current_hash_algorithm == 'md5':
return ".sconsign" return ".sconsign"
else:
return ".sconsign_" + current_hash_algorithm return ".sconsign_" + current_hash_algorithm
def Get_DataBase(dir): def Get_DataBase(dir):
global DataBase, DB_Module, DB_Name global DB_Name
if DB_Name is None: if DB_Name is None:
DB_Name = current_sconsign_filename() DB_Name = current_sconsign_filename()
@ -88,7 +87,7 @@ def Get_DataBase(dir):
except KeyError: except KeyError:
path = d.entry_abspath(DB_Name) path = d.entry_abspath(DB_Name)
try: db = DataBase[d] = DB_Module.open(path, mode) try: db = DataBase[d] = DB_Module.open(path, mode)
except (IOError, OSError): except OSError:
pass pass
else: else:
if mode != "r": if mode != "r":
@ -106,19 +105,18 @@ def Get_DataBase(dir):
raise raise
def Reset(): def Reset() -> None:
"""Reset global state. Used by unit tests that end up using """Reset global state. Used by unit tests that end up using
SConsign multiple times to get a clean slate for each test.""" SConsign multiple times to get a clean slate for each test."""
global sig_files, DB_sync_list global sig_files, DB_sync_list
sig_files = [] sig_files = []
DB_sync_list = [] DB_sync_list = []
normcase = os.path.normcase normcase = os.path.normcase
def write(): def write() -> None:
global sig_files
if print_time(): if print_time():
start_time = time.perf_counter() start_time = time.perf_counter()
@ -154,16 +152,16 @@ class SConsignEntry:
__slots__ = ("binfo", "ninfo", "__weakref__") __slots__ = ("binfo", "ninfo", "__weakref__")
current_version_id = 2 current_version_id = 2
def __init__(self): def __init__(self) -> None:
# Create an object attribute from the class attribute so it ends up # Create an object attribute from the class attribute so it ends up
# in the pickled data in the .sconsign file. # in the pickled data in the .sconsign file.
#_version_id = self.current_version_id #_version_id = self.current_version_id
pass pass
def convert_to_sconsign(self): def convert_to_sconsign(self) -> None:
self.binfo.convert_to_sconsign() self.binfo.convert_to_sconsign()
def convert_from_sconsign(self, dir, name): def convert_from_sconsign(self, dir, name) -> None:
self.binfo.convert_from_sconsign(dir, name) self.binfo.convert_from_sconsign(dir, name)
def __getstate__(self): def __getstate__(self):
@ -180,7 +178,7 @@ class SConsignEntry:
pass pass
return state return state
def __setstate__(self, state): def __setstate__(self, state) -> None:
for key, value in state.items(): for key, value in state.items():
if key not in ('_version_id', '__weakref__'): if key not in ('_version_id', '__weakref__'):
setattr(self, key, value) setattr(self, key, value)
@ -195,7 +193,7 @@ class Base:
methods for fetching and storing the individual bits of information methods for fetching and storing the individual bits of information
that make up signature entry. that make up signature entry.
""" """
def __init__(self): def __init__(self) -> None:
self.entries = {} self.entries = {}
self.dirty = False self.dirty = False
self.to_be_merged = {} self.to_be_merged = {}
@ -206,26 +204,26 @@ class Base:
""" """
return self.entries[filename] return self.entries[filename]
def set_entry(self, filename, obj): def set_entry(self, filename, obj) -> None:
""" """
Set the entry. Set the entry.
""" """
self.entries[filename] = obj self.entries[filename] = obj
self.dirty = True self.dirty = True
def do_not_set_entry(self, filename, obj): def do_not_set_entry(self, filename, obj) -> None:
pass pass
def store_info(self, filename, node): def store_info(self, filename, node) -> None:
entry = node.get_stored_info() entry = node.get_stored_info()
entry.binfo.merge(node.get_binfo()) entry.binfo.merge(node.get_binfo())
self.to_be_merged[filename] = node self.to_be_merged[filename] = node
self.dirty = True self.dirty = True
def do_not_store_info(self, filename, node): def do_not_store_info(self, filename, node) -> None:
pass pass
def merge(self): def merge(self) -> None:
for key, node in self.to_be_merged.items(): for key, node in self.to_be_merged.items():
entry = node.get_stored_info() entry = node.get_stored_info()
try: try:
@ -247,7 +245,7 @@ class DB(Base):
from a global .sconsign.db* file--the actual file suffix is from a global .sconsign.db* file--the actual file suffix is
determined by the database module. determined by the database module.
""" """
def __init__(self, dir): def __init__(self, dir) -> None:
super().__init__() super().__init__()
self.dir = dir self.dir = dir
@ -284,10 +282,9 @@ class DB(Base):
self.set_entry = self.do_not_set_entry self.set_entry = self.do_not_set_entry
self.store_info = self.do_not_store_info self.store_info = self.do_not_store_info
global sig_files
sig_files.append(self) sig_files.append(self)
def write(self, sync=1): def write(self, sync: int=1) -> None:
if not self.dirty: if not self.dirty:
return return
@ -315,10 +312,8 @@ class DB(Base):
class Dir(Base): class Dir(Base):
def __init__(self, fp=None, dir=None): def __init__(self, fp=None, dir=None) -> None:
""" """fp - file pointer to read entries from."""
fp - file pointer to read entries from
"""
super().__init__() super().__init__()
if not fp: if not fp:
@ -335,20 +330,16 @@ class Dir(Base):
class DirFile(Dir): class DirFile(Dir):
""" """Encapsulates reading and writing a per-directory .sconsign file."""
Encapsulates reading and writing a per-directory .sconsign file. def __init__(self, dir) -> None:
""" """dir - the directory for the file."""
def __init__(self, dir):
"""
dir - the directory for the file
"""
self.dir = dir self.dir = dir
self.sconsign = os.path.join(dir.get_internal_path(), current_sconsign_filename()) self.sconsign = os.path.join(dir.get_internal_path(), current_sconsign_filename())
try: try:
fp = open(self.sconsign, 'rb') fp = open(self.sconsign, 'rb')
except IOError: except OSError:
fp = None fp = None
try: try:
@ -364,12 +355,10 @@ class DirFile(Dir):
except AttributeError: except AttributeError:
pass pass
global sig_files
sig_files.append(self) sig_files.append(self)
def write(self, sync=1): def write(self, sync: int=1) -> None:
""" """Write the .sconsign file to disk.
Write the .sconsign file to disk.
Try to write to a temporary file first, and rename it if we Try to write to a temporary file first, and rename it if we
succeed. If we can't write to the temporary file, it's succeed. If we can't write to the temporary file, it's
@ -389,11 +378,11 @@ class DirFile(Dir):
try: try:
file = open(temp, 'wb') file = open(temp, 'wb')
fname = temp fname = temp
except IOError: except OSError:
try: try:
file = open(self.sconsign, 'wb') file = open(self.sconsign, 'wb')
fname = self.sconsign fname = self.sconsign
except IOError: except OSError:
return return
for key, entry in self.entries.items(): for key, entry in self.entries.items():
entry.convert_to_sconsign() entry.convert_to_sconsign()
@ -404,7 +393,7 @@ class DirFile(Dir):
mode = os.stat(self.sconsign)[0] mode = os.stat(self.sconsign)[0]
os.chmod(self.sconsign, 0o666) os.chmod(self.sconsign, 0o666)
os.unlink(self.sconsign) os.unlink(self.sconsign)
except (IOError, OSError): except OSError:
# Try to carry on in the face of either OSError # Try to carry on in the face of either OSError
# (things like permission issues) or IOError (disk # (things like permission issues) or IOError (disk
# or network issues). If there's a really dangerous # or network issues). If there's a really dangerous
@ -425,13 +414,13 @@ class DirFile(Dir):
os.chmod(self.sconsign, mode) os.chmod(self.sconsign, mode)
try: try:
os.unlink(temp) os.unlink(temp)
except (IOError, OSError): except OSError:
pass pass
ForDirectory = DB ForDirectory = DB
def File(name, dbm_module=None): def File(name, dbm_module=None) -> None:
""" """
Arrange for all signatures to be stored in a global .sconsign.db* Arrange for all signatures to be stored in a global .sconsign.db*
file. file.
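This module-level File() is the machinery behind the public SConsignFile() call; a short SConstruct sketch (the path is made up):

    # keep all signature data in one database under the build directory instead of
    # per-directory .sconsign files; the file suffix (.dblite by default) is added
    # by the selected dbm module
    SConsignFile('#/build/sconsign')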
View file
@ -21,7 +21,12 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Dependency scanner for C/C++ code.""" """Dependency scanner for C/C++ code.
Two scanners are defined here: the default CScanner, and the optional
CConditionalScanner, which must be explicitly selected by calling
add_scanner() for each affected suffix.
"""
import SCons.Node.FS import SCons.Node.FS
import SCons.cpp import SCons.cpp
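A sketch of the explicit selection the new docstring describes, under the assumption that SCons.Tool.SourceFileScanner is the per-suffix selector meant by add_scanner():

    import SCons.Scanner.C
    import SCons.Tool

    # swap the default CScanner for the #if/#ifdef-aware variant on these suffixes
    cond_scanner = SCons.Scanner.C.CConditionalScanner()
    for suffix in ('.c', '.h', '.cpp', '.hpp'):
        SCons.Tool.SourceFileScanner.add_scanner(suffix, cond_scanner)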
@ -36,11 +41,11 @@ class SConsCPPScanner(SCons.cpp.PreProcessor):
by Nodes, not strings; 2) we can keep track of the files that are by Nodes, not strings; 2) we can keep track of the files that are
missing. missing.
""" """
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
self.missing = [] self.missing = []
def initialize_result(self, fname): def initialize_result(self, fname) -> None:
self.result = SCons.Util.UniqueList([fname]) self.result = SCons.Util.UniqueList([fname])
def finalize_result(self, fname): def finalize_result(self, fname):
@ -53,35 +58,69 @@ class SConsCPPScanner(SCons.cpp.PreProcessor):
self.missing.append((fname, self.current_file)) self.missing.append((fname, self.current_file))
return result return result
def read_file(self, file): def read_file(self, file) -> str:
try: try:
with open(str(file.rfile())) as fp: return file.rfile().get_text_contents()
return fp.read() except OSError as e:
except EnvironmentError as e:
self.missing.append((file, self.current_file)) self.missing.append((file, self.current_file))
return '' return ''
def dictify_CPPDEFINES(env) -> dict: def dictify_CPPDEFINES(env) -> dict:
"""Returns CPPDEFINES converted to a dict.""" """Returns CPPDEFINES converted to a dict.
This should be similar to :func:`~SCons.Defaults.processDefines`.
Unfortunately, we can't do the simple thing of calling that routine and
passing the result to the dict() constructor, because it turns the defines
into a list of "name=value" pairs, which the dict constructor won't
consume correctly. Also cannot just call dict on CPPDEFINES itself - it's
fine if it's stored in the converted form (currently deque of tuples), but
CPPDEFINES could be in other formats too.
So we have to do all the work here - keep concepts in sync with
``processDefines``.
"""
cppdefines = env.get('CPPDEFINES', {}) cppdefines = env.get('CPPDEFINES', {})
if cppdefines is None:
return {}
if SCons.Util.is_Sequence(cppdefines):
result = {} result = {}
if cppdefines is None:
return result
if SCons.Util.is_Tuple(cppdefines):
try:
return {cppdefines[0]: cppdefines[1]}
except IndexError:
return {cppdefines[0]: None}
if SCons.Util.is_Sequence(cppdefines):
for c in cppdefines: for c in cppdefines:
if SCons.Util.is_Sequence(c): if SCons.Util.is_Sequence(c):
try: try:
result[c[0]] = c[1] result[c[0]] = c[1]
except IndexError: except IndexError:
# it could be a one-item sequence # could be a one-item sequence
result[c[0]] = None result[c[0]] = None
elif SCons.Util.is_String(c):
try:
name, value = c.split('=')
result[name] = value
except ValueError:
result[c] = None
else: else:
# don't really know what to do here
result[c] = None result[c] = None
return result return result
if not SCons.Util.is_Dict(cppdefines):
return {cppdefines : None} if SCons.Util.is_String(cppdefines):
try:
name, value = cppdefines.split('=')
return {name: value}
except ValueError:
return {cppdefines: None}
if SCons.Util.is_Dict(cppdefines):
return cppdefines return cppdefines
return {cppdefines: None}
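A compact illustration of the mapping implemented above; any object with a .get() serves as the env argument, and the values are arbitrary:

    from SCons.Scanner.C import dictify_CPPDEFINES

    assert dictify_CPPDEFINES({'CPPDEFINES': 'NDEBUG'}) == {'NDEBUG': None}
    assert dictify_CPPDEFINES({'CPPDEFINES': 'VERSION=3'}) == {'VERSION': '3'}
    assert dictify_CPPDEFINES({'CPPDEFINES': ['A', ('B', 2), 'C=x']}) == {'A': None, 'B': 2, 'C': 'x'}
    assert dictify_CPPDEFINES({'CPPDEFINES': {'D': 1}}) == {'D': 1}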
class SConsCPPScannerWrapper: class SConsCPPScannerWrapper:
"""The SCons wrapper around a cpp.py scanner. """The SCons wrapper around a cpp.py scanner.
@ -91,7 +130,7 @@ class SConsCPPScannerWrapper:
evaluation of #if/#ifdef/#else/#elif lines. evaluation of #if/#ifdef/#else/#elif lines.
""" """
def __init__(self, name, variable): def __init__(self, name, variable) -> None:
self.name = name self.name = name
self.path = FindPathDirs(variable) self.path = FindPathDirs(variable)
@ -145,12 +184,12 @@ class SConsCPPConditionalScanner(SCons.cpp.PreProcessor):
missing. missing.
""" """
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
self.missing = [] self.missing = []
self._known_paths = [] self._known_paths = []
def initialize_result(self, fname): def initialize_result(self, fname) -> None:
self.result = SCons.Util.UniqueList([fname]) self.result = SCons.Util.UniqueList([fname])
def find_include_file(self, t): def find_include_file(self, t):
@ -169,11 +208,10 @@ class SConsCPPConditionalScanner(SCons.cpp.PreProcessor):
self.missing.append((fname, self.current_file)) self.missing.append((fname, self.current_file))
return result return result
def read_file(self, file): def read_file(self, file) -> str:
try: try:
with open(str(file.rfile())) as fp: return file.rfile().get_text_contents()
return fp.read() except OSError:
except EnvironmentError:
self.missing.append((file, self.current_file)) self.missing.append((file, self.current_file))
return "" return ""
@ -188,7 +226,7 @@ class SConsCPPConditionalScannerWrapper:
evaluation of #if/#ifdef/#else/#elif lines. evaluation of #if/#ifdef/#else/#elif lines.
""" """
def __init__(self, name, variable): def __init__(self, name, variable) -> None:
self.name = name self.name = name
self.path = FindPathDirs(variable) self.path = FindPathDirs(variable)
View file
@ -35,7 +35,7 @@ def DScanner():
return ds return ds
class D(Classic): class D(Classic):
def __init__(self): def __init__(self) -> None:
super().__init__( super().__init__(
name="DScanner", name="DScanner",
suffixes='$DSUFFIXES', suffixes='$DSUFFIXES',
@ -43,13 +43,16 @@ class D(Classic):
regex=r'(?:import\s+)([\w\s=,.]+)(?:\s*:[\s\w,=]+)?(?:;)', regex=r'(?:import\s+)([\w\s=,.]+)(?:\s*:[\s\w,=]+)?(?:;)',
) )
def find_include(self, include, source_dir, path): @staticmethod
def find_include(include, source_dir, path):
# translate dots (package separators) to slashes # translate dots (package separators) to slashes
inc = include.replace('.', '/') inc = include.replace('.', '/')
i = SCons.Node.FS.find_file(inc + '.d', (source_dir,) + path) # According to https://dlang.org/dmd-linux.html#interface-files
if i is None: # Prefer .di files over .d files when processing includes(imports)
i = SCons.Node.FS.find_file(inc + '.di', (source_dir,) + path) i = SCons.Node.FS.find_file(inc + '.di', (source_dir,) + path)
if i is None:
i = SCons.Node.FS.find_file(inc + '.d', (source_dir,) + path)
return i, include return i, include
def find_include_names(self, node): def find_include_names(self, node):
View file
@ -100,7 +100,7 @@ def scan_on_disk(node, env, path=()):
""" """
try: try:
flist = node.fs.listdir(node.get_abspath()) flist = node.fs.listdir(node.get_abspath())
except (IOError, OSError): except OSError:
return [] return []
e = node.Entry e = node.Entry
for f in filter(do_not_scan, flist): for f in filter(do_not_scan, flist):
View file
@ -48,7 +48,7 @@ class F90Scanner(Classic):
""" """
def __init__(self, name, suffixes, path_variable, def __init__(self, name, suffixes, path_variable,
use_regex, incl_regex, def_regex, *args, **kwargs): use_regex, incl_regex, def_regex, *args, **kwargs) -> None:
self.cre_use = re.compile(use_regex, re.M) self.cre_use = re.compile(use_regex, re.M)
self.cre_incl = re.compile(incl_regex, re.M) self.cre_incl = re.compile(incl_regex, re.M)
@ -119,7 +119,7 @@ class F90Scanner(Classic):
return [pair[1] for pair in sorted(nodes)] return [pair[1] for pair in sorted(nodes)]
def FortranScan(path_variable="FORTRANPATH"): def FortranScan(path_variable: str="FORTRANPATH"):
"""Return a prototype Scanner instance for scanning source files """Return a prototype Scanner instance for scanning source files
for Fortran USE & INCLUDE statements""" for Fortran USE & INCLUDE statements"""
View file
@ -52,7 +52,7 @@ def _subst_paths(env, paths) -> list:
return paths return paths
def _collect_classes(classlist, dirname, files): def _collect_classes(classlist, dirname, files) -> None:
for fname in files: for fname in files:
if fname.endswith(".class"): if fname.endswith(".class"):
classlist.append(os.path.join(str(dirname), fname)) classlist.append(os.path.join(str(dirname), fname))
View file
@ -78,7 +78,7 @@ class FindENVPathDirs:
A class to bind a specific E{*}PATH variable name to a function that A class to bind a specific E{*}PATH variable name to a function that
will return all of the E{*}path directories. will return all of the E{*}path directories.
""" """
def __init__(self, variable): def __init__(self, variable) -> None:
self.variable = variable self.variable = variable
def __call__(self, env, dir=None, target=None, source=None, argument=None): def __call__(self, env, dir=None, target=None, source=None, argument=None):
@ -175,7 +175,7 @@ class LaTeX(ScannerBase):
'includefrom', 'subincludefrom', 'includefrom', 'subincludefrom',
'inputfrom', 'subinputfrom'] 'inputfrom', 'subinputfrom']
def __init__(self, name, suffixes, graphics_extensions, *args, **kwargs): def __init__(self, name, suffixes, graphics_extensions, *args, **kwargs) -> None:
regex = r''' regex = r'''
\\( \\(
include include
@ -219,7 +219,7 @@ class LaTeX(ScannerBase):
back and uses a dictionary of tuples rather than a single tuple back and uses a dictionary of tuples rather than a single tuple
of paths. of paths.
""" """
def __init__(self, dictionary): def __init__(self, dictionary) -> None:
self.dictionary = {} self.dictionary = {}
for k,n in dictionary.items(): for k,n in dictionary.items():
self.dictionary[k] = (FindPathDirs(n), FindENVPathDirs(n)) self.dictionary[k] = (FindPathDirs(n), FindENVPathDirs(n))
@ -241,7 +241,7 @@ class LaTeX(ScannerBase):
Do not scan *.eps, *.pdf, *.jpg, etc. Do not scan *.eps, *.pdf, *.jpg, etc.
""" """
def __init__(self, suffixes): def __init__(self, suffixes) -> None:
self.suffixes = suffixes self.suffixes = suffixes
def __call__(self, node, env): def __call__(self, node, env):
@ -289,7 +289,8 @@ class LaTeX(ScannerBase):
return [filename+e for e in self.graphics_extensions] return [filename+e for e in self.graphics_extensions]
return [filename] return [filename]
def sort_key(self, include): @staticmethod
def sort_key(include):
return SCons.Node.FS._my_normcase(str(include)) return SCons.Node.FS._my_normcase(str(include))
def find_include(self, include, source_dir, path): def find_include(self, include, source_dir, path):
@ -331,7 +332,7 @@ class LaTeX(ScannerBase):
line_continues_a_comment = len(comment) > 0 line_continues_a_comment = len(comment) > 0
return '\n'.join(out).rstrip()+'\n' return '\n'.join(out).rstrip()+'\n'
def scan(self, node, subdir='.'): def scan(self, node, subdir: str='.'):
# Modify the default scan function to allow for the regular # Modify the default scan function to allow for the regular
# expression to return a comma separated list of file names # expression to return a comma separated list of file names
# as can be the case with the bibliography keyword. # as can be the case with the bibliography keyword.
View file
@ -59,7 +59,7 @@ class FindPathDirs:
"""Class to bind a specific E{*}PATH variable name to a function that """Class to bind a specific E{*}PATH variable name to a function that
will return all of the E{*}path directories. will return all of the E{*}path directories.
""" """
def __init__(self, variable): def __init__(self, variable) -> None:
self.variable = variable self.variable = variable
def __call__(self, env, dir=None, target=None, source=None, argument=None): def __call__(self, env, dir=None, target=None, source=None, argument=None):
@ -149,7 +149,7 @@ class ScannerBase:
def __init__( def __init__(
self, self,
function, function,
name="NONE", name: str="NONE",
argument=_null, argument=_null,
skeys=_null, skeys=_null,
path_function=None, path_function=None,
@ -159,7 +159,7 @@ class ScannerBase:
node_factory=None, node_factory=None,
scan_check=None, scan_check=None,
recursive=None, recursive=None,
): ) -> None:
"""Construct a new scanner object given a scanner function.""" """Construct a new scanner object given a scanner function."""
# Note: this class could easily work with scanner functions that take # Note: this class could easily work with scanner functions that take
# something other than a filename as an argument (e.g. a database # something other than a filename as an argument (e.g. a database
@ -238,10 +238,10 @@ class ScannerBase:
def __hash__(self): def __hash__(self):
return id(self) return id(self)
def __str__(self): def __str__(self) -> str:
return self.name return self.name
def add_skey(self, skey): def add_skey(self, skey) -> None:
"""Add a skey to the list of skeys""" """Add a skey to the list of skeys"""
self.skeys.append(skey) self.skeys.append(skey)
@ -270,7 +270,7 @@ class ScannerBase:
# recurse_nodes = _recurse_no_nodes # recurse_nodes = _recurse_no_nodes
def add_scanner(self, skey, scanner): def add_scanner(self, skey, scanner) -> None:
self.function[skey] = scanner self.function[skey] = scanner
self.add_skey(skey) self.add_skey(skey)
@ -292,7 +292,7 @@ class Selector(ScannerBase):
used by various Tool modules and therefore was likely a template used by various Tool modules and therefore was likely a template
for custom modules that may be out there.) for custom modules that may be out there.)
""" """
def __init__(self, mapping, *args, **kwargs): def __init__(self, mapping, *args, **kwargs) -> None:
super().__init__(None, *args, **kwargs) super().__init__(None, *args, **kwargs)
self.mapping = mapping self.mapping = mapping
self.skeys = list(mapping.keys()) self.skeys = list(mapping.keys())
@ -306,7 +306,7 @@ class Selector(ScannerBase):
except KeyError: except KeyError:
return None return None
def add_scanner(self, skey, scanner): def add_scanner(self, skey, scanner) -> None:
self.mapping[skey] = scanner self.mapping[skey] = scanner
self.add_skey(skey) self.add_skey(skey)
@ -318,7 +318,7 @@ class Current(ScannerBase):
either locally or in a repository). either locally or in a repository).
""" """
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs) -> None:
def current_check(node, env): def current_check(node, env):
return not node.has_builder() or node.is_up_to_date() return not node.has_builder() or node.is_up_to_date()
@ -337,7 +337,7 @@ class Classic(Current):
name of the include file in group 0. name of the include file in group 0.
""" """
def __init__(self, name, suffixes, path_variable, regex, *args, **kwargs): def __init__(self, name, suffixes, path_variable, regex, *args, **kwargs) -> None:
self.cre = re.compile(regex, re.M) self.cre = re.compile(regex, re.M)
def _scan(node, _, path=(), self=self): def _scan(node, _, path=(), self=self):
@ -415,7 +415,8 @@ class ClassicCPP(Classic):
to the constructor must return the leading bracket in group 0, and to the constructor must return the leading bracket in group 0, and
the contained filename in group 1. the contained filename in group 1.
""" """
def find_include(self, include, source_dir, path): @staticmethod
def find_include(include, source_dir, path):
include = list(map(SCons.Util.to_str, include)) include = list(map(SCons.Util.to_str, include))
if include[0] == '"': if include[0] == '"':
paths = (source_dir,) + tuple(path) paths = (source_dir,) + tuple(path)
@ -426,7 +427,8 @@ class ClassicCPP(Classic):
i = SCons.Util.silent_intern(include[1]) i = SCons.Util.silent_intern(include[1])
return n, i return n, i
def sort_key(self, include): @staticmethod
def sort_key(include):
return SCons.Node.FS._my_normcase(' '.join(include)) return SCons.Node.FS._my_normcase(' '.join(include))
# Local Variables: # Local Variables:
View file
@ -112,7 +112,7 @@ version Prints SCons version information.
'sh' : 'shell', 'sh' : 'shell',
} }
def __init__(self, **kw): def __init__(self, **kw) -> None:
cmd.Cmd.__init__(self) cmd.Cmd.__init__(self)
for key, val in kw.items(): for key, val in kw.items():
setattr(self, key, val) setattr(self, key, val)
@ -122,7 +122,7 @@ version Prints SCons version information.
else: else:
self.shell_variable = 'SHELL' self.shell_variable = 'SHELL'
def default(self, argv): def default(self, argv) -> None:
print("*** Unknown command: %s" % argv[0]) print("*** Unknown command: %s" % argv[0])
def onecmd(self, line): def onecmd(self, line):
@ -148,7 +148,7 @@ version Prints SCons version information.
return self.default(argv) return self.default(argv)
return func(argv) return func(argv)
def do_build(self, argv): def do_build(self, argv) -> None:
"""\ """\
build [TARGETS] Build the specified TARGETS and their build [TARGETS] Build the specified TARGETS and their
dependencies. 'b' is a synonym. dependencies. 'b' is a synonym.
@ -213,11 +213,11 @@ version Prints SCons version information.
seen_nodes = {} seen_nodes = {}
def get_unseen_children(node, parent, seen_nodes=seen_nodes): def get_unseen_children(node, parent, seen_nodes=seen_nodes):
def is_unseen(node, seen_nodes=seen_nodes): def is_unseen(node, seen_nodes=seen_nodes) -> bool:
return node not in seen_nodes return node not in seen_nodes
return [child for child in node.children(scan=1) if is_unseen(child)] return [child for child in node.children(scan=1) if is_unseen(child)]
def add_to_seen_nodes(node, parent, seen_nodes=seen_nodes): def add_to_seen_nodes(node, parent, seen_nodes=seen_nodes) -> None:
seen_nodes[node] = 1 seen_nodes[node] = 1
# If this file is in a VariantDir and has a # If this file is in a VariantDir and has a
@ -272,11 +272,11 @@ version Prints SCons version information.
""" """
return self.do_build(['build', '--clean'] + argv[1:]) return self.do_build(['build', '--clean'] + argv[1:])
def do_EOF(self, argv): def do_EOF(self, argv) -> None:
print() print()
self.do_exit(argv) self.do_exit(argv)
def _do_one_help(self, arg): def _do_one_help(self, arg) -> None:
try: try:
# If help_<arg>() exists, then call it. # If help_<arg>() exists, then call it.
func = getattr(self, 'help_' + arg) func = getattr(self, 'help_' + arg)
@ -312,13 +312,13 @@ version Prints SCons version information.
lines = list(map(strip_spaces, lines)) lines = list(map(strip_spaces, lines))
return '\n'.join(lines) return '\n'.join(lines)
def do_exit(self, argv): def do_exit(self, argv) -> None:
"""\ """\
exit Exit SCons interactive mode. exit Exit SCons interactive mode.
""" """
sys.exit(0) sys.exit(0)
def do_help(self, argv): def do_help(self, argv) -> None:
"""\ """\
help [COMMAND] Prints help for the specified COMMAND. 'h' help [COMMAND] Prints help for the specified COMMAND. 'h'
and '?' are synonyms. and '?' are synonyms.
@ -335,7 +335,7 @@ version Prints SCons version information.
sys.stdout.write(doc + '\n') sys.stdout.write(doc + '\n')
sys.stdout.flush() sys.stdout.flush()
def do_shell(self, argv): def do_shell(self, argv) -> None:
"""\ """\
shell [COMMANDLINE] Execute COMMANDLINE in a subshell. 'sh' and shell [COMMANDLINE] Execute COMMANDLINE in a subshell. 'sh' and
'!' are synonyms. '!' are synonyms.
@ -346,22 +346,22 @@ version Prints SCons version information.
argv = os.environ[self.shell_variable] argv = os.environ[self.shell_variable]
try: try:
# Per "[Python-Dev] subprocess insufficiently platform-independent?" # Per "[Python-Dev] subprocess insufficiently platform-independent?"
# http://mail.python.org/pipermail/python-dev/2008-August/081979.html "+ # https://mail.python.org/pipermail/python-dev/2008-August/081979.html "+
# Doing the right thing with an argument list currently # Doing the right thing with an argument list currently
# requires different shell= values on Windows and Linux. # requires different shell= values on Windows and Linux.
p = subprocess.Popen(argv, shell=(sys.platform=='win32')) p = subprocess.Popen(argv, shell=(sys.platform=='win32'))
except EnvironmentError as e: except OSError as e:
sys.stderr.write('scons: %s: %s\n' % (argv[0], e.strerror)) sys.stderr.write('scons: %s: %s\n' % (argv[0], e.strerror))
else: else:
p.wait() p.wait()
def do_version(self, argv): def do_version(self, argv) -> None:
"""\ """\
version Prints SCons version information. version Prints SCons version information.
""" """
sys.stdout.write(self.parser.version + '\n') sys.stdout.write(self.parser.version + '\n')
def interact(fs, parser, options, targets, target_top): def interact(fs, parser, options, targets, target_top) -> None:
c = SConsInteractiveCmd(prompt = 'scons>>> ', c = SConsInteractiveCmd(prompt = 'scons>>> ',
fs = fs, fs = fs,
parser = parser, parser = parser,
View file
@ -31,13 +31,8 @@ some other module. If it's specific to the "scons" script invocation,
it goes here. it goes here.
""" """
# these define the range of versions SCons supports
minimum_python_version = (3, 6, 0)
deprecated_python_version = (3, 6, 0)
import SCons.compat import SCons.compat
import atexit
import importlib.util import importlib.util
import os import os
import re import re
@ -46,6 +41,7 @@ import time
import traceback import traceback
import platform import platform
import threading import threading
from typing import Optional, List
import SCons.CacheDir import SCons.CacheDir
import SCons.Debug import SCons.Debug
@ -63,9 +59,34 @@ import SCons.Taskmaster
import SCons.Util import SCons.Util
import SCons.Warnings import SCons.Warnings
import SCons.Script.Interactive import SCons.Script.Interactive
from SCons.Util.stats import count_stats, memory_stats, time_stats, ENABLE_JSON, write_scons_stats_file, JSON_OUTPUT_FILE
from SCons import __version__ as SConsVersion from SCons import __version__ as SConsVersion
# these define the range of versions SCons supports
minimum_python_version = (3, 6, 0)
deprecated_python_version = (3, 6, 0)
# ordered list of SConstruct names to look for if there is no -f flag
KNOWN_SCONSTRUCT_NAMES = [
'SConstruct',
'Sconstruct',
'sconstruct',
'SConstruct.py',
'Sconstruct.py',
'sconstruct.py',
]
# list of names recognized by debugger as "SConscript files" (inc. SConstruct)
# files suffixed .py always work so don't need to be in this list.
KNOWN_SCONSCRIPTS = [
"SConstruct",
"Sconstruct",
"sconstruct",
"SConscript",
"sconscript",
]
# Global variables # Global variables
first_command_start = None first_command_start = None
last_command_end = None last_command_end = None
@ -82,7 +103,7 @@ num_jobs = None
delayed_warnings = [] delayed_warnings = []
def revert_io(): def revert_io() -> None:
# This call is added to revert stderr and stdout to the original # This call is added to revert stderr and stdout to the original
# ones just in case some build rule or something else in the system # ones just in case some build rule or something else in the system
# has redirected them elsewhere. # has redirected them elsewhere.
@ -103,7 +124,7 @@ class Progressor:
count = 0 count = 0
target_string = '$TARGET' target_string = '$TARGET'
def __init__(self, obj, interval=1, file=None, overwrite=False): def __init__(self, obj, interval: int=1, file=None, overwrite: bool=False) -> None:
if file is None: if file is None:
file = sys.stdout file = sys.stdout
@ -121,12 +142,12 @@ class Progressor:
else: else:
self.func = self.string self.func = self.string
def write(self, s): def write(self, s) -> None:
self.file.write(s) self.file.write(s)
self.file.flush() self.file.flush()
self.prev = s self.prev = s
def erase_previous(self): def erase_previous(self) -> None:
if self.prev: if self.prev:
length = len(self.prev) length = len(self.prev)
if self.prev[-1] in ('\n', '\r'): if self.prev[-1] in ('\n', '\r'):
@ -134,16 +155,16 @@ class Progressor:
self.write(' ' * length + '\r') self.write(' ' * length + '\r')
self.prev = '' self.prev = ''
def spinner(self, node): def spinner(self, node) -> None:
self.write(self.obj[self.count % len(self.obj)]) self.write(self.obj[self.count % len(self.obj)])
def string(self, node): def string(self, node) -> None:
self.write(self.obj) self.write(self.obj)
def replace_string(self, node): def replace_string(self, node) -> None:
self.write(self.obj.replace(self.target_string, str(node))) self.write(self.obj.replace(self.target_string, str(node)))
def __call__(self, node): def __call__(self, node) -> None:
self.count = self.count + 1 self.count = self.count + 1
if (self.count % self.interval) == 0: if (self.count % self.interval) == 0:
if self.overwrite: if self.overwrite:
@ -152,7 +173,7 @@ class Progressor:
ProgressObject = SCons.Util.Null() ProgressObject = SCons.Util.Null()
def Progress(*args, **kw): def Progress(*args, **kw) -> None:
global ProgressObject global ProgressObject
ProgressObject = Progressor(*args, **kw) ProgressObject = Progressor(*args, **kw)
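Progressor backs the documented Progress() call; typical SConstruct usage looks roughly like this (the spinner characters and message are the conventional examples, nothing specific to this change):

    # a four-character spinner redrawn every five evaluated nodes
    Progress(['-\r', '\\\r', '|\r', '/\r'], interval=5)

    # alternatively, one overwritten status line naming each node as it is considered
    Progress('Evaluating $TARGET\r', overwrite=True)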
@ -170,7 +191,7 @@ class BuildTask(SCons.Taskmaster.OutOfDateTask):
"""An SCons build task.""" """An SCons build task."""
progress = ProgressObject progress = ProgressObject
def display(self, message): def display(self, message) -> None:
display('scons: ' + message) display('scons: ' + message)
def prepare(self): def prepare(self):
@ -179,14 +200,14 @@ class BuildTask(SCons.Taskmaster.OutOfDateTask):
self.progress(target) self.progress(target)
return SCons.Taskmaster.OutOfDateTask.prepare(self) return SCons.Taskmaster.OutOfDateTask.prepare(self)
def needs_execute(self): def needs_execute(self) -> bool:
if SCons.Taskmaster.OutOfDateTask.needs_execute(self): if SCons.Taskmaster.OutOfDateTask.needs_execute(self):
return True return True
if self.top and self.targets[0].has_builder(): if self.top and self.targets[0].has_builder():
display("scons: `%s' is up to date." % str(self.node)) display("scons: `%s' is up to date." % str(self.node))
return False return False
def execute(self): def execute(self) -> None:
if print_time: if print_time:
start_time = time.time() start_time = time.time()
global first_command_start global first_command_start
@ -208,12 +229,13 @@ class BuildTask(SCons.Taskmaster.OutOfDateTask):
"Command execution end timestamp: %s: %f\n" "Command execution end timestamp: %s: %f\n"
% (str(self.node), finish_time) % (str(self.node), finish_time)
) )
time_stats.add_command(str(self.node), start_time, finish_time)
sys.stdout.write( sys.stdout.write(
"Command execution time: %s: %f seconds\n" "Command execution time: %s: %f seconds\n"
% (str(self.node), (finish_time - start_time)) % (str(self.node), (finish_time - start_time))
) )
def do_failed(self, status=2): def do_failed(self, status: int=2) -> None:
_BuildFailures.append(self.exception[1]) _BuildFailures.append(self.exception[1])
global exit_status global exit_status
global this_build_status global this_build_status
@ -253,7 +275,7 @@ class BuildTask(SCons.Taskmaster.OutOfDateTask):
else: else:
SCons.Taskmaster.OutOfDateTask.executed(self) SCons.Taskmaster.OutOfDateTask.executed(self)
def failed(self): def failed(self) -> None:
# Handle the failure of a build task. The primary purpose here # Handle the failure of a build task. The primary purpose here
# is to display the various types of Errors and Exceptions # is to display the various types of Errors and Exceptions
# appropriately. # appropriately.
@ -284,7 +306,7 @@ class BuildTask(SCons.Taskmaster.OutOfDateTask):
node = buildError.node node = buildError.node
if not SCons.Util.is_List(node): if not SCons.Util.is_List(node):
node = [ node ] node = [node]
nodename = ', '.join(map(str, node)) nodename = ', '.join(map(str, node))
errfmt = "scons: *** [%s] %s\n" errfmt = "scons: *** [%s] %s\n"
@ -309,7 +331,7 @@ class BuildTask(SCons.Taskmaster.OutOfDateTask):
self.exc_clear() self.exc_clear()
def postprocess(self): def postprocess(self) -> None:
if self.top: if self.top:
t = self.targets[0] t = self.targets[0]
for tp in self.options.tree_printers: for tp in self.options.tree_printers:
@ -321,7 +343,7 @@ class BuildTask(SCons.Taskmaster.OutOfDateTask):
print(tree) print(tree)
SCons.Taskmaster.OutOfDateTask.postprocess(self) SCons.Taskmaster.OutOfDateTask.postprocess(self)
def make_ready(self): def make_ready(self) -> None:
"""Make a task ready for execution""" """Make a task ready for execution"""
SCons.Taskmaster.OutOfDateTask.make_ready(self) SCons.Taskmaster.OutOfDateTask.make_ready(self)
if self.out_of_date and self.options.debug_explain: if self.out_of_date and self.options.debug_explain:
@ -332,7 +354,7 @@ class BuildTask(SCons.Taskmaster.OutOfDateTask):
class CleanTask(SCons.Taskmaster.AlwaysTask): class CleanTask(SCons.Taskmaster.AlwaysTask):
"""An SCons clean task.""" """An SCons clean task."""
def fs_delete(self, path, pathstr, remove=True): def fs_delete(self, path, pathstr, remove: bool=True):
try: try:
if os.path.lexists(path): if os.path.lexists(path):
if os.path.isfile(path) or os.path.islink(path): if os.path.isfile(path) or os.path.islink(path):
@ -356,7 +378,7 @@ class CleanTask(SCons.Taskmaster.AlwaysTask):
raise SCons.Errors.UserError(errstr % pathstr) raise SCons.Errors.UserError(errstr % pathstr)
except SCons.Errors.UserError as e: except SCons.Errors.UserError as e:
print(e) print(e)
except (IOError, OSError) as e: except OSError as e:
print("scons: Could not remove '%s':" % pathstr, e.strerror) print("scons: Could not remove '%s':" % pathstr, e.strerror)
def _get_files_to_clean(self): def _get_files_to_clean(self):
@ -366,20 +388,20 @@ class CleanTask(SCons.Taskmaster.AlwaysTask):
result = [t for t in self.targets if not t.noclean] result = [t for t in self.targets if not t.noclean]
return result return result
def _clean_targets(self, remove=True): def _clean_targets(self, remove: bool=True) -> None:
target = self.targets[0] target = self.targets[0]
if target in SCons.Environment.CleanTargets: if target in SCons.Environment.CleanTargets:
files = SCons.Environment.CleanTargets[target] files = SCons.Environment.CleanTargets[target]
for f in files: for f in files:
self.fs_delete(f.get_abspath(), str(f), remove) self.fs_delete(f.get_abspath(), str(f), remove)
def show(self): def show(self) -> None:
for t in self._get_files_to_clean(): for t in self._get_files_to_clean():
if not t.isdir(): if not t.isdir():
display("Removed " + str(t)) display("Removed " + str(t))
self._clean_targets(remove=False) self._clean_targets(remove=False)
def remove(self): def remove(self) -> None:
for t in self._get_files_to_clean(): for t in self._get_files_to_clean():
try: try:
removed = t.remove() removed = t.remove()
@ -389,7 +411,7 @@ class CleanTask(SCons.Taskmaster.AlwaysTask):
# the file not existing. In either case, print a # the file not existing. In either case, print a
# message and keep going to try to remove as many # message and keep going to try to remove as many
# targets as possible. # targets as possible.
print("scons: Could not remove '{0}'".format(str(t)), e.strerror) print(f"scons: Could not remove '{str(t)}'", e.strerror)
else: else:
if removed: if removed:
display("Removed " + str(t)) display("Removed " + str(t))
@ -408,15 +430,15 @@ class CleanTask(SCons.Taskmaster.AlwaysTask):
# anything really needs to be done. # anything really needs to be done.
make_ready = SCons.Taskmaster.Task.make_ready_all make_ready = SCons.Taskmaster.Task.make_ready_all
def prepare(self): def prepare(self) -> None:
pass pass
class QuestionTask(SCons.Taskmaster.AlwaysTask): class QuestionTask(SCons.Taskmaster.AlwaysTask):
"""An SCons task for the -q (question) option.""" """An SCons task for the -q (question) option."""
def prepare(self): def prepare(self) -> None:
pass pass
def execute(self): def execute(self) -> None:
if self.targets[0].get_state() != SCons.Node.up_to_date or \ if self.targets[0].get_state() != SCons.Node.up_to_date or \
(self.top and not self.targets[0].exists()): (self.top and not self.targets[0].exists()):
global exit_status global exit_status
@ -425,12 +447,12 @@ class QuestionTask(SCons.Taskmaster.AlwaysTask):
this_build_status = 1 this_build_status = 1
self.tm.stop() self.tm.stop()
def executed(self): def executed(self) -> None:
pass pass
class TreePrinter: class TreePrinter:
def __init__(self, derived=False, prune=False, status=False, sLineDraw=False): def __init__(self, derived: bool=False, prune: bool=False, status: bool=False, sLineDraw: bool=False) -> None:
self.derived = derived self.derived = derived
self.prune = prune self.prune = prune
self.status = status self.status = status
@ -440,7 +462,7 @@ class TreePrinter:
def get_derived_children(self, node): def get_derived_children(self, node):
children = node.all_children(None) children = node.all_children(None)
return [x for x in children if x.has_builder()] return [x for x in children if x.has_builder()]
def display(self, t): def display(self, t) -> None:
if self.derived: if self.derived:
func = self.get_derived_children func = self.get_derived_children
else: else:
@ -460,24 +482,29 @@ def python_version_deprecated(version=sys.version_info):
class FakeOptionParser: class FakeOptionParser:
""" """A do-nothing option parser, used for the initial OptionsParser value.
A do-nothing option parser, used for the initial OptionsParser variable.
During normal SCons operation, the OptionsParser is created right During normal SCons operation, the OptionsParser is created right
away by the main() function. Certain tests scripts however, can away by the main() function. Certain test scripts however, can
introspect on different Tool modules, the initialization of which introspect on different Tool modules, the initialization of which
can try to add a new, local option to an otherwise uninitialized can try to add a new, local option to an otherwise uninitialized
OptionsParser object. This allows that introspection to happen OptionsParser object. This allows that introspection to happen
without blowing up. without blowing up.
""" """
class FakeOptionValues: class FakeOptionValues:
def __getattr__(self, attr): def __getattr__(self, attr):
return None return None
values = FakeOptionValues() values = FakeOptionValues()
def add_local_option(self, *args, **kw):
# TODO: to quiet checkers, FakeOptionParser should also define
# raise_exception_on_error, preserve_unknown_options, largs and parse_args
def add_local_option(self, *args, **kw) -> None:
pass pass
OptionsParser = FakeOptionParser() OptionsParser = FakeOptionParser()
def AddOption(*args, **kw): def AddOption(*args, **kw):
@ -492,86 +519,61 @@ def GetOption(name):
def SetOption(name, value): def SetOption(name, value):
return OptionsParser.values.set_option(name, value) return OptionsParser.values.set_option(name, value)
def DebugOptions(json=None):
"""
API to allow specifying options to SCons debug logic
Currently only json is supported which changes the
json file written by --debug=json from the default
"""
if json is not None:
json_node = SCons.Defaults.DefaultEnvironment().arg2nodes(json)
SCons.Util.stats.JSON_OUTPUT_FILE = json_node[0].get_abspath()
# Check if parent dir to JSON_OUTPUT_FILE exists
json_dir = os.path.dirname(SCons.Util.stats.JSON_OUTPUT_FILE)
try:
if not os.path.isdir(json_dir):
os.makedirs(json_dir, exist_ok=True)
# Now try to open file and see if you can..
with open(SCons.Util.stats.JSON_OUTPUT_FILE,'w') as js:
pass
except OSError as e:
raise SCons.Errors.UserError(f"Unable to create directory for JSON debug output file: {SCons.Util.stats.JSON_OUTPUT_FILE}")
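At the SConstruct level the new DebugOptions() call redirects where --debug=json writes its report (the path below is invented):

    # send the --debug=json statistics into the build tree rather than the default location
    DebugOptions(json='#/build/debug/scons_stats.json')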
def ValidateOptions(throw_exception=False) -> None:
def ValidateOptions(throw_exception: bool=False) -> None:
"""Validate options passed to SCons on the command line. """Validate options passed to SCons on the command line.
If you call this after you set all your command line options with AddOption(), Checks that all options given on the command line are known to this
it will verify that all command line options are valid. instance of SCons. Call after all of the cli options have been set
So if you added an option --xyz and you call SCons with --xyy you can cause up through :func:`AddOption` calls. For example, if you added an
option ``--xyz`` and you call SCons with ``--xyy`` you can cause
SCons to issue an error message and exit by calling this function. SCons to issue an error message and exit by calling this function.
:param bool throw_exception: (Optional) Should this function raise an error if there's an invalid option on the command line, or issue a message and exit with error status. Arguments:
throw_exception: if an invalid option is present on the command line,
raises an exception if this optional parameter evaluates true;
if false (the default), issue a message and exit with error status.
:raises SConsBadOptionError: If throw_exception is True and there are invalid options on command line. Raises:
SConsBadOptionError: If *throw_exception* is true and there are invalid
options on the command line.
.. versionadded:: 4.5.0 .. versionadded:: 4.5.0
""" """
OptionsParser.raise_exception_on_error = throw_exception OptionsParser.raise_exception_on_error = throw_exception
OptionsParser.preserve_unknown_options = False OptionsParser.preserve_unknown_options = False
OptionsParser.parse_args(OptionsParser.largs, OptionsParser.values) OptionsParser.parse_args(OptionsParser.largs, OptionsParser.values)
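ValidateOptions() is intended to be called from the build scripts themselves, after every AddOption(). A hedged SConstruct sketch (the option name and default value are invented):

    # SConstruct -- illustrative only
    AddOption('--prefix', dest='prefix', nargs=1, type='string',
              action='store', default='/usr/local', metavar='DIR',
              help='installation prefix')
    # Reject a misspelled flag such as --prefik; with throw_exception=True this
    # raises SConsBadOptionError instead of printing a message and exiting.
    ValidateOptions(throw_exception=True)
    env = Environment(PREFIX=GetOption('prefix'))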
def PrintHelp(file=None): def PrintHelp(file=None, local_only: bool = False) -> None:
if local_only:
OptionsParser.print_local_option_help(file=file)
else:
OptionsParser.print_help(file=file) OptionsParser.print_help(file=file)
class Stats:
def __init__(self):
self.stats = []
self.labels = []
self.append = self.do_nothing
self.print_stats = self.do_nothing
def enable(self, outfp):
self.outfp = outfp
self.append = self.do_append
self.print_stats = self.do_print
def do_nothing(self, *args, **kw):
pass
class CountStats(Stats):
def do_append(self, label):
self.labels.append(label)
self.stats.append(SCons.Debug.fetchLoggedInstances())
def do_print(self):
stats_table = {}
for s in self.stats:
for n in [t[0] for t in s]:
stats_table[n] = [0, 0, 0, 0]
i = 0
for s in self.stats:
for n, c in s:
stats_table[n][i] = c
i = i + 1
self.outfp.write("Object counts:\n")
pre = [" "]
post = [" %s\n"]
l = len(self.stats)
fmt1 = ''.join(pre + [' %7s']*l + post)
fmt2 = ''.join(pre + [' %7d']*l + post)
labels = self.labels[:l]
labels.append(("", "Class"))
self.outfp.write(fmt1 % tuple([x[0] for x in labels]))
self.outfp.write(fmt1 % tuple([x[1] for x in labels]))
for k in sorted(stats_table.keys()):
r = stats_table[k][:l] + [k]
self.outfp.write(fmt2 % tuple(r))
count_stats = CountStats()
class MemStats(Stats):
def do_append(self, label):
self.labels.append(label)
self.stats.append(SCons.Debug.memory())
def do_print(self):
fmt = 'Memory %-32s %12d\n'
for label, stats in zip(self.labels, self.stats):
self.outfp.write(fmt % (label, stats))
memory_stats = MemStats()
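The Stats/CountStats/MemStats machinery removed above appears to move out of Main.py (the SCons.Util.stats, time_stats and write_scons_stats_file references elsewhere in this diff point that way). The enable-or-no-op pattern it relies on is easy to show in isolation; this standalone sketch is not SCons code:

    import sys

    class Recorder:
        """Collects labelled samples, but only after enable() is called."""
        def __init__(self):
            self.samples = []
            self.append = self._do_nothing   # no-ops until enabled
            self.report = self._do_nothing

        def _do_nothing(self, *args, **kw):
            pass

        def enable(self, outfp):
            self.outfp = outfp
            self.append = self.samples.append   # swap in the real methods
            self.report = self._do_report

        def _do_report(self):
            for label in self.samples:
                self.outfp.write("sample: %s\n" % label)

    rec = Recorder()
    rec.append("ignored")    # silently dropped: stats not enabled
    rec.enable(sys.stdout)
    rec.append("counted")
    rec.report()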
# utility functions # utility functions
def _scons_syntax_error(e): def _scons_syntax_error(e) -> None:
"""Handle syntax errors. Print out a message and show where the error """Handle syntax errors. Print out a message and show where the error
occurred. occurred.
""" """
@ -599,7 +601,7 @@ def find_deepest_user_frame(tb):
return frame return frame
return tb[0] return tb[0]
def _scons_user_error(e): def _scons_user_error(e) -> None:
"""Handle user errors. Print out a message and a description of the """Handle user errors. Print out a message and a description of the
error, along with the line number and routine where it occurred. error, along with the line number and routine where it occurred.
The file and line number will be the deepest stack frame that is The file and line number will be the deepest stack frame that is
@ -614,7 +616,7 @@ def _scons_user_error(e):
sys.stderr.write('File "%s", line %d, in %s\n' % (filename, lineno, routine)) sys.stderr.write('File "%s", line %d, in %s\n' % (filename, lineno, routine))
sys.exit(2) sys.exit(2)
def _scons_user_warning(e): def _scons_user_warning(e) -> None:
"""Handle user warnings. Print out a message and a description of """Handle user warnings. Print out a message and a description of
the warning, along with the line number and routine where it occurred. the warning, along with the line number and routine where it occurred.
The file and line number will be the deepest stack frame that is The file and line number will be the deepest stack frame that is
@ -625,7 +627,7 @@ def _scons_user_warning(e):
sys.stderr.write("\nscons: warning: %s\n" % e) sys.stderr.write("\nscons: warning: %s\n" % e)
sys.stderr.write('File "%s", line %d, in %s\n' % (filename, lineno, routine)) sys.stderr.write('File "%s", line %d, in %s\n' % (filename, lineno, routine))
def _scons_internal_warning(e): def _scons_internal_warning(e) -> None:
"""Slightly different from _scons_user_warning in that we use the """Slightly different from _scons_user_warning in that we use the
*current call stack* rather than sys.exc_info() to get our stack trace. *current call stack* rather than sys.exc_info() to get our stack trace.
This is used by the warnings framework to print warnings.""" This is used by the warnings framework to print warnings."""
@ -633,7 +635,7 @@ def _scons_internal_warning(e):
sys.stderr.write("\nscons: warning: %s\n" % e.args[0]) sys.stderr.write("\nscons: warning: %s\n" % e.args[0])
sys.stderr.write('File "%s", line %d, in %s\n' % (filename, lineno, routine)) sys.stderr.write('File "%s", line %d, in %s\n' % (filename, lineno, routine))
def _scons_internal_error(): def _scons_internal_error() -> None:
"""Handle all errors but user errors. Print out a message telling """Handle all errors but user errors. Print out a message telling
the user what to do in this case and print a normal trace. the user what to do in this case and print a normal trace.
""" """
@ -641,13 +643,24 @@ def _scons_internal_error():
traceback.print_exc() traceback.print_exc()
sys.exit(2) sys.exit(2)
def _SConstruct_exists(dirname='', repositories=[], filelist=None): def _SConstruct_exists(
"""This function checks that an SConstruct file exists in a directory. dirname: str, repositories: List[str], filelist: List[str]
If so, it returns the path of the file. By default, it checks the ) -> Optional[str]:
current directory. """Check that an SConstruct file exists in a directory.
Arguments:
dirname: the directory to search. If empty, look in cwd.
repositories: a list of repositories to search in addition to the
project directory tree.
filelist: names of SConstruct file(s) to search for.
If empty list, use the built-in list of names.
Returns:
The path to the located SConstruct file, or ``None``.
""" """
if not filelist: if not filelist:
filelist = ['SConstruct', 'Sconstruct', 'sconstruct', 'SConstruct.py', 'Sconstruct.py', 'sconstruct.py'] filelist = KNOWN_SCONSTRUCT_NAMES
for file in filelist: for file in filelist:
sfile = os.path.join(dirname, file) sfile = os.path.join(dirname, file)
if os.path.isfile(sfile): if os.path.isfile(sfile):
@ -658,8 +671,10 @@ def _SConstruct_exists(dirname='', repositories=[], filelist=None):
return sfile return sfile
return None return None
def _set_debug_values(options):
global print_memoizer, print_objects, print_stacktrace, print_time, print_action_timestamps def _set_debug_values(options) -> None:
global print_memoizer, print_objects, print_stacktrace, print_time, \
print_action_timestamps, ENABLE_JSON
debug_values = options.debug debug_values = options.debug
@ -679,11 +694,11 @@ def _set_debug_values(options):
SCons.Warnings.warn(SCons.Warnings.NoObjectCountWarning, msg) SCons.Warnings.warn(SCons.Warnings.NoObjectCountWarning, msg)
if "dtree" in debug_values: if "dtree" in debug_values:
options.tree_printers.append(TreePrinter(derived=True)) options.tree_printers.append(TreePrinter(derived=True))
options.debug_explain = ("explain" in debug_values) options.debug_explain = "explain" in debug_values
if "findlibs" in debug_values: if "findlibs" in debug_values:
SCons.Scanner.Prog.print_find_libs = "findlibs" SCons.Scanner.Prog.print_find_libs = "findlibs"
options.debug_includes = ("includes" in debug_values) options.debug_includes = "includes" in debug_values
print_memoizer = ("memoizer" in debug_values) print_memoizer = "memoizer" in debug_values
if "memory" in debug_values: if "memory" in debug_values:
memory_stats.enable(sys.stdout) memory_stats.enable(sys.stdout)
print_objects = ("objects" in debug_values) print_objects = ("objects" in debug_values)
@ -697,6 +712,8 @@ def _set_debug_values(options):
options.tree_printers.append(TreePrinter(status=True)) options.tree_printers.append(TreePrinter(status=True))
if "time" in debug_values: if "time" in debug_values:
print_time = True print_time = True
time_stats.enable(sys.stdout)
time_stats.enable(sys.stdout)
if "action-timestamps" in debug_values: if "action-timestamps" in debug_values:
print_time = True print_time = True
print_action_timestamps = True print_action_timestamps = True
@ -706,6 +723,10 @@ def _set_debug_values(options):
SCons.Taskmaster.print_prepare = True SCons.Taskmaster.print_prepare = True
if "duplicate" in debug_values: if "duplicate" in debug_values:
SCons.Node.print_duplicate = True SCons.Node.print_duplicate = True
if "json" in debug_values:
ENABLE_JSON = True
if "sconscript" in debug_values:
SCons.Debug.sconscript_trace = True
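Two new --debug tokens are wired up above; illustrative invocations, phrased here as comments (they are not shell text from the diff):

    # scons --debug=json        write the machine-readable stats file
    #                           (destination settable via DebugOptions(), see above)
    # scons --debug=sconscript  trace 'Entering'/'Exiting' as each SConscript is read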
def _create_path(plist): def _create_path(plist):
path = '.' path = '.'
@ -774,7 +795,7 @@ def _load_site_scons_dir(topdir, site_dir_name=None):
if not re_dunder.match(k): if not re_dunder.match(k):
site_m[k] = v site_m[k] = v
with open(spec.origin, 'r') as f: with open(spec.origin) as f:
code = f.read() code = f.read()
try: try:
codeobj = compile(code, spec.name, "exec") codeobj = compile(code, spec.name, "exec")
@ -798,7 +819,7 @@ def _load_site_scons_dir(topdir, site_dir_name=None):
raise raise
def _load_all_site_scons_dirs(topdir, verbose=False): def _load_all_site_scons_dirs(topdir, verbose: bool=False) -> None:
"""Load all of the predefined site_scons dir. """Load all of the predefined site_scons dir.
Order is significant; we load them in order from most generic Order is significant; we load them in order from most generic
(machine-wide) to most specific (topdir). (machine-wide) to most specific (topdir).
@ -841,7 +862,7 @@ def _load_all_site_scons_dirs(topdir, verbose=False):
print("Loading site dir ", d) print("Loading site dir ", d)
_load_site_scons_dir(d) _load_site_scons_dir(d)
def test_load_all_site_scons_dirs(d): def test_load_all_site_scons_dirs(d) -> None:
_load_all_site_scons_dirs(d, True) _load_all_site_scons_dirs(d, True)
def version_string(label, module): def version_string(label, module):
@ -858,7 +879,7 @@ def version_string(label, module):
module.__developer__, module.__developer__,
module.__buildsys__) module.__buildsys__)
def path_string(label, module): def path_string(label, module) -> str:
path = module.__path__ path = module.__path__
return "\t%s path: %s\n"%(label,path) return "\t%s path: %s\n"%(label,path)
@ -919,9 +940,9 @@ def _main(parser):
target_top = None target_top = None
if options.climb_up: if options.climb_up:
target_top = '.' # directory to prepend to targets target_top = '.' # directory to prepend to targets
while script_dir and not _SConstruct_exists(script_dir, while script_dir and not _SConstruct_exists(
options.repository, script_dir, options.repository, options.file
options.file): ):
script_dir, last_part = os.path.split(script_dir) script_dir, last_part = os.path.split(script_dir)
if last_part: if last_part:
target_top = os.path.join(last_part, target_top) target_top = os.path.join(last_part, target_top)
@ -951,8 +972,7 @@ def _main(parser):
if options.file: if options.file:
scripts.extend(options.file) scripts.extend(options.file)
if not scripts: if not scripts:
sfile = _SConstruct_exists(repositories=options.repository, sfile = _SConstruct_exists("", options.repository, options.file)
filelist=options.file)
if sfile: if sfile:
scripts.append(sfile) scripts.append(sfile)
@ -1114,7 +1134,7 @@ def _main(parser):
raise SConsPrintHelpException raise SConsPrintHelpException
else: else:
print(help_text) print(help_text)
print("Use scons -H for help about command-line options.") print("Use scons -H for help about SCons built-in command-line options.")
exit_status = 0 exit_status = 0
return return
@ -1327,7 +1347,7 @@ def _build_targets(fs, options, targets, target_top):
options=options, options=options,
closing_message=closing_message, closing_message=closing_message,
failure_message=failure_message failure_message=failure_message
): ) -> None:
if jobs.were_interrupted(): if jobs.were_interrupted():
if not options.no_progress and not options.silent: if not options.no_progress and not options.silent:
sys.stderr.write("scons: Build interrupted.\n") sys.stderr.write("scons: Build interrupted.\n")
@ -1353,7 +1373,7 @@ def _build_targets(fs, options, targets, target_top):
return nodes return nodes
def _exec_main(parser, values): def _exec_main(parser, values) -> None:
sconsflags = os.environ.get('SCONSFLAGS', '') sconsflags = os.environ.get('SCONSFLAGS', '')
all_args = sconsflags.split() + sys.argv[1:] all_args = sconsflags.split() + sys.argv[1:]
@ -1361,7 +1381,43 @@ def _exec_main(parser, values):
if isinstance(options.debug, list) and "pdb" in options.debug: if isinstance(options.debug, list) and "pdb" in options.debug:
import pdb import pdb
pdb.Pdb().runcall(_main, parser)
class SConsPdb(pdb.Pdb):
"""Specialization of Pdb to help find SConscript files."""
def lookupmodule(self, filename: str) -> Optional[str]:
"""Helper function for break/clear parsing -- SCons version.
Translates (possibly incomplete) file or module name
into an absolute file name. The "possibly incomplete"
means adding a ``.py`` suffix if not present, which breaks
picking breakpoints in sconscript files, which often don't
have a suffix. This version fixes for some known names of
sconscript files that don't have the suffix.
.. versionadded:: 4.6.0
"""
if os.path.isabs(filename) and os.path.exists(filename):
return filename
f = os.path.join(sys.path[0], filename)
if os.path.exists(f) and self.canonic(f) == self.mainpyfile:
return f
root, ext = os.path.splitext(filename)
base = os.path.split(filename)[-1]
if ext == '' and base not in KNOWN_SCONSCRIPTS: # SCons mod
filename = filename + '.py'
if os.path.isabs(filename):
return filename
for dirname in sys.path:
while os.path.islink(dirname):
dirname = os.readlink(dirname)
fullname = os.path.join(dirname, filename)
if os.path.exists(fullname):
return fullname
return None
SConsPdb().runcall(_main, parser)
elif options.profile_file: elif options.profile_file:
from cProfile import Profile from cProfile import Profile
@ -1370,14 +1426,16 @@ def _exec_main(parser, values):
prof.runcall(_main, parser) prof.runcall(_main, parser)
finally: finally:
prof.dump_stats(options.profile_file) prof.dump_stats(options.profile_file)
else: else:
_main(parser) _main(parser)
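The SConsPdb subclass above exists so that breakpoints can name extensionless SConscript files. A hedged example of what that enables (the path src/SConscript and the line number are invented):

    # $ scons --debug=pdb
    # (Pdb) b src/SConscript:12   # resolves even without a .py suffix
    # (Pdb) c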
def main(): def main() -> None:
global OptionsParser global OptionsParser
global exit_status global exit_status
global first_command_start global first_command_start
global ENABLE_JSON
# Check up front for a Python version we do not support. We # Check up front for a Python version we do not support. We
# delay the check for deprecated Python versions until later, # delay the check for deprecated Python versions until later,
@ -1389,6 +1447,13 @@ def main():
sys.stderr.write("scons: *** Minimum Python version is %d.%d.%d\n" %minimum_python_version) sys.stderr.write("scons: *** Minimum Python version is %d.%d.%d\n" %minimum_python_version)
sys.exit(1) sys.exit(1)
try:
import threading
except ImportError:
msg = "scons: *** SCons version %s requires a Python interpreter with support for the `threading` package"
sys.stderr.write(msg % SConsVersion)
sys.exit(1)
parts = ["SCons by Steven Knight et al.:\n"] parts = ["SCons by Steven Knight et al.:\n"]
try: try:
import SCons import SCons
@ -1468,6 +1533,11 @@ def main():
print("Total SConscript file execution time: %f seconds"%sconscript_time) print("Total SConscript file execution time: %f seconds"%sconscript_time)
print("Total SCons execution time: %f seconds"%scons_time) print("Total SCons execution time: %f seconds"%scons_time)
print("Total command execution time: %f seconds"%ct) print("Total command execution time: %f seconds"%ct)
time_stats.total_times(total_time, sconscript_time, scons_time, ct)
if ENABLE_JSON:
write_scons_stats_file()
sys.exit(exit_status) sys.exit(exit_status)
@ -40,7 +40,7 @@ SUPPRESS_HELP = optparse.SUPPRESS_HELP
diskcheck_all = SCons.Node.FS.diskcheck_types() diskcheck_all = SCons.Node.FS.diskcheck_types()
experimental_features = {'warp_speed', 'transporter', 'ninja', 'tm_v2'} experimental_features = {'warp_speed', 'transporter', 'ninja', 'legacy_sched'}
def diskcheck_convert(value): def diskcheck_convert(value):
@ -98,7 +98,7 @@ class SConsValues(optparse.Values):
in the set_option() method. in the set_option() method.
""" """
def __init__(self, defaults): def __init__(self, defaults) -> None:
self.__defaults__ = defaults self.__defaults__ = defaults
self.__SConscript_settings__ = {} self.__SConscript_settings__ = {}
@ -271,8 +271,7 @@ class SConsOption(optparse.Option):
class SConsOptionGroup(optparse.OptionGroup): class SConsOptionGroup(optparse.OptionGroup):
""" """A subclass for SCons-specific option groups.
A subclass for SCons-specific option groups.
The only difference between this and the base class is that we print The only difference between this and the base class is that we print
the group's help text flush left, underneath their own title but the group's help text flush left, underneath their own title but
@ -288,7 +287,8 @@ class SConsOptionGroup(optparse.OptionGroup):
formatter.dedent() formatter.dedent()
result = formatter.format_heading(self.title) result = formatter.format_heading(self.title)
formatter.indent() formatter.indent()
result = result + optparse.OptionContainer.format_help(self, formatter) # bypass OptionGroup format_help and call up to its parent
result += optparse.OptionContainer.format_help(self, formatter)
return result return result
@ -300,11 +300,11 @@ class SConsBadOptionError(optparse.BadOptionError):
""" """
def __init__(self, opt_str, parser=None): def __init__(self, opt_str, parser=None) -> None:
self.opt_str = opt_str self.opt_str = opt_str
self.parser = parser self.parser = parser
def __str__(self): def __str__(self) -> str:
return _("no such option: %s") % self.opt_str return _("no such option: %s") % self.opt_str
@ -396,7 +396,7 @@ class SConsOptionParser(optparse.OptionParser):
option.process(opt, value, values, self) option.process(opt, value, values, self)
def reparse_local_options(self): def reparse_local_options(self) -> None:
""" Re-parse the leftover command-line options. """ Re-parse the leftover command-line options.
Parse options stored in `self.largs`, so that any value Parse options stored in `self.largs`, so that any value
@ -474,7 +474,6 @@ class SConsOptionParser(optparse.OptionParser):
self.local_option_group = group self.local_option_group = group
result = group.add_option(*args, **kw) result = group.add_option(*args, **kw)
if result: if result:
# The option was added successfully. We now have to add the # The option was added successfully. We now have to add the
# default value to our object that holds the default values # default value to our object that holds the default values
@ -489,9 +488,43 @@ class SConsOptionParser(optparse.OptionParser):
return result return result
def format_local_option_help(self, formatter=None, file=None):
"""Return the help for the project-level ("local") options.
.. versionadded:: 4.6.0
"""
if formatter is None:
formatter = self.formatter
try:
group = self.local_option_group
except AttributeError:
return ""
formatter.store_local_option_strings(self, group)
for opt in group.option_list:
strings = formatter.format_option_strings(opt)
formatter.option_strings[opt] = strings
# defeat our own cleverness, which starts out by dedenting
formatter.indent()
local_help = group.format_help(formatter)
formatter.dedent()
return local_help
def print_local_option_help(self, file=None):
"""Print help for just project-defined options.
Writes to *file* (default stdout).
.. versionadded:: 4.6.0
"""
if file is None:
file = sys.stdout
file.write(self.format_local_option_help())
class SConsIndentedHelpFormatter(optparse.IndentedHelpFormatter): class SConsIndentedHelpFormatter(optparse.IndentedHelpFormatter):
def format_usage(self, usage): def format_usage(self, usage) -> str:
""" Formats the usage message. """ """ Formats the usage message. """
return "usage: %s\n" % usage return "usage: %s\n" % usage
@ -504,7 +537,7 @@ class SConsIndentedHelpFormatter(optparse.IndentedHelpFormatter):
""" """
if heading == 'Options': if heading == 'Options':
heading = "SCons Options" heading = "SCons Options"
return optparse.IndentedHelpFormatter.format_heading(self, heading) return super().format_heading(heading)
def format_option(self, option): def format_option(self, option):
""" Customized option formatter. """ Customized option formatter.
@ -577,6 +610,24 @@ class SConsIndentedHelpFormatter(optparse.IndentedHelpFormatter):
result.append("\n") result.append("\n")
return "".join(result) return "".join(result)
def store_local_option_strings(self, parser, group):
"""Local-only version of store_option_strings.
We need to replicate this so the formatter will be set up
properly if we didn't go through the "normal" store_option_strings
.. versionadded:: 4.6.0
"""
self.indent()
max_len = 0
for opt in group.option_list:
strings = self.format_option_strings(opt)
self.option_strings[opt] = strings
max_len = max(max_len, len(strings) + self.current_indent)
self.dedent()
self.help_position = min(max_len + 2, self.max_help_position)
self.help_width = max(self.width - self.help_position, 11)
def Parser(version): def Parser(version):
"""Returns a parser object initialized with the standard SCons options. """Returns a parser object initialized with the standard SCons options.
@ -610,7 +661,7 @@ def Parser(version):
op.version = version op.version = version
# options ignored for compatibility # options ignored for compatibility
def opt_ignore(option, opt, value, parser): def opt_ignore(option, opt, value, parser) -> None:
sys.stderr.write("Warning: ignoring %s option\n" % opt) sys.stderr.write("Warning: ignoring %s option\n" % opt)
op.add_option("-b", "-d", "-e", "-m", "-S", "-t", "-w", op.add_option("-b", "-d", "-e", "-m", "-S", "-t", "-w",
@ -701,7 +752,7 @@ def Parser(version):
debug_options = ["count", "duplicate", "explain", "findlibs", debug_options = ["count", "duplicate", "explain", "findlibs",
"includes", "memoizer", "memory", "objects", "includes", "memoizer", "memory", "objects",
"pdb", "prepare", "presub", "stacktrace", "pdb", "prepare", "presub", "stacktrace",
"time", "action-timestamps"] "time", "action-timestamps", "json", "sconscript"]
def opt_debug(option, opt, value__, parser, def opt_debug(option, opt, value__, parser,
debug_options=debug_options, debug_options=debug_options,
@ -822,11 +873,11 @@ def Parser(version):
action="help", action="help",
help="Print this message and exit") help="Print this message and exit")
def warn_md5_chunksize_deprecated(option, opt, value, parser): def warn_md5_chunksize_deprecated(option, opt, value, parser) -> None:
if opt == '--md5-chunksize': if opt == '--md5-chunksize':
SCons.Warnings.warn(SCons.Warnings.DeprecatedWarning, SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning,
"Parameter %s is deprecated. Use " f"Option {opt} is deprecated. "
"--hash-chunksize instead." % opt) "Use --hash-chunksize instead.")
setattr(parser.values, option.dest, value) setattr(parser.values, option.dest, value)
@ -865,7 +916,7 @@ def Parser(version):
action="store_true", action="store_true",
help="Cache implicit dependencies") help="Cache implicit dependencies")
def opt_implicit_deps(option, opt, value, parser): def opt_implicit_deps(option, opt, value, parser) -> None:
setattr(parser.values, 'implicit_cache', True) setattr(parser.values, 'implicit_cache', True)
setattr(parser.values, option.dest, True) setattr(parser.values, option.dest, True)
@ -1002,7 +1053,7 @@ def Parser(version):
help="Search up directory tree for SConstruct, " help="Search up directory tree for SConstruct, "
"build Default() targets from local SConscript") "build Default() targets from local SConscript")
def opt_version(option, opt, value, parser): def opt_version(option, opt, value, parser) -> None:
sys.stdout.write(parser.version + '\n') sys.stdout.write(parser.version + '\n')
sys.exit(0) sys.exit(0)
@ -1010,7 +1061,7 @@ def Parser(version):
action="callback", callback=opt_version, action="callback", callback=opt_version,
help="Print the SCons version number and exit") help="Print the SCons version number and exit")
def opt_warn(option, opt, value, parser, tree_options=tree_options): def opt_warn(option, opt, value, parser, tree_options=tree_options) -> None:
if SCons.Util.is_String(value): if SCons.Util.is_String(value):
value = value.split(',') value = value.split(',')
parser.values.warn.extend(value) parser.values.warn.extend(value)
@ -1033,7 +1084,7 @@ def Parser(version):
# we don't want to change. These all get a "the -X option is not # we don't want to change. These all get a "the -X option is not
# yet implemented" message and don't show up in the help output. # yet implemented" message and don't show up in the help output.
def opt_not_yet(option, opt, value, parser): def opt_not_yet(option, opt, value, parser) -> None:
msg = "Warning: the %s option is not yet implemented\n" % opt msg = "Warning: the %s option is not yet implemented\n" % opt
sys.stderr.write(msg) sys.stderr.write(msg)
@ -104,7 +104,7 @@ def compute_exports(exports):
class Frame: class Frame:
"""A frame on the SConstruct/SConscript call stack""" """A frame on the SConstruct/SConscript call stack"""
def __init__(self, fs, exports, sconscript): def __init__(self, fs, exports, sconscript) -> None:
self.globals = BuildDefaultGlobals() self.globals = BuildDefaultGlobals()
self.retval = None self.retval = None
self.prev_dir = fs.getcwd() self.prev_dir = fs.getcwd()
@ -145,40 +145,32 @@ def Return(*vars, **kw):
stack_bottom = '% Stack boTTom %' # hard to define a variable w/this name :) stack_bottom = '% Stack boTTom %' # hard to define a variable w/this name :)
def handle_missing_SConscript(f, must_exist=None): def handle_missing_SConscript(f: str, must_exist: bool = True) -> None:
"""Take appropriate action on missing file in SConscript() call. """Take appropriate action on missing file in SConscript() call.
Print a warning or raise an exception on missing file, unless Print a warning or raise an exception on missing file, unless
missing is explicitly allowed by the *must_exist* value. missing is explicitly allowed by the *must_exist* parameter or by
On first warning, print a deprecation message. a global flag.
Args: Args:
f (str): path of missing configuration file f: path to missing configuration file
must_exist (bool): if true, fail. If false, but not ``None``, must_exist: if true (the default), fail. If false
allow the file to be missing. The default is ``None``, do nothing, allowing a build to declare it's okay to be missing.
which means issue the warning. The default is deprecated.
Raises: Raises:
UserError: if *must_exist* is true or if global UserError: if *must_exist* is true or if global
:data:`SCons.Script._no_missing_sconscript` is true. :data:`SCons.Script._no_missing_sconscript` is true.
"""
if must_exist or (SCons.Script._no_missing_sconscript and must_exist is not False): .. versionchanged: 4.6.0
msg = "Fatal: missing SConscript '%s'" % f.get_internal_path() Changed default from False.
"""
if not must_exist: # explicitly set False: ok
return
if not SCons.Script._no_missing_sconscript: # system default changed: ok
return
msg = f"missing SConscript file {f.get_internal_path()!r}"
raise SCons.Errors.UserError(msg) raise SCons.Errors.UserError(msg)
if must_exist is None:
if SCons.Script._warn_missing_sconscript_deprecated:
msg = (
"Calling missing SConscript without error is deprecated.\n"
"Transition by adding must_exist=False to SConscript calls.\n"
"Missing SConscript '%s'" % f.get_internal_path()
)
SCons.Warnings.warn(SCons.Warnings.MissingSConscriptWarning, msg)
SCons.Script._warn_missing_sconscript_deprecated = False
else:
msg = "Ignoring missing SConscript '%s'" % f.get_internal_path()
SCons.Warnings.warn(SCons.Warnings.MissingSConscriptWarning, msg)
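Because the must_exist default flips to True here, a missing SConscript becomes a hard error unless the build opts out per call. A one-line SConscript sketch (the path is invented):

    # Tolerate an optional, possibly absent sub-build:
    SConscript('optional/SConscript', must_exist=False)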
def _SConscript(fs, *files, **kw): def _SConscript(fs, *files, **kw):
top = fs.Top top = fs.Top
@ -282,8 +274,15 @@ def _SConscript(fs, *files, **kw):
scriptdata = _file_.read() scriptdata = _file_.read()
scriptname = _file_.name scriptname = _file_.name
_file_.close() _file_.close()
if SCons.Debug.sconscript_trace:
print("scons: Entering "+str(scriptname))
exec(compile(scriptdata, scriptname, 'exec'), call_stack[-1].globals) exec(compile(scriptdata, scriptname, 'exec'), call_stack[-1].globals)
if SCons.Debug.sconscript_trace:
print("scons: Exiting "+str(scriptname))
except SConscriptReturn: except SConscriptReturn:
if SCons.Debug.sconscript_trace:
print("scons: Exiting "+str(scriptname))
else:
pass pass
finally: finally:
if Main.print_time: if Main.print_time:
@ -294,7 +293,7 @@ def _SConscript(fs, *files, **kw):
call_stack[-1].globals.update({__file__:old_file}) call_stack[-1].globals.update({__file__:old_file})
else: else:
handle_missing_SConscript(f, kw.get('must_exist', None)) handle_missing_SConscript(f, kw.get('must_exist', True))
finally: finally:
SCons.Script.sconscript_reading = SCons.Script.sconscript_reading - 1 SCons.Script.sconscript_reading = SCons.Script.sconscript_reading - 1
@ -332,7 +331,7 @@ def _SConscript(fs, *files, **kw):
else: else:
return tuple(results) return tuple(results)
def SConscript_exception(file=sys.stderr): def SConscript_exception(file=sys.stderr) -> None:
"""Print an exception stack trace just for the SConscript file(s). """Print an exception stack trace just for the SConscript file(s).
This will show users who have Python errors where the problem is, This will show users who have Python errors where the problem is,
without cluttering the output with all of the internal calls leading without cluttering the output with all of the internal calls leading
@ -481,11 +480,11 @@ class SConsEnvironment(SCons.Environment.Base):
kw['_depth'] = kw.get('_depth', 0) + 1 kw['_depth'] = kw.get('_depth', 0) + 1
return SCons.Environment.Base.Configure(self, *args, **kw) return SCons.Environment.Base.Configure(self, *args, **kw)
def Default(self, *targets): def Default(self, *targets) -> None:
SCons.Script._Set_Default_Targets(self, targets) SCons.Script._Set_Default_Targets(self, targets)
@staticmethod @staticmethod
def EnsureSConsVersion(major, minor, revision=0): def EnsureSConsVersion(major, minor, revision: int=0) -> None:
"""Exit abnormally if the SCons version is not late enough.""" """Exit abnormally if the SCons version is not late enough."""
# split string to avoid replacement during build process # split string to avoid replacement during build process
if SCons.__version__ == '__' + 'VERSION__': if SCons.__version__ == '__' + 'VERSION__':
@ -503,7 +502,7 @@ class SConsEnvironment(SCons.Environment.Base):
sys.exit(2) sys.exit(2)
@staticmethod @staticmethod
def EnsurePythonVersion(major, minor): def EnsurePythonVersion(major, minor) -> None:
"""Exit abnormally if the Python version is not late enough.""" """Exit abnormally if the Python version is not late enough."""
if sys.version_info < (major, minor): if sys.version_info < (major, minor):
v = sys.version.split()[0] v = sys.version.split()[0]
@ -511,10 +510,10 @@ class SConsEnvironment(SCons.Environment.Base):
sys.exit(2) sys.exit(2)
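EnsureSConsVersion()/EnsurePythonVersion() are unchanged in behaviour here, only annotated, but they are the natural guard for builds that rely on features added in this line of releases. An illustrative sketch (the version numbers are examples, not requirements stated by this commit):

    # SConstruct -- illustrative only
    EnsureSConsVersion(4, 6)    # e.g. needed for Help(..., keep_local=True)
    EnsurePythonVersion(3, 6)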
@staticmethod @staticmethod
def Exit(value=0): def Exit(value: int=0) -> None:
sys.exit(value) sys.exit(value)
def Export(self, *vars, **kw): def Export(self, *vars, **kw) -> None:
for var in vars: for var in vars:
global_exports.update(compute_exports(self.Split(var))) global_exports.update(compute_exports(self.Split(var)))
global_exports.update(kw) global_exports.update(kw)
@ -528,10 +527,25 @@ class SConsEnvironment(SCons.Environment.Base):
name = self.subst(name) name = self.subst(name)
return SCons.Script.Main.GetOption(name) return SCons.Script.Main.GetOption(name)
def Help(self, text, append: bool = False, keep_local: bool = False) -> None:
"""Update the help text.
def Help(self, text, append=False): The previous help text has *text* appended to it, except on the
first call. On first call, the values of *append* and *keep_local*
are considered to determine what is appended to.
Arguments:
text: string to add to the help text.
append: on first call, if true, keep the existing help text
(default False).
keep_local: on first call, if true and *append* is also true,
keep only the help text from AddOption calls.
.. versionchanged:: 4.6.0
The *keep_local* parameter was added.
"""
text = self.subst(text, raw=1) text = self.subst(text, raw=1)
SCons.Script.HelpFunction(text, append=append) SCons.Script.HelpFunction(text, append=append, keep_local=keep_local)
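The new keep_local flag only matters on the first Help() call, as the docstring above says. A hedged SConstruct sketch (the option name and help text are invented):

    # SConstruct -- illustrative only
    AddOption('--flavor', dest='flavor', default='release', help='build flavor')
    env = Environment()
    # First call: keep only the AddOption() help above (not SCons' built-in
    # option help), then append the project-specific text.
    env.Help("Project targets: scons hello\n", append=True, keep_local=True)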
def Import(self, *vars): def Import(self, *vars):
try: try:
@ -602,7 +616,7 @@ class SConsEnvironment(SCons.Environment.Base):
global sconscript_chdir global sconscript_chdir
sconscript_chdir = flag sconscript_chdir = flag
def SetOption(self, name, value): def SetOption(self, name, value) -> None:
name = self.subst(name) name = self.subst(name)
SCons.Script.Main.SetOption(name, value) SCons.Script.Main.SetOption(name, value)
@ -650,7 +664,7 @@ class DefaultEnvironmentCall:
thereby prevent expansion of construction variables (since from thereby prevent expansion of construction variables (since from
the user's point of view this was called as a global function, the user's point of view this was called as a global function,
with no associated construction environment).""" with no associated construction environment)."""
def __init__(self, method_name, subst=0): def __init__(self, method_name, subst: int=0) -> None:
self.method_name = method_name self.method_name = method_name
if subst: if subst:
self.factory = SCons.Defaults.DefaultEnvironment self.factory = SCons.Defaults.DefaultEnvironment
@ -674,7 +688,7 @@ def BuildDefaultGlobals():
import SCons.Script import SCons.Script
d = SCons.Script.__dict__ d = SCons.Script.__dict__
def not_a_module(m, d=d, mtype=type(SCons.Script)): def not_a_module(m, d=d, mtype=type(SCons.Script)) -> bool:
return not isinstance(d[m], mtype) return not isinstance(d[m], mtype)
for m in filter(not_a_module, dir(SCons.Script)): for m in filter(not_a_module, dir(SCons.Script)):
GlobalDict[m] = d[m] GlobalDict[m] = d[m]
@ -100,7 +100,6 @@ main = Main.main
BuildTask = Main.BuildTask BuildTask = Main.BuildTask
CleanTask = Main.CleanTask CleanTask = Main.CleanTask
QuestionTask = Main.QuestionTask QuestionTask = Main.QuestionTask
#PrintHelp = Main.PrintHelp
#SConscriptSettableOptions = Main.SConscriptSettableOptions #SConscriptSettableOptions = Main.SConscriptSettableOptions
AddOption = Main.AddOption AddOption = Main.AddOption
@ -110,6 +109,7 @@ SetOption = Main.SetOption
ValidateOptions = Main.ValidateOptions ValidateOptions = Main.ValidateOptions
Progress = Main.Progress Progress = Main.Progress
GetBuildFailures = Main.GetBuildFailures GetBuildFailures = Main.GetBuildFailures
DebugOptions = Main.DebugOptions
#keep_going_on_error = Main.keep_going_on_error #keep_going_on_error = Main.keep_going_on_error
#print_dtree = Main.print_dtree #print_dtree = Main.print_dtree
@ -175,11 +175,11 @@ DefaultEnvironment = SCons.Defaults.DefaultEnvironment
# Other variables we provide. # Other variables we provide.
class TargetList(collections.UserList): class TargetList(collections.UserList):
def _do_nothing(self, *args, **kw): def _do_nothing(self, *args, **kw) -> None:
pass pass
def _add_Default(self, list): def _add_Default(self, list) -> None:
self.extend(list) self.extend(list)
def _clear(self): def _clear(self) -> None:
del self[:] del self[:]
ARGUMENTS = {} ARGUMENTS = {}
@ -199,13 +199,13 @@ DEFAULT_TARGETS = []
# own targets to BUILD_TARGETS. # own targets to BUILD_TARGETS.
_build_plus_default = TargetList() _build_plus_default = TargetList()
def _Add_Arguments(alist): def _Add_Arguments(alist) -> None:
for arg in alist: for arg in alist:
a, b = arg.split('=', 1) a, b = arg.split('=', 1)
ARGUMENTS[a] = b ARGUMENTS[a] = b
ARGLIST.append((a, b)) ARGLIST.append((a, b))
def _Add_Targets(tlist): def _Add_Targets(tlist) -> None:
if tlist: if tlist:
COMMAND_LINE_TARGETS.extend(tlist) COMMAND_LINE_TARGETS.extend(tlist)
BUILD_TARGETS.extend(tlist) BUILD_TARGETS.extend(tlist)
@ -225,7 +225,7 @@ def _Set_Default_Targets_Has_Not_Been_Called(d, fs):
_Get_Default_Targets = _Set_Default_Targets_Has_Not_Been_Called _Get_Default_Targets = _Set_Default_Targets_Has_Not_Been_Called
def _Set_Default_Targets(env, tlist): def _Set_Default_Targets(env, tlist) -> None:
global DEFAULT_TARGETS global DEFAULT_TARGETS
global _Get_Default_Targets global _Get_Default_Targets
_Get_Default_Targets = _Set_Default_Targets_Has_Been_Called _Get_Default_Targets = _Set_Default_Targets_Has_Been_Called
@ -247,31 +247,37 @@ def _Set_Default_Targets(env, tlist):
BUILD_TARGETS._add_Default(nodes) BUILD_TARGETS._add_Default(nodes)
_build_plus_default._add_Default(nodes) _build_plus_default._add_Default(nodes)
#
help_text = None help_text = None
def HelpFunction(text, append=False):
def HelpFunction(text, append: bool = False, keep_local: bool = False) -> None:
"""The implementaion of the the ``Help`` method.
See :meth:`~SCons.Script.SConscript.Help`.
.. versionchanged:: 4.6.0
The *keep_local* parameter was added.
"""
global help_text global help_text
if help_text is None: if help_text is None:
if append: if append:
s = StringIO() with StringIO() as s:
PrintHelp(s) PrintHelp(s, local_only=keep_local)
help_text = s.getvalue() help_text = s.getvalue()
s.close()
else: else:
help_text = "" help_text = ""
help_text= help_text + text help_text += text
#
# Will be non-zero if we are reading an SConscript file. # Will be non-zero if we are reading an SConscript file.
sconscript_reading = 0 sconscript_reading: int = 0
_no_missing_sconscript = False _no_missing_sconscript = True
_warn_missing_sconscript_deprecated = True _warn_missing_sconscript_deprecated = False # TODO: now unused
def set_missing_sconscript_error(flag=1): def set_missing_sconscript_error(flag: bool = True) -> bool:
"""Set behavior on missing file in SConscript() call. """Set behavior on missing file in SConscript() call.
Returns: Returns:
@ -337,6 +343,7 @@ GlobalDefaultEnvironmentFunctions = [
'Local', 'Local',
'ParseDepends', 'ParseDepends',
'Precious', 'Precious',
'Pseudo',
'PyPackageDir', 'PyPackageDir',
'Repository', 'Repository',
'Requires', 'Requires',
@ -26,6 +26,7 @@
import collections import collections
import re import re
from inspect import signature, Parameter from inspect import signature, Parameter
from typing import Optional
import SCons.Errors import SCons.Errors
from SCons.Util import is_String, is_Sequence from SCons.Util import is_String, is_Sequence
@ -40,7 +41,7 @@ _strconv = [
AllowableExceptions = (IndexError, NameError) AllowableExceptions = (IndexError, NameError)
def SetAllowableExceptions(*excepts): def SetAllowableExceptions(*excepts) -> None:
global AllowableExceptions global AllowableExceptions
AllowableExceptions = [_f for _f in excepts if _f] AllowableExceptions = [_f for _f in excepts if _f]
@ -59,10 +60,10 @@ class Literal:
around a string, then it will be interpreted as literal. around a string, then it will be interpreted as literal.
When passed to the command interpreter, all special When passed to the command interpreter, all special
characters will be escaped.""" characters will be escaped."""
def __init__(self, lstr): def __init__(self, lstr) -> None:
self.lstr = lstr self.lstr = lstr
def __str__(self): def __str__(self) -> str:
return self.lstr return self.lstr
def escape(self, escape_func): def escape(self, escape_func):
@ -71,15 +72,15 @@ class Literal:
def for_signature(self): def for_signature(self):
return self.lstr return self.lstr
def is_literal(self): def is_literal(self) -> bool:
return 1 return True
def __eq__(self, other): def __eq__(self, other):
if not isinstance(other, Literal): if not isinstance(other, Literal):
return False return False
return self.lstr == other.lstr return self.lstr == other.lstr
def __neq__(self, other): def __neq__(self, other) -> bool:
return not self.__eq__(other) return not self.__eq__(other)
def __hash__(self): def __hash__(self):
@ -94,7 +95,7 @@ class SpecialAttrWrapper:
such that we can return some canonical string during signature such that we can return some canonical string during signature
calculation to avoid unnecessary rebuilds.""" calculation to avoid unnecessary rebuilds."""
def __init__(self, lstr, for_signature=None): def __init__(self, lstr, for_signature=None) -> None:
"""The for_signature parameter, if supplied, will be the """The for_signature parameter, if supplied, will be the
canonical string we return from for_signature(). Else canonical string we return from for_signature(). Else
we will simply return lstr.""" we will simply return lstr."""
@ -104,7 +105,7 @@ class SpecialAttrWrapper:
else: else:
self.forsig = lstr self.forsig = lstr
def __str__(self): def __str__(self) -> str:
return self.lstr return self.lstr
def escape(self, escape_func): def escape(self, escape_func):
@ -113,8 +114,8 @@ class SpecialAttrWrapper:
def for_signature(self): def for_signature(self):
return self.forsig return self.forsig
def is_literal(self): def is_literal(self) -> bool:
return 1 return True
def quote_spaces(arg): def quote_spaces(arg):
"""Generic function for putting double quotes around any string that """Generic function for putting double quotes around any string that
@ -131,11 +132,11 @@ class CmdStringHolder(collections.UserString):
particular platform, it will return the contained string with the particular platform, it will return the contained string with the
proper escape sequences inserted. proper escape sequences inserted.
""" """
def __init__(self, cmd, literal=None): def __init__(self, cmd, literal=None) -> None:
super().__init__(cmd) super().__init__(cmd)
self.literal = literal self.literal = literal
def is_literal(self): def is_literal(self) -> bool:
return self.literal return self.literal
def escape(self, escape_func, quote_func=quote_spaces): def escape(self, escape_func, quote_func=quote_spaces):
@ -180,7 +181,7 @@ class NLWrapper:
cleaner conceptually... cleaner conceptually...
""" """
def __init__(self, list, func): def __init__(self, list, func) -> None:
self.list = list self.list = list
self.func = func self.func = func
def _return_nodelist(self): def _return_nodelist(self):
@ -209,7 +210,7 @@ class Targets_or_Sources(collections.UserList):
a list during variable expansion. We're not really using any a list during variable expansion. We're not really using any
collections.UserList methods in practice. collections.UserList methods in practice.
""" """
def __init__(self, nl): def __init__(self, nl) -> None:
self.nl = nl self.nl = nl
def __getattr__(self, attr): def __getattr__(self, attr):
nl = self.nl._create_nodelist() nl = self.nl._create_nodelist()
@ -217,10 +218,10 @@ class Targets_or_Sources(collections.UserList):
def __getitem__(self, i): def __getitem__(self, i):
nl = self.nl._create_nodelist() nl = self.nl._create_nodelist()
return nl[i] return nl[i]
def __str__(self): def __str__(self) -> str:
nl = self.nl._create_nodelist() nl = self.nl._create_nodelist()
return str(nl) return str(nl)
def __repr__(self): def __repr__(self) -> str:
nl = self.nl._create_nodelist() nl = self.nl._create_nodelist()
return repr(nl) return repr(nl)
@ -230,7 +231,7 @@ class Target_or_Source:
to access an individual proxy Node, calling the NLWrapper to create to access an individual proxy Node, calling the NLWrapper to create
a proxy on demand. a proxy on demand.
""" """
def __init__(self, nl): def __init__(self, nl) -> None:
self.nl = nl self.nl = nl
def __getattr__(self, attr): def __getattr__(self, attr):
nl = self.nl._create_nodelist() nl = self.nl._create_nodelist()
@ -241,20 +242,20 @@ class Target_or_Source:
# pass through, so raise AttributeError for everything. # pass through, so raise AttributeError for everything.
raise AttributeError("NodeList has no attribute: %s" % attr) raise AttributeError("NodeList has no attribute: %s" % attr)
return getattr(nl0, attr) return getattr(nl0, attr)
def __str__(self): def __str__(self) -> str:
nl = self.nl._create_nodelist() nl = self.nl._create_nodelist()
if nl: if nl:
return str(nl[0]) return str(nl[0])
return '' return ''
def __repr__(self): def __repr__(self) -> str:
nl = self.nl._create_nodelist() nl = self.nl._create_nodelist()
if nl: if nl:
return repr(nl[0]) return repr(nl[0])
return '' return ''
class NullNodeList(SCons.Util.NullSeq): class NullNodeList(SCons.Util.NullSeq):
def __call__(self, *args, **kwargs): return '' def __call__(self, *args, **kwargs) -> str: return ''
def __str__(self): return '' def __str__(self) -> str: return ''
NullNodesList = NullNodeList() NullNodesList = NullNodeList()
@ -335,7 +336,7 @@ class StringSubber:
""" """
def __init__(self, env, mode, conv, gvars): def __init__(self, env, mode, conv, gvars) -> None:
self.env = env self.env = env
self.mode = mode self.mode = mode
self.conv = conv self.conv = conv
@ -448,11 +449,12 @@ class StringSubber:
This serves as a wrapper for splitting up a string into This serves as a wrapper for splitting up a string into
separate tokens. separate tokens.
""" """
def sub_match(match):
return self.conv(self.expand(match.group(1), lvars))
if is_String(args) and not isinstance(args, CmdStringHolder): if is_String(args) and not isinstance(args, CmdStringHolder):
args = str(args) # In case it's a UserString. args = str(args) # In case it's a UserString.
try: try:
def sub_match(match):
return self.conv(self.expand(match.group(1), lvars))
result = _dollar_exps.sub(sub_match, args) result = _dollar_exps.sub(sub_match, args)
except TypeError: except TypeError:
# If the internal conversion routine doesn't return # If the internal conversion routine doesn't return
@ -489,7 +491,7 @@ class ListSubber(collections.UserList):
and the rest of the object takes care of doing the right thing and the rest of the object takes care of doing the right thing
internally. internally.
""" """
def __init__(self, env, mode, conv, gvars): def __init__(self, env, mode, conv, gvars) -> None:
super().__init__([]) super().__init__([])
self.env = env self.env = env
self.mode = mode self.mode = mode
@ -503,7 +505,7 @@ class ListSubber(collections.UserList):
self.in_strip = None self.in_strip = None
self.next_line() self.next_line()
def expanded(self, s): def expanded(self, s) -> bool:
"""Determines if the string s requires further expansion. """Determines if the string s requires further expansion.
Due to the implementation of ListSubber expand will call Due to the implementation of ListSubber expand will call
@ -620,7 +622,7 @@ class ListSubber(collections.UserList):
else: else:
self.append(s) self.append(s)
def substitute(self, args, lvars, within_list): def substitute(self, args, lvars, within_list) -> None:
"""Substitute expansions in an argument or list of arguments. """Substitute expansions in an argument or list of arguments.
This serves as a wrapper for splitting up a string into This serves as a wrapper for splitting up a string into
@ -643,23 +645,23 @@ class ListSubber(collections.UserList):
else: else:
self.expand(args, lvars, within_list) self.expand(args, lvars, within_list)
def next_line(self): def next_line(self) -> None:
"""Arrange for the next word to start a new line. This """Arrange for the next word to start a new line. This
is like starting a new word, except that we have to append is like starting a new word, except that we have to append
another line to the result.""" another line to the result."""
collections.UserList.append(self, []) collections.UserList.append(self, [])
self.next_word() self.next_word()
def this_word(self): def this_word(self) -> None:
"""Arrange for the next word to append to the end of the """Arrange for the next word to append to the end of the
current last word in the result.""" current last word in the result."""
self.append = self.add_to_current_word self.append = self.add_to_current_word
def next_word(self): def next_word(self) -> None:
"""Arrange for the next word to start a new word.""" """Arrange for the next word to start a new word."""
self.append = self.add_new_word self.append = self.add_new_word
def add_to_current_word(self, x): def add_to_current_word(self, x) -> None:
"""Append the string x to the end of the current last word """Append the string x to the end of the current last word
in the result. If that is not possible, then just add in the result. If that is not possible, then just add
it as a new word. Make sure the entire concatenated string it as a new word. Make sure the entire concatenated string
@ -707,7 +709,7 @@ class ListSubber(collections.UserList):
y = CmdStringHolder(y, None) y = CmdStringHolder(y, None)
self[-1][-1] = y self[-1][-1] = y
def add_new_word(self, x): def add_new_word(self, x) -> None:
if not self.in_strip or self.mode != SUBST_SIG: if not self.in_strip or self.mode != SUBST_SIG:
literal = self.literal(x) literal = self.literal(x)
x = self.conv(x) x = self.conv(x)
@ -724,12 +726,12 @@ class ListSubber(collections.UserList):
else: else:
return l() return l()
def open_strip(self, x): def open_strip(self, x) -> None:
"""Handle the "open strip" $( token.""" """Handle the "open strip" $( token."""
self.add_strip(x) self.add_strip(x)
self.in_strip = 1 self.in_strip = 1
def close_strip(self, x): def close_strip(self, x) -> None:
"""Handle the "close strip" $) token.""" """Handle the "close strip" $) token."""
self.add_strip(x) self.add_strip(x)
self.in_strip = None self.in_strip = None
@ -805,7 +807,7 @@ _separate_args = re.compile(r'(%s|\s+|[^\s$]+|\$)' % _dollar_exps_str)
_space_sep = re.compile(r'[\t ]+(?![^{]*})') _space_sep = re.compile(r'[\t ]+(?![^{]*})')
def scons_subst(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None, overrides=False): def scons_subst(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None, overrides: Optional[dict] = None):
"""Expand a string or list containing construction variable """Expand a string or list containing construction variable
substitutions. substitutions.
@ -887,7 +889,7 @@ def scons_subst(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={
return result return result
def scons_subst_list(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None,overrides=False): def scons_subst_list(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None, overrides: Optional[dict] = None):
"""Substitute construction variables in a string (or list or other """Substitute construction variables in a string (or list or other
object) and separate the arguments into a command list. object) and separate the arguments into a command list.
@ -0,0 +1,763 @@
# MIT License
#
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Serial and Parallel classes to execute build tasks.
The Jobs class provides a higher level interface to start,
stop, and wait on jobs.
"""
import SCons.compat
import logging
import os
import queue
import signal
import sys
import threading
from enum import Enum
import SCons.Errors
import SCons.Warnings
# The default stack size (in kilobytes) of the threads used to execute
# jobs in parallel.
#
# We use a stack size of 256 kilobytes. The default on some platforms
# is too large and prevents us from creating enough threads to fully
# parallelize the build. For example, the default stack size on Linux
# is 8 MBytes.
explicit_stack_size = None
default_stack_size = 256
interrupt_msg = 'Build interrupted.'
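The stack-size knobs above are driven from the long-standing stack_size setting; a hedged SConstruct sketch (512 is an arbitrary value, in kilobytes as the comment above states):

    SetOption('stack_size', 512)   # feeds explicit_stack_size for worker threads
    # command-line equivalent: scons --stack-size=512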
class InterruptState:
def __init__(self) -> None:
self.interrupted = False
def set(self) -> None:
self.interrupted = True
def __call__(self):
return self.interrupted
class Jobs:
"""An instance of this class initializes N jobs, and provides
methods for starting, stopping, and waiting on all N jobs.
"""
def __init__(self, num, taskmaster) -> None:
"""
Create 'num' jobs using the given taskmaster. The exact implementation
used varies with the number of jobs requested and the state of the `legacy_sched` flag
to `--experimental`.
"""
# Importing GetOption here instead of at top of file to avoid
# circular imports
# pylint: disable=import-outside-toplevel
from SCons.Script import GetOption
stack_size = explicit_stack_size
if stack_size is None:
stack_size = default_stack_size
experimental_option = GetOption('experimental') or []
if 'legacy_sched' in experimental_option:
if num > 1:
self.job = LegacyParallel(taskmaster, num, stack_size)
else:
self.job = Serial(taskmaster)
else:
self.job = NewParallel(taskmaster, num, stack_size)
self.num_jobs = num
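Per the constructor above, the rewritten scheduler is now the default and the old one sits behind an experimental flag. Illustrative invocations (not text from the diff):

    # scons -j8                               NewParallel scheduler (default path above)
    # scons -j8 --experimental=legacy_sched   old ThreadPool-based LegacyParallel
    # scons -j1 --experimental=legacy_sched   Serial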
def run(self, postfunc=lambda: None) -> None:
"""Run the jobs.
postfunc() will be invoked after the jobs have run. It will be
invoked even if the jobs are interrupted by a keyboard
interrupt (well, in fact by a signal such as either SIGINT,
SIGTERM or SIGHUP). The execution of postfunc() is protected
against keyboard interrupts and is guaranteed to run to
completion."""
self._setup_sig_handler()
try:
self.job.start()
finally:
postfunc()
self._reset_sig_handler()
def were_interrupted(self):
"""Returns whether the jobs were interrupted by a signal."""
return self.job.interrupted()
def _setup_sig_handler(self) -> None:
"""Setup an interrupt handler so that SCons can shutdown cleanly in
various conditions:
a) SIGINT: Keyboard interrupt
b) SIGTERM: kill or system shutdown
c) SIGHUP: Controlling shell exiting
We handle all of these cases by stopping the taskmaster. It
turns out that it's very difficult to stop the build process
by throwing asynchronously an exception such as
KeyboardInterrupt. For example, the python Condition
variables (threading.Condition) and queues do not seem to be
asynchronous-exception-safe. It would require adding a whole
bunch of try/finally block and except KeyboardInterrupt all
over the place.
Note also that we have to be careful to handle the case when
SCons forks before executing another process. In that case, we
want the child to exit immediately.
"""
def handler(signum, stack, self=self, parentpid=os.getpid()) -> None:
if os.getpid() == parentpid:
self.job.taskmaster.stop()
self.job.interrupted.set()
else:
os._exit(2) # pylint: disable=protected-access
self.old_sigint = signal.signal(signal.SIGINT, handler)
self.old_sigterm = signal.signal(signal.SIGTERM, handler)
try:
self.old_sighup = signal.signal(signal.SIGHUP, handler)
except AttributeError:
pass
if (self.old_sigint is None) or (self.old_sigterm is None) or \
(hasattr(self, "old_sighup") and self.old_sighup is None):
msg = "Overwritting previous signal handler which was not installed from Python. " + \
"Will not be able to reinstate and so will return to default handler."
SCons.Warnings.warn(SCons.Warnings.SConsWarning, msg)
def _reset_sig_handler(self) -> None:
"""Restore the signal handlers to their previous state (before the
        call to _setup_sig_handler())."""
sigint_to_use = self.old_sigint if self.old_sigint is not None else signal.SIG_DFL
sigterm_to_use = self.old_sigterm if self.old_sigterm is not None else signal.SIG_DFL
signal.signal(signal.SIGINT, sigint_to_use)
signal.signal(signal.SIGTERM, sigterm_to_use)
try:
            sighup_to_use = self.old_sighup if self.old_sighup is not None else signal.SIG_DFL
            signal.signal(signal.SIGHUP, sighup_to_use)
except AttributeError:
pass
class Serial:
"""This class is used to execute tasks in series, and is more efficient
    than the parallel job classes, but is only appropriate for non-parallel builds. Only
one instance of this class should be in existence at a time.
This class is not thread safe.
"""
def __init__(self, taskmaster) -> None:
"""Create a new serial job given a taskmaster.
The taskmaster's next_task() method should return the next task
that needs to be executed, or None if there are no more tasks. The
taskmaster's executed() method will be called for each task when it
is successfully executed, or failed() will be called if it failed to
execute (e.g. execute() raised an exception)."""
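        # For reference, a hypothetical stand-in sketching the duck-typed
        # interface relied on here (the real object is an
        # SCons.Taskmaster.Taskmaster):
        #
        #   class MinimalTaskmaster:
        #       def next_task(self): ...   # returns a task or None
        #       def cleanup(self) -> None: ...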
self.taskmaster = taskmaster
self.interrupted = InterruptState()
def start(self):
"""Start the job. This will begin pulling tasks from the taskmaster
and executing them, and return when there are no more tasks. If a task
fails to execute (i.e. execute() raises an exception), then the job will
stop."""
while True:
task = self.taskmaster.next_task()
if task is None:
break
try:
task.prepare()
if task.needs_execute():
task.execute()
except Exception:
if self.interrupted():
try:
raise SCons.Errors.BuildError(
task.targets[0], errstr=interrupt_msg)
except Exception:
task.exception_set()
else:
task.exception_set()
# Let the failed() callback function arrange for the
# build to stop if that's appropriate.
task.failed()
else:
task.executed()
task.postprocess()
self.taskmaster.cleanup()
class Worker(threading.Thread):
"""A worker thread waits on a task to be posted to its request queue,
dequeues the task, executes it, and posts a tuple including the task
and a boolean indicating whether the task executed successfully. """
def __init__(self, requestQueue, resultsQueue, interrupted) -> None:
super().__init__()
self.daemon = True
self.requestQueue = requestQueue
self.resultsQueue = resultsQueue
self.interrupted = interrupted
self.start()
def run(self):
while True:
task = self.requestQueue.get()
if task is None:
# The "None" value is used as a sentinel by
# ThreadPool.cleanup(). This indicates that there
# are no more tasks, so we should quit.
break
try:
if self.interrupted():
raise SCons.Errors.BuildError(
task.targets[0], errstr=interrupt_msg)
task.execute()
except Exception:
task.exception_set()
ok = False
else:
ok = True
self.resultsQueue.put((task, ok))
class ThreadPool:
"""This class is responsible for spawning and managing worker threads."""
def __init__(self, num, stack_size, interrupted) -> None:
"""Create the request and reply queues, and 'num' worker threads.
One must specify the stack size of the worker threads. The
stack size is specified in kilobytes.
"""
self.requestQueue = queue.Queue(0)
self.resultsQueue = queue.Queue(0)
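        # threading.stack_size() expects a size in bytes, hence the * 1024
        # conversion below; it can raise if this platform or Python build
        # does not allow changing the thread stack size.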
try:
prev_size = threading.stack_size(stack_size * 1024)
except AttributeError as e:
# Only print a warning if the stack size has been
# explicitly set.
if explicit_stack_size is not None:
msg = "Setting stack size is unsupported by this version of Python:\n " + \
e.args[0]
SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
except ValueError as e:
msg = "Setting stack size failed:\n " + str(e)
SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
# Create worker threads
self.workers = []
for _ in range(num):
worker = Worker(self.requestQueue, self.resultsQueue, interrupted)
self.workers.append(worker)
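        # 'prev_size' is only bound if the threading.stack_size() call above
        # succeeded, so check before restoring the original value.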
if 'prev_size' in locals():
threading.stack_size(prev_size)
def put(self, task) -> None:
"""Put task into request queue."""
self.requestQueue.put(task)
def get(self):
"""Remove and return a result tuple from the results queue."""
return self.resultsQueue.get()
def preparation_failed(self, task) -> None:
self.resultsQueue.put((task, False))
def cleanup(self) -> None:
"""
Shuts down the thread pool, giving each worker thread a
chance to shut down gracefully.
"""
# For each worker thread, put a sentinel "None" value
# on the requestQueue (indicating that there's no work
# to be done) so that each worker thread will get one and
# terminate gracefully.
for _ in self.workers:
self.requestQueue.put(None)
# Wait for all of the workers to terminate.
#
# If we don't do this, later Python versions (2.4, 2.5) often
# seem to raise exceptions during shutdown. This happens
# in requestQueue.get(), as an assertion failure that
# requestQueue.not_full is notified while not acquired,
# seemingly because the main thread has shut down (or is
# in the process of doing so) while the workers are still
# trying to pull sentinels off the requestQueue.
#
# Normally these terminations should happen fairly quickly,
# but we'll stick a one-second timeout on here just in case
# someone gets hung.
for worker in self.workers:
worker.join(1.0)
self.workers = []
class LegacyParallel:
"""This class is used to execute tasks in parallel, and is somewhat
less efficient than Serial, but is appropriate for parallel builds.
This class is thread safe.
"""
def __init__(self, taskmaster, num, stack_size) -> None:
"""Create a new parallel job given a taskmaster.
The taskmaster's next_task() method should return the next
task that needs to be executed, or None if there are no more
tasks. The taskmaster's executed() method will be called
for each task when it is successfully executed, or failed()
will be called if the task failed to execute (i.e. execute()
raised an exception).
Note: calls to taskmaster are serialized, but calls to
execute() on distinct tasks are not serialized, because
that is the whole point of parallel jobs: they can execute
multiple tasks simultaneously. """
self.taskmaster = taskmaster
self.interrupted = InterruptState()
self.tp = ThreadPool(num, stack_size, self.interrupted)
self.maxjobs = num
def start(self):
"""Start the job. This will begin pulling tasks from the
taskmaster and executing them, and return when there are no
more tasks. If a task fails to execute (i.e. execute() raises
an exception), then the job will stop."""
jobs = 0
while True:
# Start up as many available tasks as we're
# allowed to.
while jobs < self.maxjobs:
task = self.taskmaster.next_task()
if task is None:
break
try:
# prepare task for execution
task.prepare()
except Exception:
task.exception_set()
task.failed()
task.postprocess()
else:
if task.needs_execute():
# dispatch task
self.tp.put(task)
jobs += 1
else:
task.executed()
task.postprocess()
if not task and not jobs:
break
# Let any/all completed tasks finish up before we go
# back and put the next batch of tasks on the queue.
while True:
task, ok = self.tp.get()
jobs -= 1
if ok:
task.executed()
else:
if self.interrupted():
try:
raise SCons.Errors.BuildError(
task.targets[0], errstr=interrupt_msg)
except Exception:
task.exception_set()
# Let the failed() callback function arrange
# for the build to stop if that's appropriate.
task.failed()
task.postprocess()
if self.tp.resultsQueue.empty():
break
self.tp.cleanup()
self.taskmaster.cleanup()
# An experimental new parallel scheduler that uses a leaders/followers pattern.
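# In this scheme, at most one thread at a time (the "leader") holds tm_lock
# and turns the taskmaster crank looking for work; when it finds a task that
# needs execution it hands leadership to another waiting thread and executes
# the task itself, posting the result to results_queue for the next leader
# to retire.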
class NewParallel:
class State(Enum):
READY = 0
SEARCHING = 1
STALLED = 2
COMPLETED = 3
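    # State transitions as implemented in _work():
    #   READY     -> SEARCHING  a thread takes tm_lock and searches for work
    #   SEARCHING -> READY      a task needing execution was found; leadership is handed off
    #   SEARCHING -> STALLED    no task available, but jobs are still outstanding
    #   SEARCHING -> COMPLETED  no task available and no jobs outstanding
    #   STALLED   -> READY      a worker re-enters with a completed task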
class Worker(threading.Thread):
def __init__(self, owner) -> None:
super().__init__()
self.daemon = True
self.owner = owner
self.start()
def run(self) -> None:
self.owner._work()
    class FakeLock:
def lock(self):
pass
def unlock(self):
pass
def __enter__(self):
pass
def __exit__(self, *args):
pass
    class FakeCondition:
def __init__(self, lock):
pass
def wait(self):
            # Never expected to be called when only one worker is running;
            # 'fatal' is undefined, so an unexpected call fails loudly.
            fatal()
def notify(self):
pass
def notify_all(self):
pass
def __enter__(self):
pass
def __exit__(self, *args):
pass
def __init__(self, taskmaster, num, stack_size) -> None:
self.taskmaster = taskmaster
self.max_workers = num
self.stack_size = stack_size
self.interrupted = InterruptState()
self.workers = []
# The `tm_lock` is what ensures that we only have one
# thread interacting with the taskmaster at a time. It
# also protects access to our state that gets updated
# concurrently. The `can_search_cv` is associated with
# this mutex.
self.tm_lock = (threading.Lock if self.max_workers > 1 else NewParallel.FakeLock)()
# Guarded under `tm_lock`.
self.jobs = 0
self.state = NewParallel.State.READY
# The `can_search_cv` is used to manage a leader /
# follower pattern for access to the taskmaster, and to
# awaken from stalls.
self.can_search_cv = (threading.Condition if self.max_workers > 1 else NewParallel.FakeCondition)(self.tm_lock)
# The queue of tasks that have completed execution. The
        # next thread to obtain `tm_lock` will retire them.
self.results_queue_lock = (threading.Lock if self.max_workers > 1 else NewParallel.FakeLock)()
self.results_queue = []
if self.taskmaster.trace:
self.trace = self._setup_logging()
else:
self.trace = False
def _setup_logging(self):
jl = logging.getLogger("Job")
jl.setLevel(level=logging.DEBUG)
jl.addHandler(self.taskmaster.trace.log_handler)
return jl
def trace_message(self, message) -> None:
# This grabs the name of the function which calls trace_message()
method_name = sys._getframe(1).f_code.co_name + "():"
        thread_id = threading.get_ident()
self.trace.debug('%s.%s [Thread:%s] %s' % (type(self).__name__, method_name, thread_id, message))
def start(self) -> None:
if self.max_workers == 1:
self._work()
else:
self._start_worker()
while len(self.workers) > 0:
self.workers[0].join()
self.workers.pop(0)
self.taskmaster.cleanup()
def _maybe_start_worker(self) -> None:
if self.max_workers > 1 and len(self.workers) < self.max_workers:
if self.jobs >= len(self.workers):
self._start_worker()
def _start_worker(self) -> None:
prev_size = self._adjust_stack_size()
if self.trace:
self.trace_message("Starting new worker thread")
self.workers.append(NewParallel.Worker(self))
self._restore_stack_size(prev_size)
def _adjust_stack_size(self):
try:
prev_size = threading.stack_size(self.stack_size * 1024)
return prev_size
except AttributeError as e:
# Only print a warning if the stack size has been
# explicitly set.
if explicit_stack_size is not None:
msg = "Setting stack size is unsupported by this version of Python:\n " + \
e.args[0]
SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
except ValueError as e:
msg = "Setting stack size failed:\n " + str(e)
SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
return None
def _restore_stack_size(self, prev_size) -> None:
if prev_size is not None:
threading.stack_size(prev_size)
def _work(self):
task = None
while True:
# Obtain `tm_lock`, granting exclusive access to the taskmaster.
with self.can_search_cv:
if self.trace:
self.trace_message("Gained exclusive access")
# Capture whether we got here with `task` set,
# then drop our reference to the task as we are no
# longer interested in the actual object.
completed_task = (task is not None)
task = None
# We will only have `completed_task` set here if
# we have looped back after executing a task. If
# we have completed a task and find that we are
# stalled, we should speculatively indicate that
# we are no longer stalled by transitioning to the
# 'ready' state which will bypass the condition
# wait so that we immediately process the results
# queue and hopefully light up new
# work. Otherwise, stay stalled, and we will wait
# in the condvar. Some other thread will come back
# here with a completed task.
if self.state == NewParallel.State.STALLED and completed_task:
if self.trace:
self.trace_message("Detected stall with completed task, bypassing wait")
self.state = NewParallel.State.READY
# Wait until we are neither searching nor stalled.
while self.state == NewParallel.State.SEARCHING or self.state == NewParallel.State.STALLED:
if self.trace:
self.trace_message("Search already in progress, waiting")
self.can_search_cv.wait()
# If someone set the completed flag, bail.
if self.state == NewParallel.State.COMPLETED:
if self.trace:
self.trace_message("Completion detected, breaking from main loop")
break
# Set the searching flag to indicate that a thread
# is currently in the critical section for
# taskmaster work.
#
if self.trace:
self.trace_message("Starting search")
self.state = NewParallel.State.SEARCHING
# Bulk acquire the tasks in the results queue
# under the result queue lock, then process them
# all outside that lock. We need to process the
# tasks in the results queue before looking for
# new work because we might be unable to find new
# work if we don't.
results_queue = []
with self.results_queue_lock:
results_queue, self.results_queue = self.results_queue, results_queue
if self.trace:
self.trace_message(f"Found {len(results_queue)} completed tasks to process")
for (rtask, rresult) in results_queue:
if rresult:
rtask.executed()
else:
if self.interrupted():
try:
raise SCons.Errors.BuildError(
rtask.targets[0], errstr=interrupt_msg)
except Exception:
rtask.exception_set()
# Let the failed() callback function arrange
# for the build to stop if that's appropriate.
rtask.failed()
rtask.postprocess()
self.jobs -= 1
# We are done with any task objects that were in
# the results queue.
results_queue.clear()
# Now, turn the crank on the taskmaster until we
# either run out of tasks, or find a task that
# needs execution. If we run out of tasks, go idle
# until results arrive if jobs are pending, or
# mark the walk as complete if not.
while self.state == NewParallel.State.SEARCHING:
if self.trace:
self.trace_message("Searching for new tasks")
task = self.taskmaster.next_task()
if task:
# We found a task. Walk it through the
# task lifecycle. If it does not need
# execution, just complete the task and
# look for the next one. Otherwise,
# indicate that we are no longer searching
# so we can drop out of this loop, execute
# the task outside the lock, and allow
# another thread in to search.
try:
task.prepare()
except Exception:
task.exception_set()
task.failed()
task.postprocess()
else:
if not task.needs_execute():
if self.trace:
self.trace_message("Found internal task")
task.executed()
task.postprocess()
else:
self.jobs += 1
if self.trace:
self.trace_message("Found task requiring execution")
self.state = NewParallel.State.READY
self.can_search_cv.notify()
# This thread will be busy taking care of
# `execute`ing this task. If we haven't
# reached the limit, spawn a new thread to
# turn the crank and find the next task.
self._maybe_start_worker()
else:
# We failed to find a task, so this thread
# cannot continue turning the taskmaster
# crank. We must exit the loop.
if self.jobs:
# No task was found, but there are
# outstanding jobs executing that
# might unblock new tasks when they
# complete. Transition to the stalled
# state. We do not need a notify,
# because we know there are threads
# outstanding that will re-enter the
# loop.
#
if self.trace:
self.trace_message("Found no task requiring execution, but have jobs: marking stalled")
self.state = NewParallel.State.STALLED
else:
# We didn't find a task and there are
# no jobs outstanding, so there is
# nothing that will ever return
# results which might unblock new
# tasks. We can conclude that the walk
# is complete. Update our state to
# note completion and awaken anyone
# sleeping on the condvar.
#
if self.trace:
self.trace_message("Found no task requiring execution, and have no jobs: marking complete")
self.state = NewParallel.State.COMPLETED
self.can_search_cv.notify_all()
# We no longer hold `tm_lock` here. If we have a task,
# we can now execute it. If there are threads waiting
# to search, one of them can now begin turning the
# taskmaster crank in NewParallel.
if task:
if self.trace:
self.trace_message("Executing task")
ok = True
try:
if self.interrupted():
raise SCons.Errors.BuildError(
task.targets[0], errstr=interrupt_msg)
task.execute()
except Exception:
ok = False
task.exception_set()
# Grab the results queue lock and enqueue the
# executed task and state. The next thread into
# the searching loop will complete the
# postprocessing work under the taskmaster lock.
#
if self.trace:
self.trace_message("Enqueueing executed task results")
with self.results_queue_lock:
self.results_queue.append((task, ok))
# Tricky state "fallthrough" here. We are going back
# to the top of the loop, which behaves differently
# depending on whether `task` is set. Do not perturb
# the value of the `task` variable if you add new code
# after this comment.
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
View file
@ -81,7 +81,7 @@ class Stats:
the Taskmaster records its decision each time it processes the Node. the Taskmaster records its decision each time it processes the Node.
(Ideally, that's just once per Node.) (Ideally, that's just once per Node.)
""" """
def __init__(self): def __init__(self) -> None:
""" """
Instantiates a Taskmaster.Stats object, initializing all Instantiates a Taskmaster.Stats object, initializing all
appropriate counters to zero. appropriate counters to zero.
@ -106,7 +106,7 @@ fmt = "%(considered)3d "\
"%(build)3d " "%(build)3d "
def dump_stats(): def dump_stats() -> None:
for n in sorted(StatsNodes, key=lambda a: str(a)): for n in sorted(StatsNodes, key=lambda a: str(a)):
print((fmt % n.attributes.stats.__dict__) + str(n)) print((fmt % n.attributes.stats.__dict__) + str(n))
@ -132,19 +132,19 @@ class Task(ABC):
LOGGER = None LOGGER = None
def __init__(self, tm, targets, top, node): def __init__(self, tm, targets, top, node) -> None:
self.tm = tm self.tm = tm
self.targets = targets self.targets = targets
self.top = top self.top = top
self.node = node self.node = node
self.exc_clear() self.exc_clear()
def trace_message(self, node, description='node'): def trace_message(self, node, description: str='node') -> None:
# This grabs the name of the function which calls trace_message() # This grabs the name of the function which calls trace_message()
method_name=sys._getframe(1).f_code.co_name+"():" method_name=sys._getframe(1).f_code.co_name+"():"
Task.LOGGER.debug('%-15s %s %s' % (method_name, description, self.tm.tm_trace_node(node))) Task.LOGGER.debug('%-15s %s %s' % (method_name, description, self.tm.tm_trace_node(node)))
def display(self, message): def display(self, message) -> None:
""" """
Hook to allow the calling interface to display a message. Hook to allow the calling interface to display a message.
@ -157,7 +157,7 @@ class Task(ABC):
""" """
pass pass
def prepare(self): def prepare(self) -> None:
""" """
Called just before the task is executed. Called just before the task is executed.
@ -240,10 +240,12 @@ class Task(ABC):
for t in cached_targets: for t in cached_targets:
try: try:
t.fs.unlink(t.get_internal_path()) t.fs.unlink(t.get_internal_path())
except (IOError, OSError) as e: except OSError as e:
SCons.Warnings.warn(SCons.Warnings.CacheCleanupErrorWarning, SCons.Warnings.warn(SCons.Warnings.CacheCleanupErrorWarning,
"Failed copying all target files from cache, Error while attempting to remove file %s retrieved from cache: %s" % (t.get_internal_path(), e)) "Failed copying all target files from cache, Error while attempting to remove file %s retrieved from cache: %s" % (t.get_internal_path(), e))
self.targets[0].build() self.targets[0].build()
for t in self.targets:
t.push_to_cache()
else: else:
for t in cached_targets: for t in cached_targets:
t.cached = 1 t.cached = 1
@ -260,7 +262,7 @@ class Task(ABC):
buildError.exc_info = sys.exc_info() buildError.exc_info = sys.exc_info()
raise buildError raise buildError
def executed_without_callbacks(self): def executed_without_callbacks(self) -> None:
""" """
Called when the task has been successfully executed Called when the task has been successfully executed
and the Taskmaster instance doesn't want to call and the Taskmaster instance doesn't want to call
@ -276,7 +278,7 @@ class Task(ABC):
side_effect.set_state(NODE_NO_STATE) side_effect.set_state(NODE_NO_STATE)
t.set_state(NODE_EXECUTED) t.set_state(NODE_EXECUTED)
def executed_with_callbacks(self): def executed_with_callbacks(self) -> None:
""" """
Called when the task has been successfully executed and Called when the task has been successfully executed and
the Taskmaster instance wants to call the Node's callback the Taskmaster instance wants to call the Node's callback
@ -299,8 +301,6 @@ class Task(ABC):
for side_effect in t.side_effects: for side_effect in t.side_effects:
side_effect.set_state(NODE_NO_STATE) side_effect.set_state(NODE_NO_STATE)
t.set_state(NODE_EXECUTED) t.set_state(NODE_EXECUTED)
if not t.cached:
t.push_to_cache()
t.built() t.built()
t.visited() t.visited()
if (not print_prepare and if (not print_prepare and
@ -311,7 +311,7 @@ class Task(ABC):
executed = executed_with_callbacks executed = executed_with_callbacks
def failed(self): def failed(self) -> None:
""" """
Default action when a task fails: stop the build. Default action when a task fails: stop the build.
@ -321,7 +321,7 @@ class Task(ABC):
""" """
self.fail_stop() self.fail_stop()
def fail_stop(self): def fail_stop(self) -> None:
""" """
Explicit stop-the-build failure. Explicit stop-the-build failure.
@ -349,7 +349,7 @@ class Task(ABC):
self.targets = [self.tm.current_top] self.targets = [self.tm.current_top]
self.top = 1 self.top = 1
def fail_continue(self): def fail_continue(self) -> None:
""" """
Explicit continue-the-build failure. Explicit continue-the-build failure.
@ -366,7 +366,7 @@ class Task(ABC):
self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED)) self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED))
def make_ready_all(self): def make_ready_all(self) -> None:
""" """
Marks all targets in a task ready for execution. Marks all targets in a task ready for execution.
@ -404,7 +404,7 @@ class Task(ABC):
t.disambiguate().make_ready() t.disambiguate().make_ready()
is_up_to_date = not t.has_builder() or \ is_up_to_date = not t.has_builder() or \
(not t.always_build and t.is_up_to_date()) (not t.always_build and t.is_up_to_date())
except EnvironmentError as e: except OSError as e:
raise SCons.Errors.BuildError(node=t, errstr=e.strerror, filename=e.filename) raise SCons.Errors.BuildError(node=t, errstr=e.strerror, filename=e.filename)
if not is_up_to_date: if not is_up_to_date:
@ -431,7 +431,7 @@ class Task(ABC):
make_ready = make_ready_current make_ready = make_ready_current
def postprocess(self): def postprocess(self) -> None:
""" """
Post-processes a task after it's been executed. Post-processes a task after it's been executed.
@ -511,7 +511,7 @@ class Task(ABC):
""" """
return self.exception return self.exception
def exc_clear(self): def exc_clear(self) -> None:
""" """
Clears any recorded exception. Clears any recorded exception.
@ -521,7 +521,7 @@ class Task(ABC):
self.exception = (None, None, None) self.exception = (None, None, None)
self.exception_raise = self._no_exception_to_raise self.exception_raise = self._no_exception_to_raise
def exception_set(self, exception=None): def exception_set(self, exception=None) -> None:
""" """
Records an exception to be raised at the appropriate time. Records an exception to be raised at the appropriate time.
@ -533,7 +533,7 @@ class Task(ABC):
self.exception = exception self.exception = exception
self.exception_raise = self._exception_raise self.exception_raise = self._exception_raise
def _no_exception_to_raise(self): def _no_exception_to_raise(self) -> None:
pass pass
def _exception_raise(self): def _exception_raise(self):
@ -563,7 +563,7 @@ class Task(ABC):
class AlwaysTask(Task): class AlwaysTask(Task):
def needs_execute(self): def needs_execute(self) -> bool:
""" """
Always returns True (indicating this Task should always Always returns True (indicating this Task should always
be executed). be executed).
@ -606,7 +606,7 @@ class Taskmaster:
The Taskmaster for walking the dependency DAG. The Taskmaster for walking the dependency DAG.
""" """
def __init__(self, targets=[], tasker=None, order=None, trace=None): def __init__(self, targets=[], tasker=None, order=None, trace=None) -> None:
self.original_top = targets self.original_top = targets
self.top_targets_left = targets[:] self.top_targets_left = targets[:]
self.top_targets_left.reverse() self.top_targets_left.reverse()
@ -623,7 +623,7 @@ class Taskmaster:
self.trace = False self.trace = False
self.configure_trace(trace) self.configure_trace(trace)
def configure_trace(self, trace=None): def configure_trace(self, trace=None) -> None:
""" """
This handles the command line option --taskmastertrace= This handles the command line option --taskmastertrace=
It can be: It can be:
@ -726,7 +726,7 @@ class Taskmaster:
self.will_not_build(candidates) self.will_not_build(candidates)
return None return None
def _validate_pending_children(self): def _validate_pending_children(self) -> None:
""" """
Validate the content of the pending_children set. Assert if an Validate the content of the pending_children set. Assert if an
internal error is found. internal error is found.
@ -803,7 +803,7 @@ class Taskmaster:
for p in n.waiting_parents: for p in n.waiting_parents:
assert p.ref_count > 0, (str(n), str(p), p.ref_count) assert p.ref_count > 0, (str(n), str(p), p.ref_count)
def tm_trace_node(self, node): def tm_trace_node(self, node) -> str:
return('<%-10s %-3s %s>' % (StateString[node.get_state()], return('<%-10s %-3s %s>' % (StateString[node.get_state()],
node.ref_count, node.ref_count,
repr(str(node)))) repr(str(node))))
@ -1047,7 +1047,7 @@ class Taskmaster:
return task return task
def will_not_build(self, nodes, node_func=lambda n: None): def will_not_build(self, nodes, node_func=lambda n: None) -> None:
""" """
Perform clean-up about nodes that will never be built. Invokes Perform clean-up about nodes that will never be built. Invokes
a user defined function on all of these nodes (including all a user defined function on all of these nodes (including all
@ -1092,7 +1092,7 @@ class Taskmaster:
# allow us to use in-place updates # allow us to use in-place updates
self.pending_children = pending_children self.pending_children = pending_children
def stop(self): def stop(self) -> None:
""" """
Stops the current build completely. Stops the current build completely.
""" """
View file
@ -39,7 +39,7 @@ import SCons.Util
as_module = __import__('as', globals(), locals(), [], 1) as_module = __import__('as', globals(), locals(), [], 1)
def generate(env): def generate(env) -> None:
"""Add Builders and construction variables for ar to an Environment.""" """Add Builders and construction variables for ar to an Environment."""
as_module.generate(env) as_module.generate(env)
View file
@ -29,21 +29,22 @@ Coded by Russel Winder (russel@winder.org.uk)
2012-09-06 2012-09-06
""" """
from pathlib import Path
import os.path import os.path
def isD(env, source): def isD(env, source) -> bool:
if not source: if not source:
return 0 return False
for s in source: for s in source:
if s.sources: if s.sources:
ext = os.path.splitext(str(s.sources[0]))[1] ext = os.path.splitext(str(s.sources[0]))[1]
if ext == '.d': if ext == '.d':
return 1 return True
return 0 return False
def addDPATHToEnv(env, executable): def addDPATHToEnv(env, executable) -> None:
dPath = env.WhereIs(executable) dPath = env.WhereIs(executable)
if dPath: if dPath:
phobosDir = dPath[:dPath.rindex(executable)] + '/../src/phobos' phobosDir = dPath[:dPath.rindex(executable)] + '/../src/phobos'
@ -57,6 +58,27 @@ def allAtOnceEmitter(target, source, env):
env.Clean(target[0], str(target[0]) + '.o') env.Clean(target[0], str(target[0]) + '.o')
return target, source return target, source
def DObjectEmitter(target,source,env):
di_file_dir = env.get('DI_FILE_DIR', False)
# TODO: Verify sane DI_FILE_DIR?
if di_file_dir:
di_file_suffix = env.subst('$DI_FILE_SUFFIX', target=target, source=source)
file_base = Path(target[0].get_path()).stem
# print(f'DObjectEmitter: {di_file_dir}/*{file_base}*{di_file_suffix}')
target.append(env.fs.File(f"{file_base}{di_file_suffix}", di_file_dir))
# print("New Target:%s"%" ".join([str(t) for t in target]))
return (target,source)
def DStaticObjectEmitter(target,source,env):
for tgt in target:
tgt.attributes.shared = None
return DObjectEmitter(target,source,env)
def DSharedObjectEmitter(target,source,env):
for tgt in target:
tgt.attributes.shared = 1
return DObjectEmitter(target,source,env)
# Local Variables: # Local Variables:
# tab-width:4 # tab-width:4
# indent-tabs-mode:nil # indent-tabs-mode:nil
View file
@ -25,16 +25,17 @@
import re import re
import os.path import os.path
from typing import Tuple from typing import Tuple, List
import SCons.Scanner.Fortran import SCons.Scanner.Fortran
import SCons.Tool import SCons.Tool
import SCons.Util import SCons.Util
from SCons.Action import Action from SCons.Action import Action, CommandAction
from SCons.Defaults import StaticObjectEmitter, SharedObjectEmitter
def isfortran(env, source) -> bool: def isfortran(env, source) -> bool:
"""Returns True if source has any fortran files in it. """Returns True if *source* has any fortran files in it.
Only checks based on filename suffixes, does not examine code. Only checks based on filename suffixes, does not examine code.
""" """
@ -62,14 +63,14 @@ def _fortranEmitter(target, source, env) -> Tuple:
Called by both the static and shared object emitters, Called by both the static and shared object emitters,
mainly to account for generated module files. mainly to account for generated module files.
""" """
node = source[0].rfile() node = source[0].rfile()
if not node.exists() and not node.is_derived(): if not node.exists() and not node.is_derived():
print("Could not locate " + str(node.name)) print("Could not locate " + str(node.name))
return ([], []) return [], []
# This has to match the def_regex in the Fortran scanner # This has to match the def_regex in the Fortran scanner
mod_regex = r"""(?i)^\s*MODULE\s+(?!PROCEDURE|SUBROUTINE|FUNCTION|PURE|ELEMENTAL)(\w+)""" mod_regex = r"""(?i)^\s*MODULE\s+(?!PROCEDURE|SUBROUTINE|FUNCTION|PURE|ELEMENTAL)(\w+)"""
cre = re.compile(mod_regex,re.M) cre = re.compile(mod_regex, re.M)
# Retrieve all USE'd module names # Retrieve all USE'd module names
modules = cre.findall(node.get_text_contents()) modules = cre.findall(node.get_text_contents())
# Remove unique items from the list # Remove unique items from the list
@ -77,45 +78,48 @@ def _fortranEmitter(target, source, env) -> Tuple:
# Convert module name to a .mod filename # Convert module name to a .mod filename
suffix = env.subst('$FORTRANMODSUFFIX', target=target, source=source) suffix = env.subst('$FORTRANMODSUFFIX', target=target, source=source)
moddir = env.subst('$FORTRANMODDIR', target=target, source=source) moddir = env.subst('$FORTRANMODDIR', target=target, source=source)
modules = [x.lower() + suffix for x in modules] modules = [mod.lower() + suffix for mod in modules]
for m in modules: for module in modules:
target.append(env.fs.File(m, moddir)) target.append(env.fs.File(module, moddir))
return (target, source) return target, source
def FortranEmitter(target, source, env) -> Tuple: def FortranEmitter(target, source, env) -> Tuple:
import SCons.Defaults """Create emitter for static objects."""
target, source = _fortranEmitter(target, source, env) target, source = _fortranEmitter(target, source, env)
return SCons.Defaults.StaticObjectEmitter(target, source, env) return StaticObjectEmitter(target, source, env)
def ShFortranEmitter(target, source, env) -> Tuple: def ShFortranEmitter(target, source, env) -> Tuple:
import SCons.Defaults """Create emitter for shared objects."""
target, source = _fortranEmitter(target, source, env) target, source = _fortranEmitter(target, source, env)
return SCons.Defaults.SharedObjectEmitter(target, source, env) return SharedObjectEmitter(target, source, env)
def ComputeFortranSuffixes(suffixes, ppsuffixes) -> None: def ComputeFortranSuffixes(suffixes: List[str], ppsuffixes: List[str]) -> None:
"""Update the suffix lists to reflect the platform requirements. """Update the suffix lists to reflect the platform requirements.
If upper-cased suffixes can be distinguished from lower, those are If upper-cased suffixes can be distinguished from lower, those are
added to *ppsuffixes*. If not, they are added to *suffixes*. added to *ppsuffixes*. If not, they are added to *suffixes*.
Args: Args:
suffixes (list): indicate regular Fortran source files suffixes: regular Fortran source files
ppsuffixes (list): indicate Fortran source files that should be ppsuffixes: Fortran source files that should be
be run through the pre-processor be run through the pre-processor
""" """
assert len(suffixes) > 0 assert len(suffixes) > 0
s = suffixes[0] s = suffixes[0]
sup = s.upper() sup = s.upper()
upper_suffixes = [_.upper() for _ in suffixes] upper_suffixes = [suf.upper() for suf in suffixes]
if SCons.Util.case_sensitive_suffixes(s, sup): if SCons.Util.case_sensitive_suffixes(s, sup):
ppsuffixes.extend(upper_suffixes) ppsuffixes.extend(upper_suffixes)
else: else:
suffixes.extend(upper_suffixes) suffixes.extend(upper_suffixes)
def CreateDialectActions(dialect) -> Tuple[Action, Action, Action, Action]:
def CreateDialectActions(
dialect: str,
) -> Tuple[CommandAction, CommandAction, CommandAction, CommandAction]:
"""Create dialect specific actions.""" """Create dialect specific actions."""
CompAction = Action(f'${dialect}COM ', cmdstr=f'${dialect}COMSTR') CompAction = Action(f'${dialect}COM ', cmdstr=f'${dialect}COMSTR')
CompPPAction = Action(f'${dialect}PPCOM ', cmdstr=f'${dialect}PPCOMSTR') CompPPAction = Action(f'${dialect}PPCOM ', cmdstr=f'${dialect}PPCOMSTR')
@ -124,14 +128,20 @@ def CreateDialectActions(dialect) -> Tuple[Action, Action, Action, Action]:
return CompAction, CompPPAction, ShCompAction, ShCompPPAction return CompAction, CompPPAction, ShCompAction, ShCompPPAction
def DialectAddToEnv(env, dialect, suffixes, ppsuffixes, support_mods=False) -> None: def DialectAddToEnv(
env,
dialect: str,
suffixes: List[str],
ppsuffixes: List[str],
support_mods: bool = False,
) -> None:
"""Add dialect specific construction variables. """Add dialect specific construction variables.
Args: Args:
dialect (str): dialect name dialect: dialect name
suffixes (list): suffixes associated with this dialect suffixes: suffixes associated with this dialect
ppsuffixes (list): suffixes using cpp associated with this dialect ppsuffixes: suffixes using cpp associated with this dialect
support_mods (bool): whether this dialect supports modules support_mods: whether this dialect supports modules
""" """
ComputeFortranSuffixes(suffixes, ppsuffixes) ComputeFortranSuffixes(suffixes, ppsuffixes)
@ -184,16 +194,8 @@ def DialectAddToEnv(env, dialect, suffixes, ppsuffixes, support_mods=False) -> N
def add_fortran_to_env(env) -> None: def add_fortran_to_env(env) -> None:
"""Add Builders and construction variables for Fortran/generic.""" """Add Builders and construction variables for Fortran/generic."""
try: FortranSuffixes = env.get('FORTRANFILESUFFIXES', ['.f', '.for', '.ftn'])
FortranSuffixes = env['FORTRANFILESUFFIXES'] FortranPPSuffixes = env.get('FORTRANPPFILESUFFIXES', ['.fpp', '.FPP'])
except KeyError:
FortranSuffixes = ['.f', '.for', '.ftn']
try:
FortranPPSuffixes = env['FORTRANPPFILESUFFIXES']
except KeyError:
FortranPPSuffixes = ['.fpp', '.FPP']
DialectAddToEnv(env, "FORTRAN", FortranSuffixes, FortranPPSuffixes, support_mods=True) DialectAddToEnv(env, "FORTRAN", FortranSuffixes, FortranPPSuffixes, support_mods=True)
# Module support # Module support
@ -206,72 +208,32 @@ def add_fortran_to_env(env) -> None:
def add_f77_to_env(env) -> None: def add_f77_to_env(env) -> None:
"""Add Builders and construction variables for f77 dialect.""" """Add Builders and construction variables for f77 dialect."""
try: F77Suffixes = env.get('F77FILESUFFIXES', ['.f77'])
F77Suffixes = env['F77FILESUFFIXES'] F77PPSuffixes = env.get('F77PPFILESUFFIXES', [])
except KeyError:
F77Suffixes = ['.f77']
try:
F77PPSuffixes = env['F77PPFILESUFFIXES']
except KeyError:
F77PPSuffixes = []
DialectAddToEnv(env, "F77", F77Suffixes, F77PPSuffixes) DialectAddToEnv(env, "F77", F77Suffixes, F77PPSuffixes)
def add_f90_to_env(env) -> None: def add_f90_to_env(env) -> None:
"""Add Builders and construction variables for f90 dialect.""" """Add Builders and construction variables for f90 dialect."""
try: F90Suffixes = env.get('F90FILESUFFIXES', ['.f90'])
F90Suffixes = env['F90FILESUFFIXES'] F90PPSuffixes = env.get('F90PPFILESUFFIXES', [])
except KeyError:
F90Suffixes = ['.f90']
try:
F90PPSuffixes = env['F90PPFILESUFFIXES']
except KeyError:
F90PPSuffixes = []
DialectAddToEnv(env, "F90", F90Suffixes, F90PPSuffixes, support_mods=True) DialectAddToEnv(env, "F90", F90Suffixes, F90PPSuffixes, support_mods=True)
def add_f95_to_env(env) -> None: def add_f95_to_env(env) -> None:
"""Add Builders and construction variables for f95 dialect.""" """Add Builders and construction variables for f95 dialect."""
try: F95Suffixes = env.get('F95FILESUFFIXES', ['.f95'])
F95Suffixes = env['F95FILESUFFIXES'] F95PPSuffixes = env.get('F95PPFILESUFFIXES', [])
except KeyError:
F95Suffixes = ['.f95']
try:
F95PPSuffixes = env['F95PPFILESUFFIXES']
except KeyError:
F95PPSuffixes = []
DialectAddToEnv(env, "F95", F95Suffixes, F95PPSuffixes, support_mods=True) DialectAddToEnv(env, "F95", F95Suffixes, F95PPSuffixes, support_mods=True)
def add_f03_to_env(env) -> None: def add_f03_to_env(env) -> None:
"""Add Builders and construction variables for f03 dialect.""" """Add Builders and construction variables for f03 dialect."""
try: F03Suffixes = env.get('F03FILESUFFIXES', ['.f03'])
F03Suffixes = env['F03FILESUFFIXES'] F03PPSuffixes = env.get('F03PPFILESUFFIXES', [])
except KeyError:
F03Suffixes = ['.f03']
try:
F03PPSuffixes = env['F03PPFILESUFFIXES']
except KeyError:
F03PPSuffixes = []
DialectAddToEnv(env, "F03", F03Suffixes, F03PPSuffixes, support_mods=True) DialectAddToEnv(env, "F03", F03Suffixes, F03PPSuffixes, support_mods=True)
def add_f08_to_env(env) -> None: def add_f08_to_env(env) -> None:
"""Add Builders and construction variables for f08 dialect.""" """Add Builders and construction variables for f08 dialect."""
try: F08Suffixes = env.get('F08FILESUFFIXES', ['.f08'])
F08Suffixes = env['F08FILESUFFIXES'] F08PPSuffixes = env.get('F08PPFILESUFFIXES', [])
except KeyError:
F08Suffixes = ['.f08']
try:
F08PPSuffixes = env['F08PPFILESUFFIXES']
except KeyError:
F08PPSuffixes = []
DialectAddToEnv(env, "F08", F08Suffixes, F08PPSuffixes, support_mods=True) DialectAddToEnv(env, "F08", F08Suffixes, F08PPSuffixes, support_mods=True)
def add_all_to_env(env) -> None: def add_all_to_env(env) -> None:
View file
@ -82,8 +82,8 @@ class _POTargetFactory:
default for all produced nodes. default for all produced nodes.
""" """
def __init__(self, env, nodefault=True, alias=None, precious=True def __init__(self, env, nodefault: bool=True, alias=None, precious: bool=True
, noclean=True): , noclean: bool=True) -> None:
""" Object constructor. """ Object constructor.
**Arguments** **Arguments**
@ -104,7 +104,7 @@ class _POTargetFactory:
self.noclean = noclean self.noclean = noclean
self.nodefault = nodefault self.nodefault = nodefault
def _create_node(self, name, factory, directory=None, create=1): def _create_node(self, name, factory, directory=None, create: int=1):
""" Create node, and set it up to factory settings. """ """ Create node, and set it up to factory settings. """
node = factory(name, directory, create) node = factory(name, directory, create)
node.set_noclean(self.noclean) node.set_noclean(self.noclean)
@ -115,11 +115,11 @@ class _POTargetFactory:
self.env.AlwaysBuild(self.env.Alias(self.alias, node)) self.env.AlwaysBuild(self.env.Alias(self.alias, node))
return node return node
def Entry(self, name, directory=None, create=1): def Entry(self, name, directory=None, create: int=1):
""" Create `SCons.Node.FS.Entry` """ """ Create `SCons.Node.FS.Entry` """
return self._create_node(name, self.env.fs.Entry, directory, create) return self._create_node(name, self.env.fs.Entry, directory, create)
def File(self, name, directory=None, create=1): def File(self, name, directory=None, create: int=1):
""" Create `SCons.Node.FS.File` """ """ Create `SCons.Node.FS.File` """
return self._create_node(name, self.env.fs.File, directory, create) return self._create_node(name, self.env.fs.File, directory, create)
@ -191,7 +191,7 @@ class _POFileBuilder(BuilderBase):
# and execute iterativelly (recursion) self._execute(None, source[i]). # and execute iterativelly (recursion) self._execute(None, source[i]).
# After that it calls emitter (which is quite too late). The emitter is # After that it calls emitter (which is quite too late). The emitter is
# also called in each iteration, what makes things yet worse. # also called in each iteration, what makes things yet worse.
def __init__(self, env, **kw): def __init__(self, env, **kw) -> None:
if 'suffix' not in kw: if 'suffix' not in kw:
kw['suffix'] = '$POSUFFIX' kw['suffix'] = '$POSUFFIX'
if 'src_suffix' not in kw: if 'src_suffix' not in kw:
@ -300,7 +300,7 @@ class RPaths:
# seems be enough for our purposes (don't need TARGET variable and # seems be enough for our purposes (don't need TARGET variable and
# SCons.Defaults.Variable_Caller stuff). # SCons.Defaults.Variable_Caller stuff).
def __init__(self, env): def __init__(self, env) -> None:
""" Initialize `RPaths` callable object. """ Initialize `RPaths` callable object.
**Arguments**: **Arguments**:
View file
@ -29,6 +29,8 @@ import glob
from pathlib import Path from pathlib import Path
from typing import List from typing import List
import SCons.Util
java_parsing = True java_parsing = True
default_java_version = '1.4' default_java_version = '1.4'
@ -100,7 +102,7 @@ if java_parsing:
"""The initial state for parsing a Java file for classes, """The initial state for parsing a Java file for classes,
interfaces, and anonymous inner classes.""" interfaces, and anonymous inner classes."""
def __init__(self, version=default_java_version): def __init__(self, version=default_java_version) -> None:
if version not in ( if version not in (
'1.1', '1.1',
'1.2', '1.2',
@ -121,6 +123,10 @@ if java_parsing:
'15.0', '15.0',
'16.0', '16.0',
'17.0', '17.0',
'18.0',
'19.0',
'20.0',
'21.0',
): ):
msg = "Java version %s not supported" % version msg = "Java version %s not supported" % version
raise NotImplementedError(msg) raise NotImplementedError(msg)
@ -136,7 +142,7 @@ if java_parsing:
self.anonStacksStack = [[0]] self.anonStacksStack = [[0]]
self.package = None self.package = None
def trace(self): def trace(self) -> None:
pass pass
def __getClassState(self): def __getClassState(self):
@ -175,10 +181,10 @@ if java_parsing:
def _getAnonStack(self): def _getAnonStack(self):
return self.anonStacksStack[-1] return self.anonStacksStack[-1]
def openBracket(self): def openBracket(self) -> None:
self.brackets = self.brackets + 1 self.brackets = self.brackets + 1
def closeBracket(self): def closeBracket(self) -> None:
self.brackets = self.brackets - 1 self.brackets = self.brackets - 1
if len(self.stackBrackets) and \ if len(self.stackBrackets) and \
self.brackets == self.stackBrackets[-1]: self.brackets == self.stackBrackets[-1]:
@ -223,7 +229,7 @@ if java_parsing:
return self.__getSkipState() return self.__getSkipState()
return self return self
def addAnonClass(self): def addAnonClass(self) -> None:
"""Add an anonymous inner class""" """Add an anonymous inner class"""
if self.version in ('1.1', '1.2', '1.3', '1.4'): if self.version in ('1.1', '1.2', '1.3', '1.4'):
clazz = self.listClasses[0] clazz = self.listClasses[0]
@ -245,6 +251,10 @@ if java_parsing:
'15.0', '15.0',
'16.0', '16.0',
'17.0', '17.0',
'18.0',
'19.0',
'20.0',
'21.0',
): ):
self.stackAnonClassBrackets.append(self.brackets) self.stackAnonClassBrackets.append(self.brackets)
className = [] className = []
@ -257,7 +267,7 @@ if java_parsing:
self.nextAnon = self.nextAnon + 1 self.nextAnon = self.nextAnon + 1
self._getAnonStack().append(0) self._getAnonStack().append(0)
def setPackage(self, package): def setPackage(self, package) -> None:
self.package = package self.package = package
@ -267,7 +277,7 @@ if java_parsing:
within the confines of a scope. within the confines of a scope.
""" """
def __init__(self, old_state): def __init__(self, old_state) -> None:
self.outer_state = old_state.outer_state self.outer_state = old_state.outer_state
self.old_state = old_state self.old_state = old_state
self.brackets = 0 self.brackets = 0
@ -296,10 +306,10 @@ if java_parsing:
self.skipState = ret self.skipState = ret
return ret return ret
def openBracket(self): def openBracket(self) -> None:
self.brackets = self.brackets + 1 self.brackets = self.brackets + 1
def closeBracket(self): def closeBracket(self) -> None:
self.brackets = self.brackets - 1 self.brackets = self.brackets - 1
def parseToken(self, token): def parseToken(self, token):
@ -332,7 +342,7 @@ if java_parsing:
class AnonClassState: class AnonClassState:
"""A state that looks for anonymous inner classes.""" """A state that looks for anonymous inner classes."""
def __init__(self, old_state): def __init__(self, old_state) -> None:
# outer_state is always an instance of OuterState # outer_state is always an instance of OuterState
self.outer_state = old_state.outer_state self.outer_state = old_state.outer_state
self.old_state = old_state self.old_state = old_state
@ -373,7 +383,7 @@ if java_parsing:
"""A state that will skip a specified number of tokens before """A state that will skip a specified number of tokens before
reverting to the previous state.""" reverting to the previous state."""
def __init__(self, tokens_to_skip, old_state): def __init__(self, tokens_to_skip, old_state) -> None:
self.tokens_to_skip = tokens_to_skip self.tokens_to_skip = tokens_to_skip
self.old_state = old_state self.old_state = old_state
@ -387,7 +397,7 @@ if java_parsing:
class ClassState: class ClassState:
"""A state we go into when we hit a class or interface keyword.""" """A state we go into when we hit a class or interface keyword."""
def __init__(self, outer_state): def __init__(self, outer_state) -> None:
# outer_state is always an instance of OuterState # outer_state is always an instance of OuterState
self.outer_state = outer_state self.outer_state = outer_state
@ -419,7 +429,7 @@ if java_parsing:
"""A state that will ignore all tokens until it gets to a """A state that will ignore all tokens until it gets to a
specified token.""" specified token."""
def __init__(self, ignore_until, old_state): def __init__(self, ignore_until, old_state) -> None:
self.ignore_until = ignore_until self.ignore_until = ignore_until
self.old_state = old_state self.old_state = old_state
@ -433,7 +443,7 @@ if java_parsing:
"""The state we enter when we encounter the package keyword. """The state we enter when we encounter the package keyword.
We assume the next token will be the package name.""" We assume the next token will be the package name."""
def __init__(self, outer_state): def __init__(self, outer_state) -> None:
# outer_state is always an instance of OuterState # outer_state is always an instance of OuterState
self.outer_state = outer_state self.outer_state = outer_state
@ -443,8 +453,8 @@ if java_parsing:
def parse_java_file(fn, version=default_java_version): def parse_java_file(fn, version=default_java_version):
with open(fn, 'r', encoding='utf-8') as f: with open(fn, "rb") as f:
data = f.read() data = SCons.Util.to_Text(f.read())
return parse_java(data, version) return parse_java(data, version)
@ -471,7 +481,7 @@ else:
# Java-file parsing takes too long (although it shouldn't relative # Java-file parsing takes too long (although it shouldn't relative
# to how long the Java compiler itself seems to take...). # to how long the Java compiler itself seems to take...).
def parse_java_file(fn): def parse_java_file(fn, version=default_java_version):
""" "Parse" a .java file. """ "Parse" a .java file.
This actually just splits the file name, so the assumption here This actually just splits the file name, so the assumption here
View file
@ -53,16 +53,16 @@ from ..common import (
_refs = [] _refs = []
def register_modulename(modname): def register_modulename(modname) -> None:
module = sys.modules[modname] module = sys.modules[modname]
_refs.append(module) _refs.append(module)
def register_class(ref): def register_class(ref) -> None:
_refs.append(ref) _refs.append(ref)
def reset(): def reset() -> None:
debug('') debug('')
for ref in _refs: for ref in _refs:
for method in ['reset', '_reset']: for method in ['reset', '_reset']:
@ -73,7 +73,7 @@ def reset():
func() func()
def verify(): def verify() -> None:
debug('') debug('')
for ref in _refs: for ref in _refs:
for method in ['verify', '_verify']: for method in ['verify', '_verify']:
View file
@ -46,7 +46,7 @@ Dispatcher.register_modulename(__name__)
# A null-terminated string that contains unexpanded references to environment variables. # A null-terminated string that contains unexpanded references to environment variables.
REG_EXPAND_SZ = 2 REG_EXPAND_SZ = 2
def read_value(hkey, subkey_valname, expand=True): def read_value(hkey, subkey_valname, expand: bool=True):
try: try:
rval_t = RegGetValue(hkey, subkey_valname) rval_t = RegGetValue(hkey, subkey_valname)
except OSError: except OSError:
@ -58,11 +58,11 @@ def read_value(hkey, subkey_valname, expand=True):
debug('hkey=%s, subkey=%s, rval=%s', repr(hkey), repr(subkey_valname), repr(rval)) debug('hkey=%s, subkey=%s, rval=%s', repr(hkey), repr(subkey_valname), repr(rval))
return rval return rval
def registry_query_path(key, val, suffix, expand=True): def registry_query_path(key, val, suffix, expand: bool=True):
extval = val + '\\' + suffix if suffix else val extval = val + '\\' + suffix if suffix else val
qpath = read_value(key, extval, expand=expand) qpath = read_value(key, extval, expand=expand)
if qpath and os.path.exists(qpath): if qpath and os.path.exists(qpath):
qpath = Util.process_path(qpath) qpath = Util.normalize_path(qpath)
else: else:
qpath = None qpath = None
return (qpath, key, val, extval) return (qpath, key, val, extval)
@ -74,20 +74,20 @@ REG_SOFTWARE_MICROSOFT = [
(HKEY_CURRENT_USER, r'Software\Microsoft'), (HKEY_CURRENT_USER, r'Software\Microsoft'),
] ]
def microsoft_query_paths(suffix, usrval=None, expand=True): def microsoft_query_paths(suffix, usrval=None, expand: bool=True):
paths = [] paths = []
records = [] records = []
for key, val in REG_SOFTWARE_MICROSOFT: for key, val in REG_SOFTWARE_MICROSOFT:
extval = val + '\\' + suffix if suffix else val extval = val + '\\' + suffix if suffix else val
qpath = read_value(key, extval, expand=expand) qpath = read_value(key, extval, expand=expand)
if qpath and os.path.exists(qpath): if qpath and os.path.exists(qpath):
qpath = Util.process_path(qpath) qpath = Util.normalize_path(qpath)
if qpath not in paths: if qpath not in paths:
paths.append(qpath) paths.append(qpath)
records.append((qpath, key, val, extval, usrval)) records.append((qpath, key, val, extval, usrval))
return records return records
def microsoft_query_keys(suffix, usrval=None, expand=True): def microsoft_query_keys(suffix, usrval=None, expand: bool=True):
records = [] records = []
for key, val in REG_SOFTWARE_MICROSOFT: for key, val in REG_SOFTWARE_MICROSOFT:
extval = val + '\\' + suffix if suffix else val extval = val + '\\' + suffix if suffix else val
View file
@ -73,7 +73,7 @@ def _verify_re_sdk_dispatch_map():
for sdk_version in Config.MSVC_SDK_VERSIONS: for sdk_version in Config.MSVC_SDK_VERSIONS:
if sdk_version in re_sdk_dispatch_map: if sdk_version in re_sdk_dispatch_map:
continue continue
err_msg = 'sdk version {} not in re_sdk_dispatch_map'.format(sdk_version) err_msg = f'sdk version {sdk_version} not in re_sdk_dispatch_map'
raise MSVCInternalError(err_msg) raise MSVCInternalError(err_msg)
return None return None
@ -107,12 +107,12 @@ _MSVC_FORCE_DEFAULT_TOOLSET = False
# Force default arguments # Force default arguments
_MSVC_FORCE_DEFAULT_ARGUMENTS = False _MSVC_FORCE_DEFAULT_ARGUMENTS = False
def _msvc_force_default_sdk(force=True): def _msvc_force_default_sdk(force: bool=True) -> None:
global _MSVC_FORCE_DEFAULT_SDK global _MSVC_FORCE_DEFAULT_SDK
_MSVC_FORCE_DEFAULT_SDK = force _MSVC_FORCE_DEFAULT_SDK = force
debug('_MSVC_FORCE_DEFAULT_SDK=%s', repr(force)) debug('_MSVC_FORCE_DEFAULT_SDK=%s', repr(force))
def _msvc_force_default_toolset(force=True): def _msvc_force_default_toolset(force: bool=True) -> None:
global _MSVC_FORCE_DEFAULT_TOOLSET global _MSVC_FORCE_DEFAULT_TOOLSET
_MSVC_FORCE_DEFAULT_TOOLSET = force _MSVC_FORCE_DEFAULT_TOOLSET = force
debug('_MSVC_FORCE_DEFAULT_TOOLSET=%s', repr(force)) debug('_MSVC_FORCE_DEFAULT_TOOLSET=%s', repr(force))
@ -227,7 +227,7 @@ def _msvc_script_argument_uwp(env, msvc, arglist):
return uwp_arg return uwp_arg
def _user_script_argument_uwp(env, uwp, user_argstr): def _user_script_argument_uwp(env, uwp, user_argstr) -> bool:
matches = [m for m in re_vcvars_uwp.finditer(user_argstr)] matches = [m for m in re_vcvars_uwp.finditer(user_argstr)]
if not matches: if not matches:
@ -235,7 +235,7 @@ def _user_script_argument_uwp(env, uwp, user_argstr):
if len(matches) > 1: if len(matches) > 1:
debug('multiple uwp declarations: MSVC_SCRIPT_ARGS=%s', repr(user_argstr)) debug('multiple uwp declarations: MSVC_SCRIPT_ARGS=%s', repr(user_argstr))
err_msg = "multiple uwp declarations: MSVC_SCRIPT_ARGS={}".format(repr(user_argstr)) err_msg = f"multiple uwp declarations: MSVC_SCRIPT_ARGS={user_argstr!r}"
raise MSVCArgumentError(err_msg) raise MSVCArgumentError(err_msg)
if not uwp: if not uwp:
@ -270,7 +270,7 @@ def _msvc_script_argument_sdk_constraints(msvc, sdk_version):
return None return None
debug('invalid: method exit: sdk_version=%s', repr(sdk_version)) debug('invalid: method exit: sdk_version=%s', repr(sdk_version))
err_msg = "MSVC_SDK_VERSION ({}) is not supported".format(repr(sdk_version)) err_msg = f"MSVC_SDK_VERSION ({sdk_version!r}) is not supported"
return err_msg return err_msg
def _msvc_script_argument_sdk_platform_constraints(msvc, toolset, sdk_version, platform_def): def _msvc_script_argument_sdk_platform_constraints(msvc, toolset, sdk_version, platform_def):
@ -331,7 +331,7 @@ def _msvc_script_argument_sdk(env, msvc, toolset, platform_def, arglist):
return sdk_version return sdk_version
def _msvc_script_default_sdk(env, msvc, platform_def, arglist, force_sdk=False): def _msvc_script_default_sdk(env, msvc, platform_def, arglist, force_sdk: bool=False):
if msvc.vs_def.vc_buildtools_def.vc_version_numeric < VS2015.vc_buildtools_def.vc_version_numeric: if msvc.vs_def.vc_buildtools_def.vc_version_numeric < VS2015.vc_buildtools_def.vc_version_numeric:
return None return None
@ -361,7 +361,7 @@ def _user_script_argument_sdk(env, sdk_version, user_argstr):
if len(matches) > 1: if len(matches) > 1:
debug('multiple sdk version declarations: MSVC_SCRIPT_ARGS=%s', repr(user_argstr)) debug('multiple sdk version declarations: MSVC_SCRIPT_ARGS=%s', repr(user_argstr))
err_msg = "multiple sdk version declarations: MSVC_SCRIPT_ARGS={}".format(repr(user_argstr)) err_msg = f"multiple sdk version declarations: MSVC_SCRIPT_ARGS={user_argstr!r}"
raise MSVCArgumentError(err_msg) raise MSVCArgumentError(err_msg)
if not sdk_version: if not sdk_version:
@ -390,7 +390,7 @@ def _msvc_have140_toolset():
return _toolset_have140_cache return _toolset_have140_cache
def _reset_have140_cache(): def _reset_have140_cache() -> None:
global _toolset_have140_cache global _toolset_have140_cache
debug('reset: cache') debug('reset: cache')
_toolset_have140_cache = None _toolset_have140_cache = None
@ -434,7 +434,7 @@ def _msvc_read_toolset_folders(msvc, vc_dir):
sxs_folder, sxs_version = _msvc_sxs_toolset_folder(msvc, sxs_folder) sxs_folder, sxs_version = _msvc_sxs_toolset_folder(msvc, sxs_folder)
if not sxs_version: if not sxs_version:
continue continue
filename = 'Microsoft.VCToolsVersion.{}.txt'.format(sxs_folder) filename = f'Microsoft.VCToolsVersion.{sxs_folder}.txt'
filepath = os.path.join(sxs_path, filename) filepath = os.path.join(sxs_path, filename)
debug('sxs toolset: check file=%s', repr(filepath)) debug('sxs toolset: check file=%s', repr(filepath))
if os.path.exists(filepath): if os.path.exists(filepath):
@ -496,7 +496,7 @@ def _msvc_read_toolset_default(msvc, vc_dir):
build_dir = os.path.join(vc_dir, "Auxiliary", "Build") build_dir = os.path.join(vc_dir, "Auxiliary", "Build")
# VS2019+ # VS2019+
filename = "Microsoft.VCToolsVersion.{}.default.txt".format(msvc.vs_def.vc_buildtools_def.vc_buildtools) filename = f"Microsoft.VCToolsVersion.{msvc.vs_def.vc_buildtools_def.vc_buildtools}.default.txt"
filepath = os.path.join(build_dir, filename) filepath = os.path.join(build_dir, filename)
debug('default toolset: check file=%s', repr(filepath)) debug('default toolset: check file=%s', repr(filepath))
@ -520,7 +520,7 @@ def _msvc_read_toolset_default(msvc, vc_dir):
_toolset_version_cache = {} _toolset_version_cache = {}
_toolset_default_cache = {} _toolset_default_cache = {}
def _reset_toolset_cache(): def _reset_toolset_cache() -> None:
global _toolset_version_cache global _toolset_version_cache
global _toolset_default_cache global _toolset_default_cache
debug('reset: toolset cache') debug('reset: toolset cache')
@ -639,7 +639,7 @@ def _msvc_script_argument_toolset_constraints(msvc, toolset_version):
return None return None
debug('invalid: method exit: toolset_version=%s', repr(toolset_version)) debug('invalid: method exit: toolset_version=%s', repr(toolset_version))
err_msg = "MSVC_TOOLSET_VERSION ({}) format is not supported".format(repr(toolset_version)) err_msg = f"MSVC_TOOLSET_VERSION ({toolset_version!r}) format is not supported"
return err_msg return err_msg
def _msvc_script_argument_toolset_vcvars(msvc, toolset_version, vc_dir): def _msvc_script_argument_toolset_vcvars(msvc, toolset_version, vc_dir):
@ -681,12 +681,12 @@ def _msvc_script_argument_toolset(env, msvc, vc_dir, arglist):
toolset_vcvars = _msvc_script_argument_toolset_vcvars(msvc, toolset_version, vc_dir) toolset_vcvars = _msvc_script_argument_toolset_vcvars(msvc, toolset_version, vc_dir)
# toolset may not be installed for host/target # toolset may not be installed for host/target
argpair = (SortOrder.TOOLSET, '-vcvars_ver={}'.format(toolset_vcvars)) argpair = (SortOrder.TOOLSET, f'-vcvars_ver={toolset_vcvars}')
arglist.append(argpair) arglist.append(argpair)
return toolset_vcvars return toolset_vcvars
def _msvc_script_default_toolset(env, msvc, vc_dir, arglist, force_toolset=False): def _msvc_script_default_toolset(env, msvc, vc_dir, arglist, force_toolset: bool=False):
if msvc.vs_def.vc_buildtools_def.vc_version_numeric < VS2017.vc_buildtools_def.vc_version_numeric: if msvc.vs_def.vc_buildtools_def.vc_version_numeric < VS2017.vc_buildtools_def.vc_version_numeric:
return None return None
@ -698,7 +698,7 @@ def _msvc_script_default_toolset(env, msvc, vc_dir, arglist, force_toolset=False
debug('MSVC_VERSION=%s, toolset_default=%s', repr(msvc.version), repr(toolset_default)) debug('MSVC_VERSION=%s, toolset_default=%s', repr(msvc.version), repr(toolset_default))
if force_toolset: if force_toolset:
argpair = (SortOrder.TOOLSET, '-vcvars_ver={}'.format(toolset_default)) argpair = (SortOrder.TOOLSET, f'-vcvars_ver={toolset_default}')
arglist.append(argpair) arglist.append(argpair)
return toolset_default return toolset_default
@ -711,7 +711,7 @@ def _user_script_argument_toolset(env, toolset_version, user_argstr):
if len(matches) > 1: if len(matches) > 1:
debug('multiple toolset version declarations: MSVC_SCRIPT_ARGS=%s', repr(user_argstr)) debug('multiple toolset version declarations: MSVC_SCRIPT_ARGS=%s', repr(user_argstr))
err_msg = "multiple toolset version declarations: MSVC_SCRIPT_ARGS={}".format(repr(user_argstr)) err_msg = f"multiple toolset version declarations: MSVC_SCRIPT_ARGS={user_argstr!r}"
raise MSVCArgumentError(err_msg) raise MSVCArgumentError(err_msg)
if not toolset_version: if not toolset_version:
@ -799,7 +799,7 @@ def _msvc_script_argument_spectre(env, msvc, vc_dir, toolset, platform_def, argl
spectre_arg = 'spectre' spectre_arg = 'spectre'
# spectre libs may not be installed for host/target # spectre libs may not be installed for host/target
argpair = (SortOrder.SPECTRE, '-vcvars_spectre_libs={}'.format(spectre_arg)) argpair = (SortOrder.SPECTRE, f'-vcvars_spectre_libs={spectre_arg}')
arglist.append(argpair) arglist.append(argpair)
return spectre_arg return spectre_arg
@ -812,7 +812,7 @@ def _user_script_argument_spectre(env, spectre, user_argstr):
if len(matches) > 1: if len(matches) > 1:
debug('multiple spectre declarations: MSVC_SCRIPT_ARGS=%s', repr(user_argstr)) debug('multiple spectre declarations: MSVC_SCRIPT_ARGS=%s', repr(user_argstr))
err_msg = "multiple spectre declarations: MSVC_SCRIPT_ARGS={}".format(repr(user_argstr)) err_msg = f"multiple spectre declarations: MSVC_SCRIPT_ARGS={user_argstr!r}"
raise MSVCArgumentError(err_msg) raise MSVCArgumentError(err_msg)
if not spectre: if not spectre:
@ -853,7 +853,7 @@ def _msvc_script_argument_user(env, msvc, arglist):
return script_args return script_args
def _msvc_process_construction_variables(env): def _msvc_process_construction_variables(env) -> bool:
for cache_variable in [ for cache_variable in [
_MSVC_FORCE_DEFAULT_TOOLSET, _MSVC_FORCE_DEFAULT_TOOLSET,
@ -982,7 +982,7 @@ def _msvc_toolset_internal(msvc_version, toolset_version, vc_dir):
return toolset_vcvars return toolset_vcvars
def _msvc_toolset_versions_internal(msvc_version, vc_dir, full=True, sxs=False): def _msvc_toolset_versions_internal(msvc_version, vc_dir, full: bool=True, sxs: bool=False):
msvc = _msvc_version(msvc_version) msvc = _msvc_version(msvc_version)
@ -1020,12 +1020,12 @@ def _msvc_toolset_versions_spectre_internal(msvc_version, vc_dir):
return spectre_toolset_versions return spectre_toolset_versions
def reset(): def reset() -> None:
debug('') debug('')
_reset_have140_cache() _reset_have140_cache()
_reset_toolset_cache() _reset_toolset_cache()
def verify(): def verify() -> None:
debug('') debug('')
_verify_re_sdk_dispatch_map() _verify_re_sdk_dispatch_map()

View file

@ -54,7 +54,7 @@ class _Data:
need_init = True need_init = True
@classmethod @classmethod
def reset(cls): def reset(cls) -> None:
debug('msvc default:init') debug('msvc default:init')
cls.n_setup = 0 # number of calls to msvc_setup_env_once cls.n_setup = 0 # number of calls to msvc_setup_env_once
cls.default_ismsvc = False # is msvc the default compiler cls.default_ismsvc = False # is msvc the default compiler
@ -65,7 +65,7 @@ class _Data:
cls.msvc_nodefault = False # is there a default version of msvc cls.msvc_nodefault = False # is there a default version of msvc
cls.need_init = True # reset initialization indicator cls.need_init = True # reset initialization indicator
def _initialize(env, msvc_exists_func): def _initialize(env, msvc_exists_func) -> None:
if _Data.need_init: if _Data.need_init:
_Data.reset() _Data.reset()
_Data.need_init = False _Data.need_init = False
@ -88,7 +88,7 @@ def register_tool(env, tool, msvc_exists_func):
_Data.msvc_tools.add(tool) _Data.msvc_tools.add(tool)
debug('msvc default:tool=%s, msvc_tools=%s', tool, _Data.msvc_tools) debug('msvc default:tool=%s, msvc_tools=%s', tool, _Data.msvc_tools)
def register_setup(env, msvc_exists_func): def register_setup(env, msvc_exists_func) -> None:
if _Data.need_init: if _Data.need_init:
_initialize(env, msvc_exists_func) _initialize(env, msvc_exists_func)
_Data.n_setup += 1 _Data.n_setup += 1
@ -106,7 +106,7 @@ def register_setup(env, msvc_exists_func):
_Data.n_setup, _Data.msvc_installed, _Data.default_ismsvc _Data.n_setup, _Data.msvc_installed, _Data.default_ismsvc
) )
def set_nodefault(): def set_nodefault() -> None:
# default msvc version, msvc not installed # default msvc version, msvc not installed
_Data.msvc_nodefault = True _Data.msvc_nodefault = True
debug('msvc default:msvc_nodefault=%s', _Data.msvc_nodefault) debug('msvc default:msvc_nodefault=%s', _Data.msvc_nodefault)
@ -188,6 +188,9 @@ def register_iserror(env, tool, msvc_exists_func):
debug('msvc default:check tools:nchar=%d, tools=%s', tools_nchar, tools) debug('msvc default:check tools:nchar=%d, tools=%s', tools_nchar, tools)
# iteratively remove default tool sequences (longest to shortest) # iteratively remove default tool sequences (longest to shortest)
if not _Data.default_tools_re_list:
debug('default_tools_re_list=%s', _Data.default_tools_re_list)
else:
re_nchar_min, re_tools_min = _Data.default_tools_re_list[-1] re_nchar_min, re_tools_min = _Data.default_tools_re_list[-1]
if tools_nchar >= re_nchar_min and re_tools_min.search(tools): if tools_nchar >= re_nchar_min and re_tools_min.search(tools):
# minimum characters satisfied and minimum pattern exists # minimum characters satisfied and minimum pattern exists
@ -227,7 +230,7 @@ def register_iserror(env, tool, msvc_exists_func):
# return tool list in order presented # return tool list in order presented
return tools_found_list return tools_found_list
def reset(): def reset() -> None:
debug('') debug('')
_Data.reset() _Data.reset()

View file

@ -26,16 +26,25 @@ Helper functions for Microsoft Visual C/C++.
""" """
import os import os
import pathlib
import re import re
from collections import ( from collections import (
namedtuple, namedtuple,
) )
from ..common import debug
from . import Config from . import Config
# path utilities # path utilities
# windows drive specification (e.g., 'C:')
_RE_DRIVESPEC = re.compile(r'^[A-Za-z][:]$', re.IGNORECASE)
# windows path separators
_OS_PATH_SEPS = (os.path.sep, os.path.altsep) if os.path.altsep else (os.path.sep,)
def listdir_dirs(p): def listdir_dirs(p):
""" """
Return a list of tuples for each subdirectory of the given directory path. Return a list of tuples for each subdirectory of the given directory path.
@ -57,22 +66,92 @@ def listdir_dirs(p):
dirs.append((dir_name, dir_path)) dirs.append((dir_name, dir_path))
return dirs return dirs
def process_path(p): def resolve_path(p, ignore_drivespec=True):
""" """
Normalize a system path Make path absolute resolving any symlinks
Args: Args:
p: str p: str
system path system path
ignore_drivespec: bool
ignore drive specifications when True
Returns: Returns:
str: normalized system path str: absolute path with symlinks resolved
""" """
if p: if p:
if ignore_drivespec and _RE_DRIVESPEC.match(p):
# don't attempt to resolve drive specification (e.g., C:)
pass
else:
# both abspath and resolve necessary for an unqualified file name
# on a mapped network drive in order to return a mapped drive letter
# path rather than a UNC path.
p = os.path.abspath(p)
try:
p = str(pathlib.Path(p).resolve())
except OSError as e:
debug(
'caught exception: path=%s, exception=%s(%s)',
repr(p), type(e).__name__, repr(str(e))
)
return p
def normalize_path(
p,
strip=True,
preserve_trailing=False,
expand=False,
realpath=True,
ignore_drivespec=True,
):
"""
Normalize path
Args:
p: str
system path
strip: bool
remove leading and trailing whitespace when True
preserve_trailing: bool
preserve trailing path separator when True
expand: bool
apply expanduser and expandvars when True
realpath: bool
make the path absolute resolving any symlinks when True
ignore_drivespec: bool
ignore drive specifications for realpath when True
Returns:
str: normalized path
"""
if p and strip:
p = p.strip()
if p:
trailing = bool(preserve_trailing and p.endswith(_OS_PATH_SEPS))
if expand:
p = os.path.expanduser(p)
p = os.path.expandvars(p)
p = os.path.normpath(p) p = os.path.normpath(p)
p = os.path.realpath(p)
if realpath:
p = resolve_path(p, ignore_drivespec=ignore_drivespec)
p = os.path.normcase(p) p = os.path.normcase(p)
if trailing:
p += os.path.sep
return p return p
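
For illustration (not part of the vendored diff): a minimal usage sketch of the reworked path helpers above, assuming a Windows host and the resolve_path/normalize_path signatures introduced here; the input paths and results are invented.

    from SCons.Tool.MSCommon.MSVC import Util

    # bare drive specification is left alone (ignore_drivespec defaults to True)
    Util.normalize_path('C:')                      # -> 'c:'

    # whitespace stripped, separators collapsed, case folded, symlinks resolved
    Util.normalize_path(' C:/Temp//Build/ ')       # -> 'c:\\temp\\build'

    # trailing separator can be preserved, and resolution can be skipped
    Util.normalize_path('include\\um\\', preserve_trailing=True, realpath=False)
    # -> 'include\\um\\'
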
# msvc version and msvc toolset version regexes # msvc version and msvc toolset version regexes
@ -157,21 +236,21 @@ def get_msvc_version_prefix(version):
# toolset version query utilities # toolset version query utilities
def is_toolset_full(toolset_version): def is_toolset_full(toolset_version) -> bool:
rval = False rval = False
if toolset_version: if toolset_version:
if re_toolset_full.match(toolset_version): if re_toolset_full.match(toolset_version):
rval = True rval = True
return rval return rval
def is_toolset_140(toolset_version): def is_toolset_140(toolset_version) -> bool:
rval = False rval = False
if toolset_version: if toolset_version:
if re_toolset_140.match(toolset_version): if re_toolset_140.match(toolset_version):
rval = True rval = True
return rval return rval
def is_toolset_sxs(toolset_version): def is_toolset_sxs(toolset_version) -> bool:
rval = False rval = False
if toolset_version: if toolset_version:
if re_toolset_sxs.match(toolset_version): if re_toolset_sxs.match(toolset_version):
@ -228,7 +307,7 @@ def msvc_version_components(vcver):
msvc_vernum = float(msvc_verstr) msvc_vernum = float(msvc_verstr)
msvc_comps = tuple(msvc_verstr.split('.')) msvc_comps = tuple(msvc_verstr.split('.'))
msvc_major, msvc_minor = [int(x) for x in msvc_comps] msvc_major, msvc_minor = (int(x) for x in msvc_comps)
msvc_version_components_def = _MSVC_VERSION_COMPONENTS_DEFINITION( msvc_version_components_def = _MSVC_VERSION_COMPONENTS_DEFINITION(
msvc_version = msvc_version, msvc_version = msvc_version,
@ -291,7 +370,7 @@ def msvc_extended_version_components(version):
msvc_vernum = float(msvc_verstr) msvc_vernum = float(msvc_verstr)
msvc_comps = tuple(msvc_verstr.split('.')) msvc_comps = tuple(msvc_verstr.split('.'))
msvc_major, msvc_minor = [int(x) for x in msvc_comps] msvc_major, msvc_minor = (int(x) for x in msvc_comps)
msvc_extended_version_components_def = _MSVC_EXTENDED_VERSION_COMPONENTS_DEFINITION( msvc_extended_version_components_def = _MSVC_EXTENDED_VERSION_COMPONENTS_DEFINITION(
msvc_version = msvc_version, msvc_version = msvc_version,
@ -351,7 +430,7 @@ def msvc_sdk_version_components(version):
sdk_verstr = '.'.join(sdk_comps[:2]) sdk_verstr = '.'.join(sdk_comps[:2])
sdk_vernum = float(sdk_verstr) sdk_vernum = float(sdk_verstr)
sdk_major, sdk_minor = [int(x) for x in sdk_comps[:2]] sdk_major, sdk_minor = (int(x) for x in sdk_comps[:2])
msvc_sdk_version_components_def = _MSVC_SDK_VERSION_COMPONENTS_DEFINITION( msvc_sdk_version_components_def = _MSVC_SDK_VERSION_COMPONENTS_DEFINITION(
sdk_version = sdk_version, sdk_version = sdk_version,

View file

@ -83,7 +83,7 @@ def _sdk_10_layout(version):
if not version_nbr.startswith(folder_prefix): if not version_nbr.startswith(folder_prefix):
continue continue
sdk_inc_path = Util.process_path(os.path.join(version_nbr_path, 'um')) sdk_inc_path = Util.normalize_path(os.path.join(version_nbr_path, 'um'))
if not os.path.exists(sdk_inc_path): if not os.path.exists(sdk_inc_path):
continue continue
@ -127,7 +127,7 @@ def _sdk_81_layout(version):
# msvc does not check for existence of root or other files # msvc does not check for existence of root or other files
sdk_inc_path = Util.process_path(os.path.join(sdk_root, r'include\um')) sdk_inc_path = Util.normalize_path(os.path.join(sdk_root, r'include\um'))
if not os.path.exists(sdk_inc_path): if not os.path.exists(sdk_inc_path):
continue continue
@ -154,7 +154,7 @@ def _sdk_81_layout(version):
_sdk_map_cache = {} _sdk_map_cache = {}
_sdk_cache = {} _sdk_cache = {}
def _reset_sdk_cache(): def _reset_sdk_cache() -> None:
global _sdk_map_cache global _sdk_map_cache
global _sdk_cache global _sdk_cache
debug('') debug('')
@ -194,7 +194,7 @@ def _verify_sdk_dispatch_map():
for sdk_version in Config.MSVC_SDK_VERSIONS: for sdk_version in Config.MSVC_SDK_VERSIONS:
if sdk_version in _sdk_dispatch_map: if sdk_version in _sdk_dispatch_map:
continue continue
err_msg = 'sdk version {} not in sdk_dispatch_map'.format(sdk_version) err_msg = f'sdk version {sdk_version} not in sdk_dispatch_map'
raise MSVCInternalError(err_msg) raise MSVCInternalError(err_msg)
return None return None
@ -220,7 +220,7 @@ def _sdk_map(version_list):
_sdk_cache[key] = sdk_map _sdk_cache[key] = sdk_map
return sdk_map return sdk_map
def get_msvc_platform(is_uwp=False): def get_msvc_platform(is_uwp: bool=False):
platform_def = _UWP if is_uwp else _DESKTOP platform_def = _UWP if is_uwp else _DESKTOP
return platform_def return platform_def
@ -230,7 +230,7 @@ def get_sdk_version_list(vs_def, platform_def):
sdk_list = sdk_map.get(platform_def.vc_platform, []) sdk_list = sdk_map.get(platform_def.vc_platform, [])
return sdk_list return sdk_list
def get_msvc_sdk_version_list(msvc_version, msvc_uwp_app=False): def get_msvc_sdk_version_list(msvc_version, msvc_uwp_app: bool=False):
debug('msvc_version=%s, msvc_uwp_app=%s', repr(msvc_version), repr(msvc_uwp_app)) debug('msvc_version=%s, msvc_uwp_app=%s', repr(msvc_version), repr(msvc_uwp_app))
sdk_versions = [] sdk_versions = []
@ -254,11 +254,11 @@ def get_msvc_sdk_version_list(msvc_version, msvc_uwp_app=False):
return sdk_versions return sdk_versions
def reset(): def reset() -> None:
debug('') debug('')
_reset_sdk_cache() _reset_sdk_cache()
def verify(): def verify() -> None:
debug('') debug('')
_verify_sdk_dispatch_map() _verify_sdk_dispatch_map()

View file

@ -47,9 +47,9 @@ from . import ScriptArguments # noqa: F401
from . import Dispatcher as _Dispatcher from . import Dispatcher as _Dispatcher
def _reset(): def _reset() -> None:
_Dispatcher.reset() _Dispatcher.reset()
def _verify(): def _verify() -> None:
_Dispatcher.verify() _Dispatcher.verify()

View file

@ -40,7 +40,7 @@ The following issues are known to exist:
* The code to suppress the "No versions of the MSVC compiler were found" warning for * The code to suppress the "No versions of the MSVC compiler were found" warning for
the default environment was moved from ``MSCommon/vc.py`` to ``MSCommon/MSVC/SetupEnvDefault.py``. the default environment was moved from ``MSCommon/vc.py`` to ``MSCommon/MSVC/SetupEnvDefault.py``.
There very few, if any, existing unit tests. Now that the code is isolated in its own There are very few, if any, existing unit tests. Now that the code is isolated in its own
module with a limited API, unit tests may be easier to implement. module with a limited API, unit tests may be easier to implement.
@ -59,6 +59,7 @@ This is a proxy for using the toolset version for selection until that functiona
Example usage: Example usage:
:: ::
for version in [ for version in [
'14.3', '14.3',
'14.2', '14.2',
@ -90,6 +91,7 @@ Example usage:
Example output fragment Example output fragment
:: ::
Build: _build003 {'MSVC_VERSION': '14.3', 'MSVC_TOOLSET_VERSION': '14.29.30133'} Build: _build003 {'MSVC_VERSION': '14.3', 'MSVC_TOOLSET_VERSION': '14.29.30133'}
Where: C:\Software\MSVS-2022-143-Com\VC\Tools\MSVC\14.29.30133\bin\HostX64\x64\cl.exe Where: C:\Software\MSVS-2022-143-Com\VC\Tools\MSVC\14.29.30133\bin\HostX64\x64\cl.exe
Where: C:\Software\MSVS-2022-143-Com\Common7\Tools\guidgen.exe Where: C:\Software\MSVS-2022-143-Com\Common7\Tools\guidgen.exe
@ -138,6 +140,7 @@ for build failures. Refer to the documentation for details.
Change the default policy: Change the default policy:
:: ::
from SCons.Tool.MSCommon import msvc_set_scripterror_policy from SCons.Tool.MSCommon import msvc_set_scripterror_policy
msvc_set_scripterror_policy('Warning') msvc_set_scripterror_policy('Warning')
@ -169,6 +172,7 @@ detection of installed msvc instances.
Windows command-line sample invocations: Windows command-line sample invocations:
:: ::
@rem 64-Bit Windows @rem 64-Bit Windows
"%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vswhere.exe" -all -sort -prerelease -products * -legacy -format json >MYVSWHEREOUTPUT.json "%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vswhere.exe" -all -sort -prerelease -products * -legacy -format json >MYVSWHEREOUTPUT.json
@ -241,6 +245,7 @@ toolset specification should be omitted entirely.
Local installation and summary test results: Local installation and summary test results:
:: ::
VS2022\VC\Auxiliary\Build\Microsoft.VCToolsVersion.v143.default.txt VS2022\VC\Auxiliary\Build\Microsoft.VCToolsVersion.v143.default.txt
14.31.31103 14.31.31103
@ -249,6 +254,7 @@ Local installation and summary test results:
Toolset version summary: Toolset version summary:
:: ::
14.31.31103 Environment() 14.31.31103 Environment()
14.31.31103 Environment(MSVC_TOOLSET_VERSION=None) 14.31.31103 Environment(MSVC_TOOLSET_VERSION=None)
@ -263,6 +269,7 @@ Toolset version summary:
VS2022\\Common7\\Tools\\vsdevcmd\\ext\\vcvars.bat usage fragment: VS2022\\Common7\\Tools\\vsdevcmd\\ext\\vcvars.bat usage fragment:
:: ::
@echo -vcvars_ver=version : Version of VC++ Toolset to select @echo -vcvars_ver=version : Version of VC++ Toolset to select
@echo ** [Default] : If -vcvars_ver=version is NOT specified, the toolset specified by @echo ** [Default] : If -vcvars_ver=version is NOT specified, the toolset specified by
@echo [VSInstallDir]\VC\Auxiliary\Build\Microsoft.VCToolsVersion.v143.default.txt will be used. @echo [VSInstallDir]\VC\Auxiliary\Build\Microsoft.VCToolsVersion.v143.default.txt will be used.
@ -283,6 +290,7 @@ VS2022\\Common7\\Tools\\vsdevcmd\\ext\\vcvars.bat usage fragment:
VS2022 batch file fragment to determine the default toolset version: VS2022 batch file fragment to determine the default toolset version:
:: ::
@REM Add MSVC @REM Add MSVC
set "__VCVARS_DEFAULT_CONFIG_FILE=%VCINSTALLDIR%Auxiliary\Build\Microsoft.VCToolsVersion.default.txt" set "__VCVARS_DEFAULT_CONFIG_FILE=%VCINSTALLDIR%Auxiliary\Build\Microsoft.VCToolsVersion.default.txt"
@ -349,33 +357,33 @@ v60 6.0 12.0 60
Product Versions Product Versions
---------------- ----------------
======== ===== ========= ============ ======== ===== ========= ======================
Product VSVER SDK BuildTools Product VSVER SDK BuildTools
======== ===== ========= ============ ======== ===== ========= ======================
2022 17.0 10.0, 8.1 v143 .. v140 2022 17.0 10.0, 8.1 v143, v142, v141, v140
-------- ----- --------- ------------ -------- ----- --------- ----------------------
2019 16.0 10.0, 8.1 v142 .. v140 2019 16.0 10.0, 8.1 v142, v141, v140
-------- ----- --------- ------------ -------- ----- --------- ----------------------
2017 15.0 10.0, 8.1 v141 .. v140 2017 15.0 10.0, 8.1 v141, v140
-------- ----- --------- ------------ -------- ----- --------- ----------------------
2015 14.0 10.0, 8.1 v140 2015 14.0 10.0, 8.1 v140
-------- ----- --------- ------------ -------- ----- --------- ----------------------
2013 12.0 v120 2013 12.0 v120
-------- ----- --------- ------------ -------- ----- --------- ----------------------
2012 11.0 v110 2012 11.0 v110
-------- ----- --------- ------------ -------- ----- --------- ----------------------
2010 10.0 v100 2010 10.0 v100
-------- ----- --------- ------------ -------- ----- --------- ----------------------
2008 9.0 v90 2008 9.0 v90
-------- ----- --------- ------------ -------- ----- --------- ----------------------
2005 8.0 v80 2005 8.0 v80
-------- ----- --------- ------------ -------- ----- --------- ----------------------
2003.NET 7.1 v71 2003.NET 7.1 v71
-------- ----- --------- ------------ -------- ----- --------- ----------------------
2002.NET 7.0 v70 2002.NET 7.0 v70
-------- ----- --------- ------------ -------- ----- --------- ----------------------
6.0 6.0 v60 6.0 6.0 v60
======== ===== ========= ============ ======== ===== ========= ======================
SCons Implementation Notes SCons Implementation Notes

View file

@ -30,7 +30,7 @@ class ArchDefinition:
""" """
A class for defining architecture-specific settings and logic. A class for defining architecture-specific settings and logic.
""" """
def __init__(self, arch, synonyms=[]): def __init__(self, arch, synonyms=[]) -> None:
self.arch = arch self.arch = arch
self.synonyms = synonyms self.synonyms = synonyms

View file

@ -29,9 +29,9 @@ import copy
import json import json
import os import os
import re import re
import subprocess
import sys import sys
from contextlib import suppress from contextlib import suppress
from subprocess import DEVNULL, PIPE
from pathlib import Path from pathlib import Path
import SCons.Util import SCons.Util
@ -46,6 +46,9 @@ LOGFILE = os.environ.get('SCONS_MSCOMMON_DEBUG')
if LOGFILE: if LOGFILE:
import logging import logging
class _Debug_Filter(logging.Filter):
# custom filter for module relative filename
modulelist = ( modulelist = (
# root module and parent/root module # root module and parent/root module
'MSCommon', 'Tool', 'MSCommon', 'Tool',
@ -55,7 +58,7 @@ if LOGFILE:
'SCons', 'test', 'scons' 'SCons', 'test', 'scons'
) )
def get_relative_filename(filename, module_list): def get_relative_filename(self, filename, module_list):
if not filename: if not filename:
return filename return filename
for module in module_list: for module in module_list:
@ -66,17 +69,18 @@ if LOGFILE:
pass pass
return filename return filename
class _Debug_Filter(logging.Filter): def filter(self, record) -> bool:
# custom filter for module relative filename relfilename = self.get_relative_filename(record.pathname, self.modulelist)
def filter(self, record):
relfilename = get_relative_filename(record.pathname, modulelist)
relfilename = relfilename.replace('\\', '/') relfilename = relfilename.replace('\\', '/')
record.relfilename = relfilename record.relfilename = relfilename
return True return True
class _CustomFormatter(logging.Formatter):
# Log format looks like: # Log format looks like:
# 00109ms:MSCommon/vc.py:find_vc_pdir#447: VC found '14.3' [file] # 00109ms:MSCommon/vc.py:find_vc_pdir#447: VC found '14.3' [file]
# debug: 00109ms:MSCommon/vc.py:find_vc_pdir#447: VC found '14.3' [stdout] # debug: 00109ms:MSCommon/vc.py:find_vc_pdir#447: VC found '14.3' [stdout]
log_format=( log_format=(
'%(relativeCreated)05dms' '%(relativeCreated)05dms'
':%(relfilename)s' ':%(relfilename)s'
@ -84,25 +88,76 @@ if LOGFILE:
'#%(lineno)s' '#%(lineno)s'
': %(message)s' ': %(message)s'
) )
log_format_classname=(
'%(relativeCreated)05dms'
':%(relfilename)s'
':%(classname)s'
'.%(funcName)s'
'#%(lineno)s'
': %(message)s'
)
def __init__(self, log_prefix):
super().__init__()
if log_prefix:
self.log_format = log_prefix + self.log_format
self.log_format_classname = log_prefix + self.log_format_classname
log_record = logging.LogRecord(
'', # name (str)
0, # level (int)
'', # pathname (str)
0, # lineno (int)
None, # msg (Any)
{}, # args (tuple | dict[str, Any])
None # exc_info (tuple[type[BaseException], BaseException, types.TracebackType] | None)
)
self.default_attrs = set(log_record.__dict__.keys())
self.default_attrs.add('relfilename')
def format(self, record):
extras = set(record.__dict__.keys()) - self.default_attrs
if 'classname' in extras:
log_format = self.log_format_classname
else:
log_format = self.log_format
formatter = logging.Formatter(log_format)
return formatter.format(record)
if LOGFILE == '-': if LOGFILE == '-':
log_format = 'debug: ' + log_format log_prefix = 'debug: '
log_handler = logging.StreamHandler(sys.stdout) log_handler = logging.StreamHandler(sys.stdout)
else: else:
log_prefix = ''
log_handler = logging.FileHandler(filename=LOGFILE) log_handler = logging.FileHandler(filename=LOGFILE)
log_formatter = logging.Formatter(log_format) log_formatter = _CustomFormatter(log_prefix)
log_handler.setFormatter(log_formatter) log_handler.setFormatter(log_formatter)
logger = logging.getLogger(name=__name__) logger = logging.getLogger(name=__name__)
logger.setLevel(level=logging.DEBUG) logger.setLevel(level=logging.DEBUG)
logger.addHandler(log_handler) logger.addHandler(log_handler)
logger.addFilter(_Debug_Filter()) logger.addFilter(_Debug_Filter())
debug = logger.debug debug = logger.debug
def debug_extra(cls=None):
if cls:
extra = {'classname': cls.__qualname__}
else:
extra = None
return extra
DEBUG_ENABLED = True
else: else:
def debug(x, *args): def debug(x, *args, **kwargs):
return None return None
def debug_extra(*args, **kwargs):
return None
DEBUG_ENABLED = False
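
For illustration (not part of the vendored diff): a sketch of how the classname-aware debug machinery above is meant to be fed, assuming the debug/debug_extra pair defined in this module; the class, message, and rendered output line are invented.

    class _Example:
        @classmethod
        def find(cls, version):
            extra = debug_extra(cls)  # {'classname': '_Example'} when logging is enabled
            debug('looking up version=%s', repr(version), extra=extra)

    # With SCONS_MSCOMMON_DEBUG set, the record is formatted with
    # log_format_classname and renders roughly as:
    #   00123ms:MSCommon/common.py:_Example.find#5: looking up version='14.3'
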
# SCONS_CACHE_MSVC_CONFIG is public, and is documented. # SCONS_CACHE_MSVC_CONFIG is public, and is documented.
CONFIG_CACHE = os.environ.get('SCONS_CACHE_MSVC_CONFIG') CONFIG_CACHE = os.environ.get('SCONS_CACHE_MSVC_CONFIG', '')
if CONFIG_CACHE in ('1', 'true', 'True'): if CONFIG_CACHE in ('1', 'true', 'True'):
CONFIG_CACHE = os.path.join(os.path.expanduser('~'), 'scons_msvc_cache.json') CONFIG_CACHE = os.path.join(os.path.expanduser('~'), 'scons_msvc_cache.json')
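
For illustration (not part of the vendored diff): the cache location is driven entirely by SCONS_CACHE_MSVC_CONFIG; a small runnable restatement of the mapping above (the explicit path is an example, not a requirement).

    import os

    for value in ('1', 'true', r'C:\build\msvc_cache.json', ''):
        cache = value
        if cache in ('1', 'true', 'True'):
            cache = os.path.join(os.path.expanduser('~'), 'scons_msvc_cache.json')
        print(value or '<unset>', '->', cache or '<caching disabled>')
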
@ -112,56 +167,69 @@ if CONFIG_CACHE:
if os.environ.get('SCONS_CACHE_MSVC_FORCE_DEFAULTS') in ('1', 'true', 'True'): if os.environ.get('SCONS_CACHE_MSVC_FORCE_DEFAULTS') in ('1', 'true', 'True'):
CONFIG_CACHE_FORCE_DEFAULT_ARGUMENTS = True CONFIG_CACHE_FORCE_DEFAULT_ARGUMENTS = True
def read_script_env_cache(): def read_script_env_cache() -> dict:
""" fetch cached msvc env vars if requested, else return empty dict """ """ fetch cached msvc env vars if requested, else return empty dict """
envcache = {} envcache = {}
if CONFIG_CACHE:
try:
p = Path(CONFIG_CACHE) p = Path(CONFIG_CACHE)
with p.open('r') as f: if not CONFIG_CACHE or not p.is_file():
return envcache
with SCons.Util.FileLock(CONFIG_CACHE, timeout=5, writer=False), p.open('r') as f:
# Convert the list of cache entry dictionaries read from # Convert the list of cache entry dictionaries read from
# json to the cache dictionary. Reconstruct the cache key # json to the cache dictionary. Reconstruct the cache key
# tuple from the key list written to json. # tuple from the key list written to json.
# Note we need to take a write lock on the cachefile, as if there's
# an error and we try to remove it, that's "writing" on Windows.
try:
envcache_list = json.load(f) envcache_list = json.load(f)
except json.JSONDecodeError:
# If we couldn't decode it, it could be corrupt. Toss.
with suppress(FileNotFoundError):
p.unlink()
warn_msg = "Could not decode msvc cache file %s: dropping."
SCons.Warnings.warn(MSVCCacheInvalidWarning, warn_msg % CONFIG_CACHE)
debug(warn_msg, CONFIG_CACHE)
else:
if isinstance(envcache_list, list): if isinstance(envcache_list, list):
envcache = {tuple(d['key']): d['data'] for d in envcache_list} envcache = {tuple(d['key']): d['data'] for d in envcache_list}
else: else:
# don't fail if incompatible format, just proceed without it # don't fail if incompatible format, just proceed without it
warn_msg = "Incompatible format for msvc cache file {}: file may be overwritten.".format( warn_msg = "Incompatible format for msvc cache file %s: file may be overwritten."
repr(CONFIG_CACHE) SCons.Warnings.warn(MSVCCacheInvalidWarning, warn_msg % CONFIG_CACHE)
) debug(warn_msg, CONFIG_CACHE)
SCons.Warnings.warn(MSVCCacheInvalidWarning, warn_msg)
debug(warn_msg)
except FileNotFoundError:
# don't fail if no cache file, just proceed without it
pass
return envcache return envcache
def write_script_env_cache(cache): def write_script_env_cache(cache) -> None:
""" write out cache of msvc env vars if requested """ """ write out cache of msvc env vars if requested """
if CONFIG_CACHE: if not CONFIG_CACHE:
try: return
p = Path(CONFIG_CACHE) p = Path(CONFIG_CACHE)
with p.open('w') as f: try:
with SCons.Util.FileLock(CONFIG_CACHE, timeout=5, writer=True), p.open('w') as f:
# Convert the cache dictionary to a list of cache entry # Convert the cache dictionary to a list of cache entry
# dictionaries. The cache key is converted from a tuple to # dictionaries. The cache key is converted from a tuple to
# a list for compatibility with json. # a list for compatibility with json.
envcache_list = [{'key': list(key), 'data': data} for key, data in cache.items()] envcache_list = [
{'key': list(key), 'data': data} for key, data in cache.items()
]
json.dump(envcache_list, f, indent=2) json.dump(envcache_list, f, indent=2)
except TypeError: except TypeError:
# data can't serialize to json, don't leave partial file # data can't serialize to json, don't leave partial file
with suppress(FileNotFoundError): with suppress(FileNotFoundError):
p.unlink() p.unlink()
except IOError: except OSError:
# can't write the file, just skip # can't write the file, just skip
pass pass
return
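
For illustration (not part of the vendored diff): a self-contained sketch of the on-disk layout the two cache helpers above agree on; the in-memory dict is keyed by tuples, while the JSON file holds a list of {'key': [...], 'data': {...}} entries. The key fields and environment data below are invented, not the exact contents SCons writes.

    import json

    cache = {('14.3', 'x86', 'amd64'): {'PATH': ['C:\\VC\\bin'], 'INCLUDE': ['C:\\VC\\include']}}
    envcache_list = [{'key': list(key), 'data': data} for key, data in cache.items()]
    text = json.dumps(envcache_list, indent=2)

    # reading it back reconstructs the tuple keys, as read_script_env_cache does
    restored = {tuple(d['key']): d['data'] for d in json.loads(text)}
    assert restored == cache
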
_is_win64 = None _is_win64 = None
def is_win64(): def is_win64() -> bool:
"""Return true if running on windows 64 bits. """Return true if running on windows 64 bits.
Works whether python itself runs in 64 bits or 32 bits.""" Works whether python itself runs in 64 bits or 32 bits."""
@ -196,9 +264,8 @@ def read_reg(value, hkroot=SCons.Util.HKEY_LOCAL_MACHINE):
return SCons.Util.RegGetValue(hkroot, value)[0] return SCons.Util.RegGetValue(hkroot, value)[0]
def has_reg(value): def has_reg(value) -> bool:
"""Return True if the given key exists in HKEY_LOCAL_MACHINE, False """Return True if the given key exists in HKEY_LOCAL_MACHINE."""
otherwise."""
try: try:
SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, value) SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, value)
ret = True ret = True
@ -209,7 +276,18 @@ def has_reg(value):
# Functions for fetching environment variable settings from batch files. # Functions for fetching environment variable settings from batch files.
def normalize_env(env, keys, force=False): def _force_vscmd_skip_sendtelemetry(env):
if 'VSCMD_SKIP_SENDTELEMETRY' in env['ENV']:
return False
env['ENV']['VSCMD_SKIP_SENDTELEMETRY'] = '1'
debug("force env['ENV']['VSCMD_SKIP_SENDTELEMETRY']=%s", env['ENV']['VSCMD_SKIP_SENDTELEMETRY'])
return True
def normalize_env(env, keys, force: bool=False):
"""Given a dictionary representing a shell environment, add the variables """Given a dictionary representing a shell environment, add the variables
from os.environ needed for the processing of .bat files; the keys are from os.environ needed for the processing of .bat files; the keys are
controlled by the keys argument. controlled by the keys argument.
@ -257,7 +335,7 @@ def normalize_env(env, keys, force=False):
return normenv return normenv
def get_output(vcbat, args=None, env=None): def get_output(vcbat, args=None, env=None, skip_sendtelemetry=False):
"""Parse the output of given bat file, with given args.""" """Parse the output of given bat file, with given args."""
if env is None: if env is None:
@ -296,51 +374,36 @@ def get_output(vcbat, args=None, env=None):
] ]
env['ENV'] = normalize_env(env['ENV'], vs_vc_vars, force=False) env['ENV'] = normalize_env(env['ENV'], vs_vc_vars, force=False)
if skip_sendtelemetry:
_force_vscmd_skip_sendtelemetry(env)
if args: if args:
debug("Calling '%s %s'", vcbat, args) debug("Calling '%s %s'", vcbat, args)
popen = SCons.Action._subproc(env, cmd_str = '"%s" %s & set' % (vcbat, args)
'"%s" %s & set' % (vcbat, args),
stdin='devnull',
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
else: else:
debug("Calling '%s'", vcbat) debug("Calling '%s'", vcbat)
popen = SCons.Action._subproc(env, cmd_str = '"%s" & set' % vcbat
'"%s" & set' % vcbat,
stdin='devnull',
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
# Use the .stdout and .stderr attributes directly because the cp = SCons.Action.scons_subproc_run(
# .communicate() method uses the threading module on Windows env, cmd_str, stdin=DEVNULL, stdout=PIPE, stderr=PIPE,
# and won't work under Pythons not built with threading. )
with popen.stdout:
stdout = popen.stdout.read()
with popen.stderr:
stderr = popen.stderr.read()
# Extra debug logic, uncomment if necessary # Extra debug logic, uncomment if necessary
# debug('stdout:%s', stdout) # debug('stdout:%s', cp.stdout)
# debug('stderr:%s', stderr) # debug('stderr:%s', cp.stderr)
# Ongoing problems getting non-corrupted text led to this # Ongoing problems getting non-corrupted text led to this
# changing to "oem" from "mbcs" - the scripts run presumably # changing to "oem" from "mbcs" - the scripts run presumably
# attached to a console, so some particular rules apply. # attached to a console, so some particular rules apply.
# Unfortunately, "oem" not defined in Python 3.5, so get another way
if sys.version_info.major == 3 and sys.version_info.minor < 6:
from ctypes import windll
OEM = "cp{}".format(windll.kernel32.GetConsoleOutputCP())
else:
OEM = "oem" OEM = "oem"
if stderr: if cp.stderr:
# TODO: find something better to do with stderr; # TODO: find something better to do with stderr;
# this at least prevents errors from getting swallowed. # this at least prevents errors from getting swallowed.
sys.stderr.write(stderr.decode(OEM)) sys.stderr.write(cp.stderr.decode(OEM))
if popen.wait() != 0: if cp.returncode != 0:
raise IOError(stderr.decode(OEM)) raise OSError(cp.stderr.decode(OEM))
return stdout.decode(OEM) return cp.stdout.decode(OEM)
KEEPLIST = ( KEEPLIST = (
@ -367,9 +430,9 @@ def parse_output(output, keep=KEEPLIST):
# rdk will keep the regex to match the .bat file output line starts # rdk will keep the regex to match the .bat file output line starts
rdk = {} rdk = {}
for i in keep: for i in keep:
rdk[i] = re.compile('%s=(.*)' % i, re.I) rdk[i] = re.compile(r'%s=(.*)' % i, re.I)
def add_env(rmatch, key, dkeep=dkeep): def add_env(rmatch, key, dkeep=dkeep) -> None:
path_list = rmatch.group(1).split(os.pathsep) path_list = rmatch.group(1).split(os.pathsep)
for path in path_list: for path in path_list:
# Do not add empty paths (when a var ends with ;) # Do not add empty paths (when a var ends with ;)
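
For illustration (not part of the vendored diff): a minimal sketch of what the parsing above does to the output of running a vcvars batch file followed by `set`; the variable name comes from KEEPLIST and the values are invented.

    import re

    sample = "INCLUDE=C:\\VC\\include;C:\\SDK\\include;\nLIBPATH=C:\\VC\\lib\n"
    rx = re.compile(r'INCLUDE=(.*)', re.I)   # same shape as the rdk patterns above
    for line in sample.splitlines():
        m = rx.match(line)
        if m:
            # empty entries (a var ending in ';') are dropped, as in add_env
            paths = [p for p in m.group(1).split(';') if p]   # ';' is os.pathsep on Windows
            assert paths == ['C:\\VC\\include', 'C:\\SDK\\include']
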

View file

@ -45,10 +45,9 @@ from .common import debug, read_reg
# seem to be any sane registry key, so the precise location is hardcoded. # seem to be any sane registry key, so the precise location is hardcoded.
# #
# For versions below 2003R1, it seems the PSDK is included with Visual Studio? # For versions below 2003R1, it seems the PSDK is included with Visual Studio?
#
# Also, per the following:
# http://benjamin.smedbergs.us/blog/tag/atl/
# VC++ Professional comes with the SDK, VC++ Express does not. # VC++ Professional comes with the SDK, VC++ Express does not.
#
# Of course, all this changed again after Express was phased out (2005).
# Location of the SDK (checked for 6.1 only) # Location of the SDK (checked for 6.1 only)
_CURINSTALLED_SDK_HKEY_ROOT = \ _CURINSTALLED_SDK_HKEY_ROOT = \
@ -59,7 +58,7 @@ class SDKDefinition:
""" """
An abstract base class for trying to find installed SDK directories. An abstract base class for trying to find installed SDK directories.
""" """
def __init__(self, version, **kw): def __init__(self, version, **kw) -> None:
self.version = version self.version = version
self.__dict__.update(kw) self.__dict__.update(kw)
@ -130,7 +129,7 @@ class WindowsSDK(SDKDefinition):
A subclass for trying to find installed Windows SDK directories. A subclass for trying to find installed Windows SDK directories.
""" """
HKEY_FMT = r'Software\Microsoft\Microsoft SDKs\Windows\v%s\InstallationFolder' HKEY_FMT = r'Software\Microsoft\Microsoft SDKs\Windows\v%s\InstallationFolder'
def __init__(self, *args, **kw): def __init__(self, *args, **kw) -> None:
super().__init__(*args, **kw) super().__init__(*args, **kw)
self.hkey_data = self.version self.hkey_data = self.version
@ -139,7 +138,7 @@ class PlatformSDK(SDKDefinition):
A subclass for trying to find installed Platform SDK directories. A subclass for trying to find installed Platform SDK directories.
""" """
HKEY_FMT = r'Software\Microsoft\MicrosoftSDK\InstalledSDKS\%s\Install Dir' HKEY_FMT = r'Software\Microsoft\MicrosoftSDK\InstalledSDKS\%s\Install Dir'
def __init__(self, *args, **kw): def __init__(self, *args, **kw) -> None:
super().__init__(*args, **kw) super().__init__(*args, **kw)
self.hkey_data = self.uuid self.hkey_data = self.uuid
@ -306,7 +305,7 @@ def get_installed_sdks():
SDKEnvironmentUpdates = {} SDKEnvironmentUpdates = {}
def set_sdk_by_directory(env, sdk_dir): def set_sdk_by_directory(env, sdk_dir) -> None:
global SDKEnvironmentUpdates global SDKEnvironmentUpdates
debug('set_sdk_by_directory: Using dir:%s', sdk_dir) debug('set_sdk_by_directory: Using dir:%s', sdk_dir)
try: try:
@ -334,7 +333,7 @@ def set_sdk_by_directory(env, sdk_dir):
def get_sdk_by_version(mssdk): def get_sdk_by_version(mssdk):
if mssdk not in SupportedSDKMap: if mssdk not in SupportedSDKMap:
raise SCons.Errors.UserError("SDK version {} is not supported".format(repr(mssdk))) raise SCons.Errors.UserError(f"SDK version {mssdk!r} is not supported")
get_installed_sdks() get_installed_sdks()
return InstalledSDKMap.get(mssdk) return InstalledSDKMap.get(mssdk)

View file

@ -43,6 +43,7 @@ import SCons.compat
import subprocess import subprocess
import os import os
import platform import platform
import sysconfig
from pathlib import Path from pathlib import Path
from string import digits as string_digits from string import digits as string_digits
from subprocess import PIPE from subprocess import PIPE
@ -69,8 +70,7 @@ from .MSVC.Exceptions import (
MSVCToolsetVersionNotFound, MSVCToolsetVersionNotFound,
) )
class UnsupportedVersion(VisualCException): # external exceptions
pass
class MSVCUnsupportedHostArch(VisualCException): class MSVCUnsupportedHostArch(VisualCException):
pass pass
@ -78,21 +78,32 @@ class MSVCUnsupportedHostArch(VisualCException):
class MSVCUnsupportedTargetArch(VisualCException): class MSVCUnsupportedTargetArch(VisualCException):
pass pass
class MissingConfiguration(VisualCException):
pass
class NoVersionFound(VisualCException):
pass
class BatchFileExecutionError(VisualCException):
pass
class MSVCScriptNotFound(MSVCUserError): class MSVCScriptNotFound(MSVCUserError):
pass pass
class MSVCUseSettingsError(MSVCUserError): class MSVCUseSettingsError(MSVCUserError):
pass pass
# internal exceptions
class UnsupportedVersion(VisualCException):
pass
class BatchFileExecutionError(VisualCException):
pass
# undefined object for dict.get() in case key exists and value is None
UNDEFINED = object()
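
For illustration (not part of the vendored diff): the UNDEFINED sentinel exists so dict.get() can distinguish a key that is absent from one stored with the value None; a minimal sketch, with the dict and key names chosen only for the example.

    settings = {'MSVC_USE_SCRIPT': None}       # key present, explicitly set to None

    value = settings.get('MSVC_USE_SCRIPT', UNDEFINED)
    assert value is None                       # present but None

    value = settings.get('MSVC_SPECTRE_LIBS', UNDEFINED)
    assert value is UNDEFINED                  # genuinely missing
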
# powershell error sending telemetry for arm32 process on arm64 host (VS2019+):
# True: force VSCMD_SKIP_SENDTELEMETRY=1 (if necessary)
# False: do nothing
_ARM32_ON_ARM64_SKIP_SENDTELEMETRY = True
# MSVC 9.0 preferred query order:
# True: VCForPython, VisualStudio
# False: VisualStudio, VCForPython # False: VisualStudio, VCForPython
_VC90_Prefer_VCForPython = True
# Dict to 'canonalize' the arch # Dict to 'canonalize' the arch
_ARCH_TO_CANONICAL = { _ARCH_TO_CANONICAL = {
@ -282,7 +293,9 @@ def _host_target_config_factory(*, label, host_all_hosts, host_all_targets, host
# The cl path fragment under the toolset version folder is the second value of # The cl path fragment under the toolset version folder is the second value of
# the stored tuple. # the stored tuple.
_GE2017_HOST_TARGET_BATCHFILE_CLPATHCOMPS = { # 14.3 (VS2022) and later
_GE2022_HOST_TARGET_BATCHFILE_CLPATHCOMPS = {
('amd64', 'amd64') : ('vcvars64.bat', ('bin', 'Hostx64', 'x64')), ('amd64', 'amd64') : ('vcvars64.bat', ('bin', 'Hostx64', 'x64')),
('amd64', 'x86') : ('vcvarsamd64_x86.bat', ('bin', 'Hostx64', 'x86')), ('amd64', 'x86') : ('vcvarsamd64_x86.bat', ('bin', 'Hostx64', 'x86')),
@ -294,11 +307,66 @@ _GE2017_HOST_TARGET_BATCHFILE_CLPATHCOMPS = {
('x86', 'arm') : ('vcvarsx86_arm.bat', ('bin', 'Hostx86', 'arm')), ('x86', 'arm') : ('vcvarsx86_arm.bat', ('bin', 'Hostx86', 'arm')),
('x86', 'arm64') : ('vcvarsx86_arm64.bat', ('bin', 'Hostx86', 'arm64')), ('x86', 'arm64') : ('vcvarsx86_arm64.bat', ('bin', 'Hostx86', 'arm64')),
('arm64', 'amd64') : ('vcvarsarm64_amd64.bat', ('bin', 'Hostarm64', 'arm64_amd64')),
('arm64', 'x86') : ('vcvarsarm64_x86.bat', ('bin', 'Hostarm64', 'arm64_x86')),
('arm64', 'arm') : ('vcvarsarm64_arm.bat', ('bin', 'Hostarm64', 'arm64_arm')),
('arm64', 'arm64') : ('vcvarsarm64.bat', ('bin', 'Hostarm64', 'arm64')),
} }
_GE2017_HOST_TARGET_CFG = _host_target_config_factory( _GE2022_HOST_TARGET_CFG = _host_target_config_factory(
label = 'GE2017', label = 'GE2022',
host_all_hosts = OrderedDict([
('amd64', ['amd64', 'x86']),
('x86', ['x86']),
('arm64', ['arm64', 'amd64', 'x86']),
('arm', ['x86']),
]),
host_all_targets = {
'amd64': ['amd64', 'x86', 'arm64', 'arm'],
'x86': ['x86', 'amd64', 'arm', 'arm64'],
'arm64': ['arm64', 'amd64', 'arm', 'x86'],
'arm': [],
},
host_def_targets = {
'amd64': ['amd64', 'x86'],
'x86': ['x86'],
'arm64': ['arm64', 'amd64', 'arm', 'x86'],
'arm': ['arm'],
},
)
# debug("_GE2022_HOST_TARGET_CFG: %s", _GE2022_HOST_TARGET_CFG)
# 14.2 (VS2019) to 14.1 (VS2017)
_LE2019_HOST_TARGET_BATCHFILE_CLPATHCOMPS = {
('amd64', 'amd64') : ('vcvars64.bat', ('bin', 'Hostx64', 'x64')),
('amd64', 'x86') : ('vcvarsamd64_x86.bat', ('bin', 'Hostx64', 'x86')),
('amd64', 'arm') : ('vcvarsamd64_arm.bat', ('bin', 'Hostx64', 'arm')),
('amd64', 'arm64') : ('vcvarsamd64_arm64.bat', ('bin', 'Hostx64', 'arm64')),
('x86', 'amd64') : ('vcvarsx86_amd64.bat', ('bin', 'Hostx86', 'x64')),
('x86', 'x86') : ('vcvars32.bat', ('bin', 'Hostx86', 'x86')),
('x86', 'arm') : ('vcvarsx86_arm.bat', ('bin', 'Hostx86', 'arm')),
('x86', 'arm64') : ('vcvarsx86_arm64.bat', ('bin', 'Hostx86', 'arm64')),
('arm64', 'amd64') : ('vcvars64.bat', ('bin', 'Hostx64', 'x64')),
('arm64', 'x86') : ('vcvarsamd64_x86.bat', ('bin', 'Hostx64', 'x86')),
('arm64', 'arm') : ('vcvarsamd64_arm.bat', ('bin', 'Hostx64', 'arm')),
('arm64', 'arm64') : ('vcvarsamd64_arm64.bat', ('bin', 'Hostx64', 'arm64')),
}
_LE2019_HOST_TARGET_CFG = _host_target_config_factory(
label = 'LE2019',
host_all_hosts = OrderedDict([ host_all_hosts = OrderedDict([
('amd64', ['amd64', 'x86']), ('amd64', ['amd64', 'x86']),
@ -310,20 +378,20 @@ _GE2017_HOST_TARGET_CFG = _host_target_config_factory(
host_all_targets = { host_all_targets = {
'amd64': ['amd64', 'x86', 'arm64', 'arm'], 'amd64': ['amd64', 'x86', 'arm64', 'arm'],
'x86': ['x86', 'amd64', 'arm', 'arm64'], 'x86': ['x86', 'amd64', 'arm', 'arm64'],
'arm64': [], 'arm64': ['arm64', 'amd64', 'arm', 'x86'],
'arm': [], 'arm': [],
}, },
host_def_targets = { host_def_targets = {
'amd64': ['amd64', 'x86'], 'amd64': ['amd64', 'x86'],
'x86': ['x86'], 'x86': ['x86'],
'arm64': ['arm64', 'arm'], 'arm64': ['arm64', 'amd64', 'arm', 'x86'],
'arm': ['arm'], 'arm': ['arm'],
}, },
) )
# debug("_GE2017_HOST_TARGET_CFG: %s", _GE2017_HOST_TARGET_CFG) # debug("_LE2019_HOST_TARGET_CFG: %s", _LE2019_HOST_TARGET_CFG)
# 14.0 (VS2015) to 8.0 (VS2005) # 14.0 (VS2015) to 8.0 (VS2005)
@ -345,6 +413,10 @@ _LE2015_HOST_TARGET_BATCHARG_CLPATHCOMPS = {
('x86', 'arm') : ('x86_arm', ('bin', 'x86_arm')), ('x86', 'arm') : ('x86_arm', ('bin', 'x86_arm')),
('x86', 'ia64') : ('x86_ia64', ('bin', 'x86_ia64')), ('x86', 'ia64') : ('x86_ia64', ('bin', 'x86_ia64')),
('arm64', 'amd64') : ('amd64', ('bin', 'amd64')),
('arm64', 'x86') : ('amd64_x86', ('bin', 'amd64_x86')),
('arm64', 'arm') : ('amd64_arm', ('bin', 'amd64_arm')),
('arm', 'arm') : ('arm', ('bin', 'arm')), ('arm', 'arm') : ('arm', ('bin', 'arm')),
('ia64', 'ia64') : ('ia64', ('bin', 'ia64')), ('ia64', 'ia64') : ('ia64', ('bin', 'ia64')),
@ -357,6 +429,7 @@ _LE2015_HOST_TARGET_CFG = _host_target_config_factory(
host_all_hosts = OrderedDict([ host_all_hosts = OrderedDict([
('amd64', ['amd64', 'x86']), ('amd64', ['amd64', 'x86']),
('x86', ['x86']), ('x86', ['x86']),
('arm64', ['amd64', 'x86']),
('arm', ['arm']), ('arm', ['arm']),
('ia64', ['ia64']), ('ia64', ['ia64']),
]), ]),
@ -364,6 +437,7 @@ _LE2015_HOST_TARGET_CFG = _host_target_config_factory(
host_all_targets = { host_all_targets = {
'amd64': ['amd64', 'x86', 'arm'], 'amd64': ['amd64', 'x86', 'arm'],
'x86': ['x86', 'amd64', 'arm', 'ia64'], 'x86': ['x86', 'amd64', 'arm', 'ia64'],
'arm64': ['amd64', 'x86', 'arm'],
'arm': ['arm'], 'arm': ['arm'],
'ia64': ['ia64'], 'ia64': ['ia64'],
}, },
@ -371,6 +445,7 @@ _LE2015_HOST_TARGET_CFG = _host_target_config_factory(
host_def_targets = { host_def_targets = {
'amd64': ['amd64', 'x86'], 'amd64': ['amd64', 'x86'],
'x86': ['x86'], 'x86': ['x86'],
'arm64': ['amd64', 'arm', 'x86'],
'arm': ['arm'], 'arm': ['arm'],
'ia64': ['ia64'], 'ia64': ['ia64'],
}, },
@ -391,16 +466,19 @@ _LE2003_HOST_TARGET_CFG = _host_target_config_factory(
host_all_hosts = OrderedDict([ host_all_hosts = OrderedDict([
('amd64', ['x86']), ('amd64', ['x86']),
('x86', ['x86']), ('x86', ['x86']),
('arm64', ['x86']),
]), ]),
host_all_targets = { host_all_targets = {
'amd64': ['x86'], 'amd64': ['x86'],
'x86': ['x86'], 'x86': ['x86'],
'arm64': ['x86'],
}, },
host_def_targets = { host_def_targets = {
'amd64': ['x86'], 'amd64': ['x86'],
'x86': ['x86'], 'x86': ['x86'],
'arm64': ['x86'],
}, },
) )
@ -444,28 +522,54 @@ def get_host_platform(host_platform):
return host return host
_native_host_architecture = None
def get_native_host_architecture():
"""Return the native host architecture."""
global _native_host_architecture
if _native_host_architecture is None:
try:
arch = common.read_reg(
r'SYSTEM\CurrentControlSet\Control\Session Manager\Environment\PROCESSOR_ARCHITECTURE'
)
except OSError:
arch = None
if not arch:
arch = platform.machine()
_native_host_architecture = arch
return _native_host_architecture
_native_host_platform = None _native_host_platform = None
def get_native_host_platform(): def get_native_host_platform():
global _native_host_platform global _native_host_platform
if _native_host_platform is None: if _native_host_platform is None:
arch = get_native_host_architecture()
_native_host_platform = get_host_platform(platform.machine()) _native_host_platform = get_host_platform(arch)
return _native_host_platform return _native_host_platform
def get_host_target(env, msvc_version, all_host_targets=False): def get_host_target(env, msvc_version, all_host_targets: bool=False):
vernum = float(get_msvc_version_numeric(msvc_version)) vernum = float(get_msvc_version_numeric(msvc_version))
vernum_int = int(vernum * 10)
if vernum > 14: if vernum_int >= 143:
# 14.1 (VS2017) and later # 14.3 (VS2022) and later
host_target_cfg = _GE2017_HOST_TARGET_CFG host_target_cfg = _GE2022_HOST_TARGET_CFG
elif 14 >= vernum >= 8: elif 143 > vernum_int >= 141:
# 14.2 (VS2019) to 14.1 (VS2017)
host_target_cfg = _LE2019_HOST_TARGET_CFG
elif 141 > vernum_int >= 80:
# 14.0 (VS2015) to 8.0 (VS2005) # 14.0 (VS2015) to 8.0 (VS2005)
host_target_cfg = _LE2015_HOST_TARGET_CFG host_target_cfg = _LE2015_HOST_TARGET_CFG
else: else: # 80 > vernum_int
# 7.1 (VS2003) and earlier # 7.1 (VS2003) and earlier
host_target_cfg = _LE2003_HOST_TARGET_CFG host_target_cfg = _LE2003_HOST_TARGET_CFG
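
For illustration (not part of the vendored diff): the selection above now compares an integer number of tenths (vernum_int) rather than floats; a condensed, hypothetical restatement of the ranges (the helper name is invented).

    def _config_label(msvc_version):
        vernum_int = int(float(msvc_version) * 10)
        if vernum_int >= 143:
            return 'GE2022'      # 14.3 (VS2022) and later
        if vernum_int >= 141:
            return 'LE2019'      # 14.1 (VS2017) to 14.2 (VS2019)
        if vernum_int >= 80:
            return 'LE2015'      # 8.0 (VS2005) to 14.0 (VS2015)
        return 'LE2003'          # 7.1 (VS2003) and earlier

    assert _config_label('14.2') == 'LE2019'
    assert _config_label('9.0') == 'LE2015'
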
@ -520,6 +624,53 @@ def get_host_target(env, msvc_version, all_host_targets=False):
return host_platform, target_platform, host_target_list return host_platform, target_platform, host_target_list
_arm32_process_arm64_host = None
def is_arm32_process_arm64_host():
global _arm32_process_arm64_host
if _arm32_process_arm64_host is None:
host = get_native_host_architecture()
host = _ARCH_TO_CANONICAL.get(host.lower(),'')
host_isarm64 = host == 'arm64'
process = sysconfig.get_platform()
process_isarm32 = process == 'win-arm32'
_arm32_process_arm64_host = host_isarm64 and process_isarm32
return _arm32_process_arm64_host
_check_skip_sendtelemetry = None
def _skip_sendtelemetry(env):
global _check_skip_sendtelemetry
if _check_skip_sendtelemetry is None:
if _ARM32_ON_ARM64_SKIP_SENDTELEMETRY and is_arm32_process_arm64_host():
_check_skip_sendtelemetry = True
else:
_check_skip_sendtelemetry = False
if not _check_skip_sendtelemetry:
return False
msvc_version = env.get('MSVC_VERSION') if env else None
if not msvc_version:
msvc_version = msvc_default_version(env)
if not msvc_version:
return False
vernum = float(get_msvc_version_numeric(msvc_version))
if vernum < 14.2: # VS2019
return False
# arm32 process, arm64 host, VS2019+
return True
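
For illustration (not part of the vendored diff): taken together, the two cached checks above only force VSCMD_SKIP_SENDTELEMETRY for an arm32 Python process on an arm64 host when a VS2019-or-later toolchain is being set up; a condensed, hypothetical restatement of the predicate.

    def _would_skip_sendtelemetry(native_host, process_platform, vernum):
        return (
            _ARM32_ON_ARM64_SKIP_SENDTELEMETRY
            and native_host == 'arm64'            # is_arm32_process_arm64_host: host side
            and process_platform == 'win-arm32'   # is_arm32_process_arm64_host: process side
            and vernum >= 14.2                    # VS2019 and later
        )

    assert _would_skip_sendtelemetry('arm64', 'win-arm32', 14.3)
    assert not _would_skip_sendtelemetry('amd64', 'win-amd64', 14.3)
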
# If you update this, update SupportedVSList in Tool/MSCommon/vs.py, and the # If you update this, update SupportedVSList in Tool/MSCommon/vs.py, and the
# MSVC_VERSION documentation in Tool/msvc.xml. # MSVC_VERSION documentation in Tool/msvc.xml.
_VCVER = [ _VCVER = [
@ -567,6 +718,7 @@ _VCVER_TO_PRODUCT_DIR = {
'14.0': [ '14.0': [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\14.0\Setup\VC\ProductDir')], (SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\14.0\Setup\VC\ProductDir')],
'14.0Exp': [ '14.0Exp': [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\WDExpress\14.0\Setup\VS\ProductDir'),
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VCExpress\14.0\Setup\VC\ProductDir')], (SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VCExpress\14.0\Setup\VC\ProductDir')],
'12.0': [ '12.0': [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\12.0\Setup\VC\ProductDir'), (SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\12.0\Setup\VC\ProductDir'),
@ -589,6 +741,9 @@ _VCVER_TO_PRODUCT_DIR = {
'9.0': [ '9.0': [
(SCons.Util.HKEY_CURRENT_USER, r'Microsoft\DevDiv\VCForPython\9.0\installdir',), (SCons.Util.HKEY_CURRENT_USER, r'Microsoft\DevDiv\VCForPython\9.0\installdir',),
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\9.0\Setup\VC\ProductDir',), (SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\9.0\Setup\VC\ProductDir',),
] if _VC90_Prefer_VCForPython else [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\9.0\Setup\VC\ProductDir',),
(SCons.Util.HKEY_CURRENT_USER, r'Microsoft\DevDiv\VCForPython\9.0\installdir',),
], ],
'9.0Exp': [ '9.0Exp': [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VCExpress\9.0\Setup\VC\ProductDir'), (SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VCExpress\9.0\Setup\VC\ProductDir'),
@ -716,9 +871,8 @@ def find_vc_pdir(env, msvc_version):
Raises: Raises:
UnsupportedVersion: if the version is not known by this file. UnsupportedVersion: if the version is not known by this file.
MissingConfiguration: found version but the directory is missing.
Both exceptions inherit from VisualCException. UnsupportedVersion inherits from VisualCException.
""" """
root = 'Software\\' root = 'Software\\'
@ -752,18 +906,22 @@ def find_vc_pdir(env, msvc_version):
except OSError: except OSError:
debug('no VC registry key %s', repr(key)) debug('no VC registry key %s', repr(key))
else: else:
if msvc_version == '9.0' and key.lower().endswith('\\vcforpython\\9.0\\installdir'): if msvc_version == '9.0':
if key.lower().endswith('\\vcforpython\\9.0\\installdir'):
# Visual C++ for Python registry key is installdir (root) not productdir (vc) # Visual C++ for Python registry key is installdir (root) not productdir (vc)
comps = os.path.join(comps, 'VC') comps = os.path.join(comps, 'VC')
elif msvc_version == '14.0Exp':
if key.lower().endswith('\\setup\\vs\\productdir'):
# Visual Studio 14.0 Express registry key is installdir (root) not productdir (vc)
comps = os.path.join(comps, 'VC')
debug('found VC in registry: %s', comps) debug('found VC in registry: %s', comps)
if os.path.exists(comps): if os.path.exists(comps):
return comps return comps
else: else:
debug('reg says dir is %s, but it does not exist. (ignoring)', comps) debug('reg says dir is %s, but it does not exist. (ignoring)', comps)
raise MissingConfiguration("registry dir {} not found on the filesystem".format(comps))
return None return None
- def find_batch_file(env, msvc_version, host_arch, target_arch):
+ def find_batch_file(msvc_version, host_arch, target_arch, pdir):
"""
Find the location of the batch script which should set up the compiler
for any TARGET_ARCH whose compilers were installed by Visual Studio/VCExpress
@@ -772,58 +930,79 @@ def find_batch_file(env, msvc_version, host_arch, target_arch):
scripts named with a host_target pair that calls vcvarsall.bat properly,
so use that and return an empty argument.
"""
- pdir = find_vc_pdir(env, msvc_version)
- if pdir is None:
- raise NoVersionFound("No version of Visual Studio found")
- debug('looking in %s', pdir)
# filter out e.g. "Exp" from the version name
- msvc_ver_numeric = get_msvc_version_numeric(msvc_version)
+ vernum = float(get_msvc_version_numeric(msvc_version))
- vernum = float(msvc_ver_numeric)
+ vernum_int = int(vernum * 10)
+ sdk_pdir = pdir
arg = ''
vcdir = None
+ clexe = None
- if vernum > 14:
+ if vernum_int >= 143:
- # 14.1 (VS2017) and later
+ # 14.3 (VS2022) and later
batfiledir = os.path.join(pdir, "Auxiliary", "Build")
- batfile, _ = _GE2017_HOST_TARGET_BATCHFILE_CLPATHCOMPS[(host_arch, target_arch)]
+ batfile, _ = _GE2022_HOST_TARGET_BATCHFILE_CLPATHCOMPS[(host_arch, target_arch)]
batfilename = os.path.join(batfiledir, batfile)
vcdir = pdir
- elif 14 >= vernum >= 8:
+ elif 143 > vernum_int >= 141:
+ # 14.2 (VS2019) to 14.1 (VS2017)
+ batfiledir = os.path.join(pdir, "Auxiliary", "Build")
+ batfile, _ = _LE2019_HOST_TARGET_BATCHFILE_CLPATHCOMPS[(host_arch, target_arch)]
+ batfilename = os.path.join(batfiledir, batfile)
+ vcdir = pdir
+ elif 141 > vernum_int >= 80:
# 14.0 (VS2015) to 8.0 (VS2005)
- arg, _ = _LE2015_HOST_TARGET_BATCHARG_CLPATHCOMPS[(host_arch, target_arch)]
+ arg, cl_path_comps = _LE2015_HOST_TARGET_BATCHARG_CLPATHCOMPS[(host_arch, target_arch)]
batfilename = os.path.join(pdir, "vcvarsall.bat")
if msvc_version == '9.0' and not os.path.exists(batfilename):
# Visual C++ for Python batch file is in installdir (root) not productdir (vc)
batfilename = os.path.normpath(os.path.join(pdir, os.pardir, "vcvarsall.bat"))
- else:
+ # Visual C++ for Python sdk batch files do not point to the VCForPython installation
+ sdk_pdir = None
+ clexe = os.path.join(pdir, *cl_path_comps, _CL_EXE_NAME)
+ else: # 80 > vernum_int
# 7.1 (VS2003) and earlier
pdir = os.path.join(pdir, "Bin")
batfilename = os.path.join(pdir, "vcvars32.bat")
+ clexe = os.path.join(pdir, _CL_EXE_NAME)
if not os.path.exists(batfilename):
- debug("Not found: %s", batfilename)
+ debug("batch file not found: %s", batfilename)
batfilename = None
+ if clexe and not os.path.exists(clexe):
+ debug("cl.exe not found: %s", clexe)
+ batfilename = None
+ return batfilename, arg, vcdir, sdk_pdir
+ def find_batch_file_sdk(host_arch, target_arch, sdk_pdir):
+ """
+ Find the location of the sdk batch script which should set up the compiler
+ for any TARGET_ARCH whose compilers were installed by Visual Studio/VCExpress
+ """
installed_sdks = get_installed_sdks()
for _sdk in installed_sdks:
sdk_bat_file = _sdk.get_sdk_vc_script(host_arch, target_arch)
if not sdk_bat_file:
- debug("batch file not found:%s", _sdk)
+ debug("sdk batch file not found:%s", _sdk)
else:
- sdk_bat_file_path = os.path.join(pdir, sdk_bat_file)
+ sdk_bat_file_path = os.path.join(sdk_pdir, sdk_bat_file)
if os.path.exists(sdk_bat_file_path):
debug('sdk_bat_file_path:%s', sdk_bat_file_path)
- return batfilename, arg, vcdir, sdk_bat_file_path
+ return sdk_bat_file_path
- return batfilename, arg, vcdir, None
+ return None
__INSTALLED_VCS_RUN = None
_VC_TOOLS_VERSION_FILE_PATH = ['Auxiliary', 'Build', 'Microsoft.VCToolsVersion.default.txt']
_VC_TOOLS_VERSION_FILE = os.sep.join(_VC_TOOLS_VERSION_FILE_PATH)
- def _check_cl_exists_in_vc_dir(env, vc_dir, msvc_version):
+ def _check_cl_exists_in_vc_dir(env, vc_dir, msvc_version) -> bool:
"""Return status of finding a cl.exe to use.
Locates cl in the vc_dir depending on TARGET_ARCH, HOST_ARCH and the
@@ -852,9 +1031,10 @@ def _check_cl_exists_in_vc_dir(env, vc_dir, msvc_version):
host_platform, target_platform, host_target_list = platforms
vernum = float(get_msvc_version_numeric(msvc_version))
+ vernum_int = int(vernum * 10)
# make sure the cl.exe exists meaning the tool is installed
- if vernum > 14:
+ if vernum_int >= 141:
# 14.1 (VS2017) and later
# 2017 and newer allowed multiple versions of the VC toolset to be
# installed at the same time. This changes the layout.
@@ -864,18 +1044,25 @@ def _check_cl_exists_in_vc_dir(env, vc_dir, msvc_version):
try:
with open(default_toolset_file) as f:
vc_specific_version = f.readlines()[0].strip()
- except IOError:
+ except OSError:
debug('failed to read %s', default_toolset_file)
return False
except IndexError:
debug('failed to find MSVC version in %s', default_toolset_file)
return False
+ if vernum_int >= 143:
+ # 14.3 (VS2022) and later
+ host_target_batchfile_clpathcomps = _GE2022_HOST_TARGET_BATCHFILE_CLPATHCOMPS
+ else:
+ # 14.2 (VS2019) to 14.1 (VS2017)
+ host_target_batchfile_clpathcomps = _LE2019_HOST_TARGET_BATCHFILE_CLPATHCOMPS
for host_platform, target_platform in host_target_list:
debug('host platform %s, target platform %s for version %s', host_platform, target_platform, msvc_version)
- batchfile_clpathcomps = _GE2017_HOST_TARGET_BATCHFILE_CLPATHCOMPS.get((host_platform, target_platform), None)
+ batchfile_clpathcomps = host_target_batchfile_clpathcomps.get((host_platform, target_platform), None)
if batchfile_clpathcomps is None:
debug('unsupported host/target platform combo: (%s,%s)', host_platform, target_platform)
continue
@@ -888,7 +1075,7 @@ def _check_cl_exists_in_vc_dir(env, vc_dir, msvc_version):
debug('found %s!', _CL_EXE_NAME)
return True
- elif 14 >= vernum >= 8:
+ elif 141 > vernum_int >= 80:
# 14.0 (VS2015) to 8.0 (VS2005)
for host_platform, target_platform in host_target_list:
@@ -908,7 +1095,7 @@ def _check_cl_exists_in_vc_dir(env, vc_dir, msvc_version):
debug('found %s', _CL_EXE_NAME)
return True
- elif 8 > vernum >= 6:
+ elif 80 > vernum_int >= 60:
# 7.1 (VS2003) to 6.0 (VS6)
# quick check for vc_dir/bin and vc_dir/ before walk
@@ -928,7 +1115,7 @@ def _check_cl_exists_in_vc_dir(env, vc_dir, msvc_version):
return False
else:
- # version not support return false
+ # version not supported return false
debug('unsupported MSVC version: %s', str(vernum))
return False
@@ -939,6 +1126,13 @@ def get_installed_vcs(env=None):
if __INSTALLED_VCS_RUN is not None:
return __INSTALLED_VCS_RUN
+ save_target_arch = env.get('TARGET_ARCH', UNDEFINED) if env else None
+ force_target = env and save_target_arch and save_target_arch != UNDEFINED
+ if force_target:
+ del env['TARGET_ARCH']
+ debug("delete env['TARGET_ARCH']")
installed_versions = []
for ver in _VCVER:
@@ -960,10 +1154,15 @@ def get_installed_vcs(env=None):
except VisualCException as e:
debug('did not find VC %s: caught exception %s', ver, str(e))
+ if force_target:
+ env['TARGET_ARCH'] = save_target_arch
+ debug("restore env['TARGET_ARCH']=%s", save_target_arch)
__INSTALLED_VCS_RUN = installed_versions
+ debug("__INSTALLED_VCS_RUN=%s", __INSTALLED_VCS_RUN)
return __INSTALLED_VCS_RUN
- def reset_installed_vcs():
+ def reset_installed_vcs() -> None:
"""Make it try again to find VC. This is just for the tests."""
global __INSTALLED_VCS_RUN
__INSTALLED_VCS_RUN = None
@@ -982,6 +1181,19 @@ def get_installed_vcs_components(env=None):
msvc_version_component_defs = [MSVC.Util.msvc_version_components(vcver) for vcver in vcs]
return msvc_version_component_defs
+ def _check_cl_exists_in_script_env(data):
+ """Find cl.exe in the script environment path."""
+ cl_path = None
+ if data and 'PATH' in data:
+ for p in data['PATH']:
+ cl_exe = os.path.join(p, _CL_EXE_NAME)
+ if os.path.exists(cl_exe):
+ cl_path = cl_exe
+ break
+ have_cl = True if cl_path else False
+ debug('have_cl: %s, cl_path: %s', have_cl, cl_path)
+ return have_cl, cl_path
# Running these batch files isn't cheap: most of the time spent in
# msvs.generate() is due to vcvars*.bat. In a build that uses "tools='msvs'"
# in multiple environments, for example:
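For readers unfamiliar with the caching these comments refer to: script_env() below memoizes the parsed vcvars output in memory, and can also persist it on disk when SCONS_CACHE_MSVC_CONFIG is set in the environment that launches SCons (see the CONFIG_CACHE references further down). A minimal, hedged sketch of enabling the on-disk cache from a wrapper script; the cache file path is only an example, any writable location works:

    # Hedged sketch: enable the MSVC configuration cache before launching SCons.
    # SCONS_CACHE_MSVC_CONFIG is the variable this module checks; the file path
    # below is an arbitrary example, not a required location.
    import os
    import subprocess

    env = os.environ.copy()
    env["SCONS_CACHE_MSVC_CONFIG"] = os.path.expanduser("~/.scons_msvc_cache.json")
    subprocess.run(["scons", "-Q"], env=env, check=False)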
@@ -1027,7 +1239,8 @@ def script_env(env, script, args=None):
cache_data = None
if cache_data is None:
- stdout = common.get_output(script, args)
+ skip_sendtelemetry = _skip_sendtelemetry(env)
+ stdout = common.get_output(script, args, skip_sendtelemetry=skip_sendtelemetry)
cache_data = common.parse_output(stdout)
# debug(stdout)
@@ -1044,12 +1257,7 @@ def script_env(env, script, args=None):
if script_errlog:
script_errmsg = '\n'.join(script_errlog)
- have_cl = False
+ have_cl, _ = _check_cl_exists_in_script_env(cache_data)
- if cache_data and 'PATH' in cache_data:
- for p in cache_data['PATH']:
- if os.path.exists(os.path.join(p, _CL_EXE_NAME)):
- have_cl = True
- break
debug(
'script=%s args=%s have_cl=%s, errors=%s',
@@ -1099,7 +1307,7 @@ def get_default_version(env):
return msvc_version
- def msvc_setup_env_once(env, tool=None):
+ def msvc_setup_env_once(env, tool=None) -> None:
try:
has_run = env["MSVC_SETUP_RUN"]
except KeyError:
@@ -1130,6 +1338,15 @@ def msvc_find_valid_batch_script(env, version):
get it right.
"""
+ # Find the product directory
+ pdir = None
+ try:
+ pdir = find_vc_pdir(env, version)
+ except UnsupportedVersion:
+ # Unsupported msvc version (raise MSVCArgumentError?)
+ pass
+ debug('product directory: version=%s, pdir=%s', version, pdir)
# Find the host, target, and all candidate (host, target) platform combinations:
platforms = get_host_target(env, version)
debug("host_platform %s, target_platform %s host_target_list %s", *platforms)
@@ -1137,6 +1354,12 @@ def msvc_find_valid_batch_script(env, version):
d = None
version_installed = False
+ if pdir:
+ # Query all candidate sdk (host, target, sdk_pdir) after vc_script pass if necessary
+ sdk_queries = []
for host_arch, target_arch, in host_target_list:
# Set to current arch.
env['TARGET_ARCH'] = target_arch
@@ -1144,8 +1367,8 @@ def msvc_find_valid_batch_script(env, version):
# Try to locate a batch file for this host/target platform combo
try:
- (vc_script, arg, vc_dir, sdk_script) = find_batch_file(env, version, host_arch, target_arch)
+ (vc_script, arg, vc_dir, sdk_pdir) = find_batch_file(version, host_arch, target_arch, pdir)
- debug('vc_script:%s vc_script_arg:%s sdk_script:%s', vc_script, arg, sdk_script)
+ debug('vc_script:%s vc_script_arg:%s', vc_script, arg)
version_installed = True
except VisualCException as e:
msg = str(e)
@@ -1153,33 +1376,60 @@ def msvc_find_valid_batch_script(env, version):
version_installed = False
continue
- # Try to use the located batch file for this host/target platform combo
+ # Save (host, target, sdk_pdir) platform combo for sdk queries
- debug('use_script 2 %s, args:%s', repr(vc_script), arg)
+ if sdk_pdir:
- found = None
+ sdk_query = (host_arch, target_arch, sdk_pdir)
- if vc_script:
+ if sdk_query not in sdk_queries:
- arg = MSVC.ScriptArguments.msvc_script_arguments(env, version, vc_dir, arg)
+ debug('save sdk_query host=%s, target=%s, sdk_pdir=%s', host_arch, target_arch, sdk_pdir)
- try:
+ sdk_queries.append(sdk_query)
- d = script_env(env, vc_script, args=arg)
- found = vc_script
+ if not vc_script:
- except BatchFileExecutionError as e:
- debug('use_script 3: failed running VC script %s: %s: Error:%s', repr(vc_script), arg, e)
- vc_script=None
- continue
- if not vc_script and sdk_script:
- debug('use_script 4: trying sdk script: %s', sdk_script)
- try:
- d = script_env(env, sdk_script)
- found = sdk_script
- except BatchFileExecutionError as e:
- debug('use_script 5: failed running SDK script %s: Error:%s', repr(sdk_script), e)
- continue
- elif not vc_script and not sdk_script:
- debug('use_script 6: Neither VC script nor SDK script found')
continue
- debug("Found a working script/target: %s/%s", repr(found), arg)
+ # Try to use the located batch file for this host/target platform combo
+ arg = MSVC.ScriptArguments.msvc_script_arguments(env, version, vc_dir, arg)
+ debug('trying vc_script:%s, vc_script_args:%s', repr(vc_script), arg)
+ try:
+ d = script_env(env, vc_script, args=arg)
+ except BatchFileExecutionError as e:
+ debug('failed vc_script:%s, vc_script_args:%s, error:%s', repr(vc_script), arg, e)
+ vc_script = None
+ continue
+ have_cl, _ = _check_cl_exists_in_script_env(d)
+ if not have_cl:
+ debug('skip cl.exe not found vc_script:%s, vc_script_args:%s', repr(vc_script), arg)
+ continue
+ debug("Found a working script/target: %s/%s", repr(vc_script), arg)
break # We've found a working target_platform, so stop looking
+ if not d:
+ for host_arch, target_arch, sdk_pdir in sdk_queries:
+ # Set to current arch.
+ env['TARGET_ARCH'] = target_arch
+ sdk_script = find_batch_file_sdk(host_arch, target_arch, sdk_pdir)
+ if not sdk_script:
+ continue
+ # Try to use the sdk batch file for this (host, target, sdk_pdir) combo
+ debug('trying sdk_script:%s', repr(sdk_script))
+ try:
+ d = script_env(env, sdk_script)
+ version_installed = True
+ except BatchFileExecutionError as e:
+ debug('failed sdk_script:%s, error=%s', repr(sdk_script), e)
+ continue
+ have_cl, _ = _check_cl_exists_in_script_env(d)
+ if not have_cl:
+ debug('skip cl.exe not found sdk_script:%s', repr(sdk_script))
+ continue
+ debug("Found a working script/target: %s", repr(sdk_script))
+ break # We've found a working script, so stop looking
# If we cannot find a viable installed compiler, reset the TARGET_ARCH
# To it's initial value
if not d:
@@ -1206,8 +1456,6 @@ def msvc_find_valid_batch_script(env, version):
return d
- _UNDEFINED = object()
def get_use_script_use_settings(env):
# use_script use_settings return values action
@@ -1217,9 +1465,9 @@ def get_use_script_use_settings(env):
# None (documentation) or evaluates False (code): bypass detection
# need to distinguish between undefined and None
- use_script = env.get('MSVC_USE_SCRIPT', _UNDEFINED)
+ use_script = env.get('MSVC_USE_SCRIPT', UNDEFINED)
- if use_script != _UNDEFINED:
+ if use_script != UNDEFINED:
# use_script defined, use_settings ignored (not type checked)
return use_script, None
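The comment table above is the contract for the two user-facing overrides handled here: MSVC_USE_SCRIPT (run a specific batch file, or a value that evaluates False to bypass detection) and MSVC_USE_SETTINGS (inject a ready-made environment dictionary). A hedged SConstruct sketch of the three cases; the batch-file path and dictionary contents are placeholders, only the variable names come from this module:

    # Assumed example values; MSVC_USE_SCRIPT, MSVC_USE_SCRIPT_ARGS and
    # MSVC_USE_SETTINGS are the construction variables read by this code.
    env_script = Environment(
        MSVC_USE_SCRIPT=r"C:\path\to\vcvars64.bat",   # run this script instead of detecting MSVC
        MSVC_USE_SCRIPT_ARGS="",                      # optional arguments passed to the script
    )
    env_settings = Environment(
        MSVC_USE_SETTINGS={                           # merged into env['ENV'] without running a script
            "PATH": [r"C:\example\bin"],
            "INCLUDE": [r"C:\example\include"],
            "LIB": [r"C:\example\lib"],
        },
    )
    env_bypass = Environment(MSVC_USE_SCRIPT=False)   # evaluates False: bypass MSVC detection entirely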
@@ -1251,7 +1499,7 @@ def msvc_setup_env(env):
if SCons.Util.is_String(use_script):
use_script = use_script.strip()
if not os.path.exists(use_script):
- raise MSVCScriptNotFound('Script specified by MSVC_USE_SCRIPT not found: "{}"'.format(use_script))
+ raise MSVCScriptNotFound(f'Script specified by MSVC_USE_SCRIPT not found: "{use_script}"')
args = env.subst('$MSVC_USE_SCRIPT_ARGS')
debug('use_script 1 %s %s', repr(use_script), repr(args))
d = script_env(env, use_script, args)
@@ -1262,7 +1510,7 @@ def msvc_setup_env(env):
return d
elif use_settings is not None:
if not SCons.Util.is_Dict(use_settings):
- error_msg = 'MSVC_USE_SETTINGS type error: expected a dictionary, found {}'.format(type(use_settings).__name__)
+ error_msg = f'MSVC_USE_SETTINGS type error: expected a dictionary, found {type(use_settings).__name__}'
raise MSVCUseSettingsError(error_msg)
d = use_settings
debug('use_settings %s', d)
@@ -1273,18 +1521,30 @@ def msvc_setup_env(env):
SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg)
return None
+ found_cl_path = None
+ found_cl_envpath = None
+ seen_path = False
for k, v in d.items():
+ if not seen_path and k == 'PATH':
+ seen_path = True
+ found_cl_path = SCons.Util.WhereIs('cl', v)
+ found_cl_envpath = SCons.Util.WhereIs('cl', env['ENV'].get(k, []))
env.PrependENVPath(k, v, delete_existing=True)
debug("env['ENV']['%s'] = %s", k, env['ENV'][k])
- # final check to issue a warning if the compiler is not present
+ debug("cl paths: d['PATH']=%s, ENV['PATH']=%s", repr(found_cl_path), repr(found_cl_envpath))
- if not find_program_path(env, 'cl'):
- debug("did not find %s", _CL_EXE_NAME)
+ # final check to issue a warning if the requested compiler is not present
+ if not found_cl_path:
+ warn_msg = "Could not find requested MSVC compiler 'cl'."
if CONFIG_CACHE:
- propose = "SCONS_CACHE_MSVC_CONFIG caching enabled, remove cache file {} if out of date.".format(CONFIG_CACHE)
+ warn_msg += f" SCONS_CACHE_MSVC_CONFIG caching enabled, remove cache file {CONFIG_CACHE} if out of date."
else:
- propose = "It may need to be installed separately with Visual Studio."
+ warn_msg += " It may need to be installed separately with Visual Studio."
- warn_msg = "Could not find MSVC compiler 'cl'. {}".format(propose)
+ if found_cl_envpath:
+ warn_msg += " A 'cl' was found on the scons ENV path which may be erroneous."
+ debug(warn_msg)
SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg)
def msvc_exists(env=None, version=None):
@@ -1293,7 +1553,6 @@ def msvc_exists(env=None, version=None):
rval = len(vcs) > 0
else:
rval = version in vcs
- if not rval:
debug('version=%s, return=%s', repr(version), rval)
return rval
@@ -1340,7 +1599,7 @@ def msvc_setup_env_tool(env=None, version=None, tool=None):
rval = True
return rval
- def msvc_sdk_versions(version=None, msvc_uwp_app=False):
+ def msvc_sdk_versions(version=None, msvc_uwp_app: bool=False):
debug('version=%s, msvc_uwp_app=%s', repr(version), repr(msvc_uwp_app))
rval = []
@@ -1354,13 +1613,13 @@ def msvc_sdk_versions(version=None, msvc_uwp_app=False):
version_def = MSVC.Util.msvc_extended_version_components(version)
if not version_def:
- msg = 'Unsupported version {}'.format(repr(version))
+ msg = f'Unsupported version {version!r}'
raise MSVCArgumentError(msg)
rval = MSVC.WinSDK.get_msvc_sdk_version_list(version, msvc_uwp_app)
return rval
- def msvc_toolset_versions(msvc_version=None, full=True, sxs=False):
+ def msvc_toolset_versions(msvc_version=None, full: bool=True, sxs: bool=False):
debug('msvc_version=%s, full=%s, sxs=%s', repr(msvc_version), repr(full), repr(sxs))
env = None
@@ -1374,7 +1633,7 @@ def msvc_toolset_versions(msvc_version=None, full=True, sxs=False):
return rval
if msvc_version not in _VCVER:
- msg = 'Unsupported msvc version {}'.format(repr(msvc_version))
+ msg = f'Unsupported msvc version {msvc_version!r}'
raise MSVCArgumentError(msg)
vc_dir = find_vc_pdir(env, msvc_version)
@@ -1399,7 +1658,7 @@ def msvc_toolset_versions_spectre(msvc_version=None):
return rval
if msvc_version not in _VCVER:
- msg = 'Unsupported msvc version {}'.format(repr(msvc_version))
+ msg = f'Unsupported msvc version {msvc_version!r}'
raise MSVCArgumentError(msg)
vc_dir = find_vc_pdir(env, msvc_version)
@@ -1410,7 +1669,7 @@ def msvc_toolset_versions_spectre(msvc_version=None):
rval = MSVC.ScriptArguments._msvc_toolset_versions_spectre_internal(msvc_version, vc_dir)
return rval
- def msvc_query_version_toolset(version=None, prefer_newest=True):
+ def msvc_query_version_toolset(version=None, prefer_newest: bool=True):
"""
Returns an msvc version and a toolset version given a version
specification.
@@ -1471,13 +1730,13 @@ def msvc_query_version_toolset(version=None, prefer_newest=True):
version_def = MSVC.Util.msvc_extended_version_components(version)
if not version_def:
- msg = 'Unsupported msvc version {}'.format(repr(version))
+ msg = f'Unsupported msvc version {version!r}'
raise MSVCArgumentError(msg)
if version_def.msvc_suffix:
if version_def.msvc_verstr != version_def.msvc_toolset_version:
# toolset version with component suffix
- msg = 'Unsupported toolset version {}'.format(repr(version))
+ msg = f'Unsupported toolset version {version!r}'
raise MSVCArgumentError(msg)
if version_def.msvc_vernum > 14.0:
@@ -1556,11 +1815,11 @@ def msvc_query_version_toolset(version=None, prefer_newest=True):
)
if version_def.msvc_verstr == msvc_toolset_version:
- msg = 'MSVC version {} was not found'.format(repr(version))
+ msg = f'MSVC version {version!r} was not found'
MSVC.Policy.msvc_notfound_handler(None, msg)
return msvc_version, msvc_toolset_version
- msg = 'MSVC toolset version {} not found'.format(repr(version))
+ msg = f'MSVC toolset version {version!r} not found'
raise MSVCToolsetVersionNotFound(msg)
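The hunks above only modernize the error messages of the public query helpers (msvc_sdk_versions, msvc_toolset_versions, msvc_toolset_versions_spectre, msvc_query_version_toolset). For orientation, a hedged sketch of how these helpers can be called; it is only meaningful on Windows with at least one MSVC installed, and the '14.3' specification is just an example:

    # Hedged sketch: calling the query helpers defined in this module.
    from SCons.Tool.MSCommon import vc

    print(vc.msvc_sdk_versions(version=None, msvc_uwp_app=False))            # SDKs for the default msvc version
    print(vc.msvc_toolset_versions(msvc_version='14.3', full=True, sxs=False))
    print(vc.msvc_query_version_toolset(version='14.3', prefer_newest=True))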
@@ -46,7 +46,7 @@ class VisualStudio:
An abstract base class for trying to find installed versions of
Visual Studio.
"""
- def __init__(self, version, **kw):
+ def __init__(self, version, **kw) -> None:
self.version = version
kw['vc_version'] = kw.get('vc_version', version)
kw['sdk_version'] = kw.get('sdk_version', version)
@@ -148,7 +148,7 @@ class VisualStudio:
self._cache['supported_arch'] = self.supported_arch
return self.supported_arch
- def reset(self):
+ def reset(self) -> None:
self._cache = {}
# The list of supported Visual Studio versions we know how to detect.
@@ -207,7 +207,7 @@ SupportedVSList = [
executable_path=r'Common7\IDE\devenv.com',
# should be a fallback, prefer use vswhere installationPath
batch_file_path=r'Common7\Tools\VsDevCmd.bat',
- supported_arch=['x86', 'amd64', "arm"],
+ supported_arch=['x86', 'amd64', "arm", 'arm64'],
),
# Visual Studio 2019
@@ -219,7 +219,7 @@ SupportedVSList = [
executable_path=r'Common7\IDE\devenv.com',
# should be a fallback, prefer use vswhere installationPath
batch_file_path=r'Common7\Tools\VsDevCmd.bat',
- supported_arch=['x86', 'amd64', "arm"],
+ supported_arch=['x86', 'amd64', "arm", 'arm64'],
),
# Visual Studio 2017
@@ -231,7 +231,7 @@ SupportedVSList = [
executable_path=r'Common7\IDE\devenv.com',
# should be a fallback, prefer use vswhere installationPath
batch_file_path=r'Common7\Tools\VsDevCmd.bat',
- supported_arch=['x86', 'amd64', "arm"],
+ supported_arch=['x86', 'amd64', "arm", 'arm64'],
),
# Visual C++ 2017 Express Edition (for Desktop)
@@ -243,7 +243,7 @@ SupportedVSList = [
executable_path=r'Common7\IDE\WDExpress.exe',
# should be a fallback, prefer use vswhere installationPath
batch_file_path=r'Common7\Tools\VsDevCmd.bat',
- supported_arch=['x86', 'amd64', "arm"],
+ supported_arch=['x86', 'amd64', "arm", 'arm64'],
),
# Visual Studio 2015
@@ -439,7 +439,7 @@ def get_installed_visual_studios(env=None):
InstalledVSMap[vs.version] = vs
return InstalledVSList
- def reset_installed_visual_studios():
+ def reset_installed_visual_studios() -> None:
global InstalledVSList
global InstalledVSMap
InstalledVSList = None
@@ -564,12 +564,12 @@ def get_default_arch(env):
return arch
- def merge_default_version(env):
+ def merge_default_version(env) -> None:
version = get_default_version(env)
arch = get_default_arch(env)
# TODO: refers to versions and arch which aren't defined; called nowhere. Drop?
- def msvs_setup_env(env):
+ def msvs_setup_env(env) -> None:
msvs = get_vs_by_version(version)
if msvs is None:
return
@@ -79,14 +79,14 @@ def getPharLapVersion():
include_path = os.path.join(getPharLapPath(), os.path.normpath("include/embkern.h"))
if not os.path.exists(include_path):
raise SCons.Errors.UserError("Cannot find embkern.h in ETS include directory.\nIs Phar Lap ETS installed properly?")
- with open(include_path, 'r') as f:
+ with open(include_path) as f:
mo = REGEX_ETS_VER.search(f.read())
if mo:
return int(mo.group(1))
# Default return for Phar Lap 9.1
return 910
- def addPharLapPaths(env):
+ def addPharLapPaths(env) -> None:
"""This function adds the path to the Phar Lap binaries, includes,
and libraries, if they are not already there."""
ph_path = getPharLapPath()
@@ -105,7 +105,7 @@ TOOL_ALIASES = {
class Tool:
- def __init__(self, name, toolpath=None, **kwargs):
+ def __init__(self, name, toolpath=None, **kwargs) -> None:
if toolpath is None:
toolpath = []
@@ -241,7 +241,7 @@ class Tool:
msg = "No tool named '{self.name}': {e}"
raise SCons.Errors.SConsEnvironmentError(msg)
- def __call__(self, env, *args, **kw):
+ def __call__(self, env, *args, **kw) -> None:
if self.init_kw is not None:
# Merge call kws into init kws;
# but don't bash self.init_kw.
@@ -264,7 +264,7 @@ class Tool:
self.generate(env, *args, **kw)
- def __str__(self):
+ def __str__(self) -> str:
return self.name
@@ -324,7 +324,7 @@ def createStaticLibBuilder(env):
return static_lib
- def createSharedLibBuilder(env, shlib_suffix='$_SHLIBSUFFIX'):
+ def createSharedLibBuilder(env, shlib_suffix: str='$_SHLIBSUFFIX'):
"""This is a utility function that creates the SharedLibrary
Builder in an Environment if it is not there already.
@@ -354,7 +354,7 @@ def createSharedLibBuilder(env, shlib_suffix='$_SHLIBSUFFIX'):
return shared_lib
- def createLoadableModuleBuilder(env, loadable_module_suffix='$_LDMODULESUFFIX'):
+ def createLoadableModuleBuilder(env, loadable_module_suffix: str='$_LDMODULESUFFIX'):
"""This is a utility function that creates the LoadableModule
Builder in an Environment if it is not there already.
@@ -557,7 +557,7 @@ class ToolInitializerMethod:
environment in place of this particular instance.
"""
- def __init__(self, name, initializer):
+ def __init__(self, name, initializer) -> None:
"""
Note: we store the tool name as __name__ so it can be used by
the class that attaches this to a construction environment.
@@ -608,7 +608,7 @@ class ToolInitializer:
that we want to use to delay Tool searches until necessary.
"""
- def __init__(self, env, tools, names):
+ def __init__(self, env, tools, names) -> None:
if not SCons.Util.is_List(tools):
tools = [tools]
if not SCons.Util.is_List(names):
@@ -622,7 +622,7 @@ class ToolInitializer:
self.methods[name] = method
env.AddMethod(method)
- def remove_methods(self, env):
+ def remove_methods(self, env) -> None:
"""
Removes the methods that were added by the tool initialization
so we no longer copy and re-bind them when the construction
@@ -631,7 +631,7 @@ class ToolInitializer:
for method in self.methods.values():
env.RemoveMethod(method)
- def apply_tools(self, env):
+ def apply_tools(self, env) -> None:
"""
Searches the list of associated Tool modules for one that
exists, and applies that to the construction environment.
@@ -649,7 +649,7 @@
# the ToolInitializer class.
- def Initializers(env):
+ def Initializers(env) -> None:
ToolInitializer(env, ['install'], ['_InternalInstall', '_InternalInstallAs', '_InternalInstallVersionedLib'])
def Install(self, *args, **kw):
@@ -824,7 +824,7 @@ def tool_list(platform, env):
return [x for x in tools if x]
- def find_program_path(env, key_program, default_paths=None, add_path=False) -> Optional[str]:
+ def find_program_path(env, key_program, default_paths=None, add_path: bool=False) -> Optional[str]:
"""
Find the location of a tool using various means.
@@ -44,7 +44,7 @@ def get_xlc(env):
xlc = env.get('CC', 'xlc')
return SCons.Platform.aix.get_xlc(env, xlc, packages)
- def generate(env):
+ def generate(env) -> None:
"""Add Builders and construction variables for xlc / Visual Age
suite to an Environment."""
path, _cc, version = get_xlc(env)
@@ -47,7 +47,7 @@ def get_xlc(env):
xlc = env.get('CXX', 'xlC')
return SCons.Platform.aix.get_xlc(env, xlc, packages)
- def generate(env):
+ def generate(env) -> None:
"""Add Builders and construction variables for xlC / Visual Age
suite to an Environment."""
path, _cxx, version = get_xlc(env)
@@ -50,7 +50,7 @@ def get_xlf77(env):
#return SCons.Platform.aix.get_xlc(env, xlf77, xlf77_r, packages)
return (None, xlf77, xlf77_r, None)
- def generate(env):
+ def generate(env) -> None:
"""
Add Builders and construction variables for the Visual Age FORTRAN
compiler to an Environment.
@@ -48,7 +48,7 @@ def smart_linkflags(source, target, env, for_signature):
return ''
- def generate(env):
+ def generate(env) -> None:
"""
Add Builders and construction variables for Visual Age linker to
an Environment.
@@ -79,7 +79,7 @@ def _applelib_check_valid_version(version_string):
return True, ""
- def _applelib_currentVersionFromSoVersion(source, target, env, for_signature):
+ def _applelib_currentVersionFromSoVersion(source, target, env, for_signature) -> str:
"""
A generator function to create the -Wl,-current_version flag if needed.
If env['APPLELINK_NO_CURRENT_VERSION'] contains a true value no flag will be generated
@@ -110,7 +110,7 @@ def _applelib_currentVersionFromSoVersion(source, target, env, for_signature):
return "-Wl,-current_version,%s" % version_string
- def _applelib_compatVersionFromSoVersion(source, target, env, for_signature):
+ def _applelib_compatVersionFromSoVersion(source, target, env, for_signature) -> str:
"""
A generator function to create the -Wl,-compatibility_version flag if needed.
If env['APPLELINK_NO_COMPATIBILITY_VERSION'] contains a true value no flag will be generated
@@ -141,7 +141,7 @@ def _applelib_compatVersionFromSoVersion(source, target, env, for_signature):
return "-Wl,-compatibility_version,%s" % version_string
- def _applelib_soname(target, source, env, for_signature):
+ def _applelib_soname(target, source, env, for_signature) -> str:
"""
Override default _soname() function from SCons.Tools.linkCommon.SharedLibrary.
Apple's file naming for versioned shared libraries puts the version string before
@@ -160,7 +160,7 @@ def _applelib_soname(target, source, env, for_signature):
return "$SHLIBPREFIX$_get_shlib_stem$_SHLIBSOVERSION${SHLIBSUFFIX}"
- def generate(env):
+ def generate(env) -> None:
"""Add Builders and construction variables for applelink to an
Environment."""
link.generate(env)
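The generator functions above derive the -Wl,-current_version and -Wl,-compatibility_version flags and the versioned library name from SHLIBVERSION unless the corresponding APPLELINK_* overrides are set. A hedged SConstruct sketch of driving them; the target name and version numbers are placeholders, the variable names are the ones these generators consult:

    # Assumed example values; only meaningful on macOS with the applelink tool.
    env = Environment(tools=['default', 'applelink'])
    env.SharedLibrary(
        target='example',
        source=['example.c'],
        SHLIBVERSION='1.2.3',               # drives both version flags by default
        APPLELINK_CURRENT_VERSION='1.2.4',  # optional explicit override of -current_version
        # APPLELINK_NO_COMPATIBILITY_VERSION=True,  # suppress -compatibility_version entirely
    )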
@@ -38,7 +38,7 @@ import SCons.Tool
import SCons.Util
- def generate(env):
+ def generate(env) -> None:
"""Add Builders and construction variables for ar to an Environment."""
SCons.Tool.createStaticLibBuilder(env)
@@ -46,7 +46,7 @@ if SCons.Util.case_sensitive_suffixes('.s', '.S'):
else:
ASSuffixes.extend(['.S'])
- def generate(env):
+ def generate(env) -> None:
"""Add Builders and construction variables for as to an Environment."""
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
@@ -44,7 +44,7 @@ def findIt(program, env):
env.PrependENVPath('PATH', dir)
return borwin
- def generate(env):
+ def generate(env) -> None:
findIt('bcc32', env)
"""Add Builders and construction variables for bcc to an
Environment."""
@@ -40,7 +40,7 @@ CSuffixes = ['.c', '.m']
if not SCons.Util.case_sensitive_suffixes('.c', '.C'):
CSuffixes.append('.C')
- def add_common_cc_variables(env):
+ def add_common_cc_variables(env) -> None:
"""
Add underlying common "C compiler" variables that
are used by multiple tools (specifically, c++).
@@ -64,7 +64,7 @@ def add_common_cc_variables(env):
compilers = ['cc']
- def generate(env):
+ def generate(env) -> None:
"""
Add Builders and construction variables for C compilers to an Environment.
"""
@@ -33,7 +33,7 @@ selection method.
import os
import re
- import subprocess
+ from subprocess import DEVNULL, PIPE
import SCons.Util
import SCons.Tool.cc
@@ -44,17 +44,18 @@ from SCons.Tool.MSCommon import msvc_setup_env_once
compilers = ['clang']
- def generate(env):
+ def generate(env) -> None:
"""Add Builders and construction variables for clang to an Environment."""
SCons.Tool.cc.generate(env)
if env['PLATFORM'] == 'win32':
# Ensure that we have a proper path for clang
- clang = SCons.Tool.find_program_path(env, compilers[0],
- default_paths=get_clang_install_dirs(env['PLATFORM']))
+ clang = SCons.Tool.find_program_path(
+ env, compilers[0], default_paths=get_clang_install_dirs(env['PLATFORM'])
+ )
if clang:
clang_bin_dir = os.path.dirname(clang)
- env.AppendENVPath('PATH', clang_bin_dir)
+ env.AppendENVPath("PATH", clang_bin_dir)
# Set-up ms tools paths
msvc_setup_env_once(env)
@@ -67,24 +68,19 @@ def generate(env):
# determine compiler version
if env['CC']:
- # pipe = SCons.Action._subproc(env, [env['CC'], '-dumpversion'],
- pipe = SCons.Action._subproc(env, [env['CC'], '--version'],
- stdin='devnull',
- stderr='devnull',
- stdout=subprocess.PIPE)
- if pipe.wait() != 0: return
- # clang -dumpversion is of no use
- with pipe.stdout:
- line = pipe.stdout.readline()
- line = line.decode()
- match = re.search(r'clang +version +([0-9]+(?:\.[0-9]+)+)', line)
- if match:
- env['CCVERSION'] = match.group(1)
+ kw = {
+ 'stdout': PIPE,
+ 'stderr': DEVNULL,
+ 'universal_newlines': True,
+ }
+ cp = SCons.Action.scons_subproc_run(env, [env['CC'], '-dumpversion'], **kw)
+ line = cp.stdout
+ if line:
+ env['CCVERSION'] = line
env['CCDEPFLAGS'] = '-MMD -MF ${TARGET}.d'
env["NINJA_DEPFILE_PARSE_FORMAT"] = 'clang'
def exists(env):
return env.Detect(compilers)
@@ -33,7 +33,7 @@ selection method.
import os.path
import re
- import subprocess
+ from subprocess import DEVNULL, PIPE
import SCons.Tool
import SCons.Util
@@ -44,7 +44,7 @@ from SCons.Tool.MSCommon import msvc_setup_env_once
compilers = ['clang++']
- def generate(env):
+ def generate(env) -> None:
"""Add Builders and construction variables for clang++ to an Environment."""
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
@@ -63,7 +63,9 @@ def generate(env):
env['SHOBJSUFFIX'] = '.pic.o'
elif env['PLATFORM'] == 'win32':
# Ensure that we have a proper path for clang++
- clangxx = SCons.Tool.find_program_path(env, compilers[0], default_paths=get_clang_install_dirs(env['PLATFORM']))
+ clangxx = SCons.Tool.find_program_path(
+ env, compilers[0], default_paths=get_clang_install_dirs(env['PLATFORM'])
+ )
if clangxx:
clangxx_bin_dir = os.path.dirname(clangxx)
env.AppendENVPath('PATH', clangxx_bin_dir)
@@ -71,23 +73,17 @@ def generate(env):
# Set-up ms tools paths
msvc_setup_env_once(env)
# determine compiler version
if env['CXX']:
- pipe = SCons.Action._subproc(env, [env['CXX'], '--version'],
- stdin='devnull',
- stderr='devnull',
- stdout=subprocess.PIPE)
- if pipe.wait() != 0:
- return
- # clang -dumpversion is of no use
- with pipe.stdout:
- line = pipe.stdout.readline()
- line = line.decode()
- match = re.search(r'clang +version +([0-9]+(?:\.[0-9]+)+)', line)
- if match:
- env['CXXVERSION'] = match.group(1)
+ kw = {
+ 'stdout': PIPE,
+ 'stderr': DEVNULL,
+ 'universal_newlines': True,
+ }
+ cp = SCons.Action.scons_subproc_run(env, [env['CXX'], '-dumpversion'], **kw)
+ line = cp.stdout
+ if line:
+ env['CXXVERSION'] = line
env['CCDEPFLAGS'] = '-MMD -MF ${TARGET}.d'
env["NINJA_DEPFILE_PARSE_FORMAT"] = 'clang'
@@ -1,12 +1,5 @@
- """
+ # MIT License
- Implements the ability for SCons to emit a compilation database for the MongoDB project. See
+ #
- http://clang.llvm.org/docs/JSONCompilationDatabase.html for details on what a compilation
- database is, and why you might want one. The only user visible entry point here is
- 'env.CompilationDatabase'. This method takes an optional 'target' to name the file that
- should hold the compilation database, otherwise, the file defaults to compile_commands.json,
- which is the name that most clang tools search for by default.
- """
# Copyright 2020 MongoDB Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
@@ -27,7 +20,17 @@ which is the name that most clang tools search for by default.
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ #
+ """Compilation Database
+ Implements the ability for SCons to emit a compilation database for a
+ project. See https://clang.llvm.org/docs/JSONCompilationDatabase.html
+ for details on what a compilation database is, and why you might want one.
+ The only user visible entry point here is ``env.CompilationDatabase``.
+ This method takes an optional *target* to name the file that should hold
+ the compilation database, otherwise, the file defaults to
+ ``compile_commands.json``, the name that most clang tools search for by default.
+ """
import json
import itertools
@@ -51,12 +54,12 @@ __COMPILATION_DB_ENTRIES = []
# We make no effort to avoid rebuilding the entries. Someday, perhaps we could and even
# integrate with the cache, but there doesn't seem to be much call for it.
class __CompilationDbNode(SCons.Node.Python.Value):
- def __init__(self, value):
+ def __init__(self, value) -> None:
SCons.Node.Python.Value.__init__(self, value)
self.Decider(changed_since_last_build_node)
- def changed_since_last_build_node(child, target, prev_ni, node):
+ def changed_since_last_build_node(child, target, prev_ni, node) -> bool:
""" Dummy decider to force always building"""
return True
@@ -111,7 +114,7 @@ class CompDBTEMPFILE(TempFileMunge):
return self.cmd
- def compilation_db_entry_action(target, source, env, **kw):
+ def compilation_db_entry_action(target, source, env, **kw) -> None:
"""
Create a dictionary with evaluated command line, target, source
and store that info as an attribute on the target
@@ -140,7 +143,7 @@ def compilation_db_entry_action(target, source, env, **kw):
target[0].write(entry)
- def write_compilation_db(target, source, env):
+ def write_compilation_db(target, source, env) -> None:
entries = []
use_abspath = env['COMPILATIONDB_USE_ABSPATH'] in [True, 1, 'True', 'true']
@@ -197,7 +200,7 @@ def compilation_db_emitter(target, source, env):
return target, source
- def generate(env, **kwargs):
+ def generate(env, **kwargs) -> None:
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
env["COMPILATIONDB_COMSTR"] = kwargs.get(
@@ -261,5 +264,5 @@ def generate(env, **kwargs):
env['COMPILATIONDB_PATH_FILTER'] = ''
- def exists(env):
+ def exists(env) -> bool:
return True