import os, sys, inspect
-VERSION="2.0.27"
+VERSION="2.1.5"
REVISION="x"
GIT="x"
INSTALL="x"
os.environ['PYTHONUNBUFFERED'] = '1'
-if Context.HEXVERSION not in (0x2001b00,):
+if Context.HEXVERSION not in (0x2010500,):
Logs.error('''
Please use the version of waf that comes with Samba, not
a system installed version. See http://wiki.samba.org/index.php/Waf
self.keep = Options.options.keep
"""Whether the build should continue past errors"""
- self.progress_bar = Options.options.progress_bar
+ self.progress_bar = getattr(Options.options, 'progress_bar', 0)
"""
Level of progress status:
cmd = 'configure'
- error_handlers = []
- """
- Additional functions to handle configuration errors
- """
-
def __init__(self, **kw):
super(ConfigurationContext, self).__init__(**kw)
self.environ = dict(os.environ)
top = self.top_dir
if not top:
- top = Options.options.top
+ top = getattr(Options.options, 'top', None)
if not top:
top = getattr(Context.g_module, Context.TOP, None)
if not top:
out = self.out_dir
if not out:
- out = Options.options.out
+ out = getattr(Options.options, 'out', None)
if not out:
out = getattr(Context.g_module, Context.OUT, None)
if not out:
self.init_dirs()
self.cachedir = self.bldnode.make_node(Build.CACHE_DIR)
+ if os.path.exists(self.cachedir.abspath()):
+ shutil.rmtree(self.cachedir.abspath())
self.cachedir.mkdir()
path = os.path.join(self.bldnode.abspath(), WAF_CONFIG_LOG)
:param env: a ConfigSet, usually ``conf.env``
"""
if not env.PREFIX:
- if Options.options.prefix or Utils.is_win32:
+ if getattr(Options.options, 'prefix', None):
env.PREFIX = Options.options.prefix
else:
env.PREFIX = '/'
if not env.BINDIR:
- if Options.options.bindir:
+ if getattr(Options.options, 'bindir', None):
env.BINDIR = Options.options.bindir
else:
env.BINDIR = Utils.subst_vars('${PREFIX}/bin', env)
if not env.LIBDIR:
- if Options.options.libdir:
+ if getattr(Options.options, 'libdir', None):
env.LIBDIR = Options.options.libdir
else:
env.LIBDIR = Utils.subst_vars('${PREFIX}/lib%s' % Utils.lib64(), env)
tmpenv = self.all_envs[key]
tmpenv.store(os.path.join(self.cachedir.abspath(), key + Build.CACHE_SUFFIX))
- def load(self, tool_list, tooldir=None, funs=None, with_sys_path=True, cache=False):
+ def load(self, tool_list, **kw):
"""
Load Waf tools, which will be imported whenever a build is started.
:type tooldir: list of string
:param funs: functions to execute from the waf tools
:type funs: list of string
- :param cache: whether to prevent the tool from running twice
+ :param cache: whether to prevent the tool from running twice (false by default)
:type cache: bool
"""
tools = Utils.to_list(tool_list)
- if tooldir:
- tooldir = Utils.to_list(tooldir)
+ tooldir = Utils.to_list(kw.get('tooldir', ''))
+ with_sys_path = kw.get('with_sys_path', True)
+ funs = kw.get('funs')
for tool in tools:
# avoid loading the same tool more than once with the same functions
# used by composite projects
- if cache:
- mag = (tool, id(self.env), tooldir, funs)
+ if kw.get('cache'):
+ mag = (tool, id(self.env), tuple(tooldir), funs)
if mag in self.tool_cache:
self.to_log('(tool %s is already loaded, skipping)' % tool)
continue
return cmd
@conf
-def check_waf_version(self, mini='1.9.99', maxi='2.1.0', **kw):
+def check_waf_version(self, mini='1.9.99', maxi='2.2.0', **kw):
"""
Raise a Configuration error if the Waf version does not strictly match the given bounds::
- conf.check_waf_version(mini='1.9.99', maxi='2.1.0')
+ conf.check_waf_version(mini='1.9.99', maxi='2.2.0')
:type mini: number, tuple or string
:param mini: Minimum required version
import imp
# the following 3 constants are updated on each new release (do not touch)
-HEXVERSION=0x2001b00
+HEXVERSION=0x2010500
"""Constant updated on new releases"""
-WAFVERSION="2.0.27"
+WAFVERSION="2.1.5"
"""Constant updated on new releases"""
-WAFREVISION="c3e645e395505cb5faa115172b1fc9abdaeaf146"
+WAFREVISION="5360d9c9c2623d1799fe8b0ce35581ff0925993a"
"""Git revision when the waf version is updated"""
WAFNAME="waf"
Logs.free_logger(logger)
delattr(self, 'logger')
- def load(self, tool_list, *k, **kw):
+ def load(self, tool_list, **kw):
"""
- Loads a Waf tool as a module, and try calling the function named :py:const:`waflib.Context.Context.fun`
- from it. A ``tooldir`` argument may be provided as a list of module paths.
+ Loads a Waf tool as a module, and try calling the function named :py:const:`waflib.Context.Context.fun` from it.
:param tool_list: list of Waf tool names to load
:type tool_list: list of string or space-separated string
+ :param tooldir: paths for the imports
+ :type tooldir: list of string
"""
tools = Utils.to_list(tool_list)
path = Utils.to_list(kw.get('tooldir', ''))
that reads the ``options`` wscript function.
"""
-import os, tempfile, optparse, sys, re
+import os, tempfile, argparse, sys, re
from waflib import Logs, Utils, Context, Errors
-options = optparse.Values()
+
+class OptionValues:
+ def __str__(self):
+ return str(self.__dict__)
+
+options = OptionValues()
"""
A global dictionary representing user-provided command-line options::
is consumed during the execution by :py:func:`waflib.Scripting.run_commands`.
"""
-envvars = []
-"""
-List of environment variable declarations placed after the Waf executable name.
-These are detected by searching for "=" in the remaining arguments.
-You probably do not want to use this.
-"""
-
lockfile = os.environ.get('WAFLOCK', '.lock-waf_%s_build' % sys.platform)
"""
Name of the lock file that marks a project as configured
"""
-class opt_parser(optparse.OptionParser):
+class ArgParser(argparse.ArgumentParser):
"""
Command-line options parser.
"""
- def __init__(self, ctx, allow_unknown=False):
- optparse.OptionParser.__init__(self, conflict_handler='resolve', add_help_option=False,
- version='%s %s (%s)' % (Context.WAFNAME, Context.WAFVERSION, Context.WAFREVISION))
- self.formatter.width = Logs.get_term_cols()
+ def __init__(self, ctx):
+ argparse.ArgumentParser.__init__(self, add_help=False, conflict_handler='resolve')
self.ctx = ctx
- self.allow_unknown = allow_unknown
- def _process_args(self, largs, rargs, values):
- """
- Custom _process_args to allow unknown options according to the allow_unknown status
- """
- while rargs:
- try:
- optparse.OptionParser._process_args(self,largs,rargs,values)
- except (optparse.BadOptionError, optparse.AmbiguousOptionError) as e:
- if self.allow_unknown:
- largs.append(e.opt_str)
- else:
- self.error(str(e))
-
- def _process_long_opt(self, rargs, values):
- # --custom-option=-ftxyz is interpreted as -f -t... see #2280
- if self.allow_unknown:
- back = [] + rargs
- try:
- optparse.OptionParser._process_long_opt(self, rargs, values)
- except optparse.BadOptionError:
- while rargs:
- rargs.pop()
- rargs.extend(back)
- rargs.pop(0)
- raise
- else:
- optparse.OptionParser._process_long_opt(self, rargs, values)
-
- def print_usage(self, file=None):
- return self.print_help(file)
+ def format_help(self):
+ self.usage = self.get_usage()
+ return super(ArgParser, self).format_help()
+
+ def format_usage(self):
+ return self.format_help()
+
+ def _get_formatter(self):
+ """Initialize the argument parser to the adequate terminal width"""
+ return self.formatter_class(prog=self.prog, width=Logs.get_term_cols())
+
+ def get_option(self, name):
+ if name in self._option_string_actions:
+ return self._option_string_actions[name]
+
+ def remove_option(self, name):
+ if name in self._option_string_actions:
+ action = self._option_string_actions[name]
+ self._remove_action(action)
+ action.option_strings.remove(name)
+ self._option_string_actions.pop(name, None)
+ for group in self._action_groups:
+ try:
+ group._group_actions.remove(action)
+ except ValueError:
+ pass
def get_usage(self):
"""
continue
if type(v) is type(Context.create_context):
- if v.__doc__ and not k.startswith('_'):
+ if v.__doc__ and len(v.__doc__.splitlines()) < 3 and not k.startswith('_'):
cmds_str[k] = v.__doc__
just = 0
def __init__(self, **kw):
super(OptionsContext, self).__init__(**kw)
- self.parser = opt_parser(self)
- """Instance of :py:class:`waflib.Options.opt_parser`"""
+ self.parser = ArgParser(self)
+ """Instance of :py:class:`waflib.Options.ArgParser`"""
self.option_groups = {}
elif os.environ.get('CLICOLOR_FORCE', '') == '1':
color = 'yes'
p('-c', '--color', dest='colors', default=color, action='store', help='whether to use colors (yes/no/auto) [default: auto]', choices=('yes', 'no', 'auto'))
- p('-j', '--jobs', dest='jobs', default=jobs, type='int', help='amount of parallel jobs (%r)' % jobs)
+ p('-j', '--jobs', dest='jobs', default=jobs, type=int, help='amount of parallel jobs (%r)' % jobs)
p('-k', '--keep', dest='keep', default=0, action='count', help='continue despite errors (-kk to try harder)')
p('-v', '--verbose', dest='verbose', default=0, action='count', help='verbosity level -v -vv or -vvv [default: 0]')
p('--zones', dest='zones', default='', action='store', help='debugging zones (task_gen, deps, tasks, etc)')
- p('--profile', dest='profile', default=0, action='store_true', help=optparse.SUPPRESS_HELP)
- p('--pdb', dest='pdb', default=0, action='store_true', help=optparse.SUPPRESS_HELP)
- p('-h', '--help', dest='whelp', default=0, action='store_true', help="show this help message and exit")
+ p('--profile', dest='profile', default=0, action='store_true', help=argparse.SUPPRESS)
+ p('--pdb', dest='pdb', default=0, action='store_true', help=argparse.SUPPRESS)
+ p('-h', '--help', dest='whelp', default=0, action='store_true', help='show this help message and exit')
+ p('--version', dest='version', default=False, action='store_true', help='show the Waf version and exit')
gr = self.add_option_group('Configuration options')
- self.option_groups['configure options'] = gr
gr.add_option('-o', '--out', action='store', default='', help='build dir for the project', dest='out')
gr.add_option('-t', '--top', action='store', default='', help='src dir for the project', dest='top')
- gr.add_option('--no-lock-in-run', action='store_true', default=os.environ.get('NO_LOCK_IN_RUN', ''), help=optparse.SUPPRESS_HELP, dest='no_lock_in_run')
- gr.add_option('--no-lock-in-out', action='store_true', default=os.environ.get('NO_LOCK_IN_OUT', ''), help=optparse.SUPPRESS_HELP, dest='no_lock_in_out')
- gr.add_option('--no-lock-in-top', action='store_true', default=os.environ.get('NO_LOCK_IN_TOP', ''), help=optparse.SUPPRESS_HELP, dest='no_lock_in_top')
+ gr.add_option('--no-lock-in-run', action='store_true', default=os.environ.get('NO_LOCK_IN_RUN', ''), help=argparse.SUPPRESS, dest='no_lock_in_run')
+ gr.add_option('--no-lock-in-out', action='store_true', default=os.environ.get('NO_LOCK_IN_OUT', ''), help=argparse.SUPPRESS, dest='no_lock_in_out')
+ gr.add_option('--no-lock-in-top', action='store_true', default=os.environ.get('NO_LOCK_IN_TOP', ''), help=argparse.SUPPRESS, dest='no_lock_in_top')
default_prefix = getattr(Context.g_module, 'default_prefix', os.environ.get('PREFIX'))
if not default_prefix:
gr.add_option('--libdir', dest='libdir', help='libdir')
gr = self.add_option_group('Build and installation options')
- self.option_groups['build and install options'] = gr
gr.add_option('-p', '--progress', dest='progress_bar', default=0, action='count', help= '-p: progress bar; -pp: ide output')
gr.add_option('--targets', dest='targets', default='', action='store', help='task generators, e.g. "target1,target2"')
gr = self.add_option_group('Step options')
- self.option_groups['step options'] = gr
gr.add_option('--files', dest='files', default='', action='store', help='files to process, by regexp, e.g. "*/main.c,*/test/main.o"')
default_destdir = os.environ.get('DESTDIR', '')
gr = self.add_option_group('Installation and uninstallation options')
- self.option_groups['install/uninstall options'] = gr
gr.add_option('--destdir', help='installation root [default: %r]' % default_destdir, default=default_destdir, dest='destdir')
- gr.add_option('-f', '--force', dest='force', default=False, action='store_true', help='force file installation')
+ gr.add_option('-f', '--force', dest='force', default=False, action='store_true', help='disable file installation caching')
gr.add_option('--distcheck-args', metavar='ARGS', help='arguments to pass to distcheck', default=None, action='store')
def jobs(self):
return count
def add_option(self, *k, **kw):
+ if 'type' in kw and type(kw['type']) == str:
+ Logs.warn('Invalid "type=str" in add_option (must be a class, not a string)')
+ if kw['type'] == 'int':
+ kw['type'] = int
+ elif kw['type'] == 'string':
+ kw['type'] = str
+ return self.add_argument(*k, **kw)
+
+ def add_argument(self, *k, **kw):
"""
- Wraps ``optparse.add_option``::
+ Wraps ``argparse.add_argument``::
def options(ctx):
ctx.add_option('-u', '--use', dest='use', default=False,
action='store_true', help='a boolean option')
- :rtype: optparse option object
+ :rtype: argparse option object
"""
- return self.parser.add_option(*k, **kw)
+ return self.parser.add_argument(*k, **kw)
def add_option_group(self, *k, **kw):
"""
:rtype: optparse option group object
"""
- try:
- gr = self.option_groups[k[0]]
- except KeyError:
- gr = self.parser.add_option_group(*k, **kw)
- self.option_groups[k[0]] = gr
+ gr = self.get_option_group(k[0])
+ if not gr:
+ gr = self.parser.add_argument_group(*k, **kw)
+ gr.add_option = gr.add_argument
+ self.option_groups[k[0]] = gr
return gr
def get_option_group(self, opt_str):
try:
return self.option_groups[opt_str]
except KeyError:
- for group in self.parser.option_groups:
+ for group in self.parser._action_groups:
if group.title == opt_str:
return group
return None
"""
Just parse the arguments
"""
- self.parser.allow_unknown = allow_unknown
- (options, leftover_args) = self.parser.parse_args(args=_args)
- envvars = []
+ (options, leftover_args) = self.parser.parse_known_args(args=_args)
commands = []
for arg in leftover_args:
- if '=' in arg:
- envvars.append(arg)
- elif arg != 'options':
- commands.append(arg)
+ if not allow_unknown and arg.startswith('-'):
+ self.parser.print_help()
+ raise Errors.WafError('Unknown option: %r' % arg)
+ commands.append(arg)
if options.jobs < 1:
options.jobs = 1
if getattr(options, name, None):
path = self.sanitize_path(getattr(options, name), cwd)
setattr(options, name, path)
- return options, commands, envvars
-
- def init_module_vars(self, arg_options, arg_commands, arg_envvars):
- options.__dict__.clear()
- del commands[:]
- del envvars[:]
-
- options.__dict__.update(arg_options.__dict__)
- commands.extend(arg_commands)
- envvars.extend(arg_envvars)
-
- for var in envvars:
- (name, value) = var.split('=', 1)
- os.environ[name.strip()] = value
+ return options, commands
- def init_logs(self, options, commands, envvars):
+ def init_logs(self, options, commands):
Logs.verbose = options.verbose
if options.verbose >= 1:
self.load('errcheck')
def parse_args(self, _args=None):
"""
Parses arguments from a list which is not necessarily the command-line.
- Initializes the module variables options, commands and envvars
+ Initializes the module variables options and commands
If help is requested, prints it and exit the application
:param _args: arguments
:type _args: list of strings
"""
- options, commands, envvars = self.parse_cmd_args(_args)
- self.init_logs(options, commands, envvars)
- self.init_module_vars(options, commands, envvars)
+ arg_options, arg_commands = self.parse_cmd_args(_args)
+ self.init_logs(arg_options, commands)
+
+ options.__dict__.clear()
+ del commands[:]
+
+ options.__dict__.update(arg_options.__dict__)
+ commands.extend(arg_commands)
def execute(self):
"""
self.lst = []
def append(self, task):
heapq.heappush(self.lst, task)
- def appendleft(self, task):
- "Deprecated, do not use"
- heapq.heappush(self.lst, task)
def pop(self):
return heapq.heappop(self.lst)
def extend(self, lst):
while sem.waiting and not sem.is_locked():
# take a frozen task, make it ready to run
x = sem.waiting.pop()
- self._add_task(x)
+ self.add_task(x)
def get_out(self):
"""
return tsk
def add_task(self, tsk):
- """
- Enqueue a Task to :py:attr:`waflib.Runner.Parallel.ready` so that consumers can run them.
-
- :param tsk: task instance
- :type tsk: :py:attr:`waflib.Task.Task`
- """
- # TODO change in waf 2.1
- self.ready.put(tsk)
-
- def _add_task(self, tsk):
if hasattr(tsk, 'semaphore'):
sem = tsk.semaphore
try:
finally:
self.out.put(tsk)
else:
- self.add_task(tsk)
+ self.ready.put(tsk)
def process_task(self, tsk):
"""
st = self.task_status(tsk)
if st == Task.RUN_ME:
- self._add_task(tsk)
+ self.add_task(tsk)
elif st == Task.ASK_LATER:
self.postpone(tsk)
elif st == Task.SKIP_ME:
"Module called for configuring, compiling and installing targets"
-from __future__ import with_statement
-
import os, shlex, shutil, traceback, errno, sys, stat
from waflib import Utils, Configure, Logs, Options, ConfigSet, Context, Errors, Build, Node
sys.argv.pop(1)
ctx = Context.create_context('options')
- (options, commands, env) = ctx.parse_cmd_args(allow_unknown=True)
- if options.top:
+ # allow --ver option in user scripts #2453
+ ctx.parser.allow_abbrev = False
+ (options, commands) = ctx.parse_cmd_args(allow_unknown=True)
+ if options.version:
+ print('%s %s (%s)'%(Context.WAFNAME, Context.WAFVERSION, Context.WAFREVISION))
+ sys.exit(0)
+ if getattr(options, 'top', None):
start_dir = Context.run_dir = Context.top_dir = options.top
no_climb = True
- if options.out:
+ if getattr(options, 'out', None):
Context.out_dir = options.out
# if 'configure' is in the commands, do not search any further
with tarfile.open(self.get_arch_name()) as t:
for x in t:
- t.extract(x)
+ if hasattr(tarfile, 'data_filter'):
+ t.extract(x, filter='data')
+ else:
+ t.extract(x)
instdir = tempfile.mkdtemp('.inst', self.get_base_name())
cmd = self.make_distcheck_cmd(instdir)
super(store_task_type, cls).__init__(name, bases, dict)
name = cls.__name__
- if name != 'evil' and name != 'Task':
+ if name != 'Task':
if getattr(cls, 'run_str', None):
# if a string is provided, convert it to a method
(f, dvars) = compile_fun(cls.run_str, cls.shell)
# be creative
getattr(cls, 'register', classes)[name] = cls
-evil = store_task_type('evil', (object,), {})
-"Base class provided to avoid writing a metaclass, so the code can run in python 2.6 and 3.x unmodified"
-
-class Task(evil):
+class Task(metaclass=store_task_type):
"""
Task objects represents actions to perform such as commands to execute by calling the `run` method.
cls.sig_explicit_deps = sig_explicit_deps
return cls
-TaskBase = Task
-"Provided for compatibility reasons, TaskBase should not be used"
-
class TaskSemaphore(object):
"""
Task semaphores provide a simple and efficient way of throttling the amount of
cls = Task.task_factory(name, rule, _vars, shell=shell, color=color)
if cls_str:
+ if isinstance(cls_str, str):
+ raise ValueError('cls_str should be a function %r' % self)
setattr(cls, '__str__', self.cls_str)
if cls_keyword:
+ if isinstance(cls_keyword, str):
+ raise ValueError('cls_keyword should be a function %r' % self)
setattr(cls, 'keyword', self.cls_keyword)
if deep_inputs:
C/C++/D configuration helpers
"""
-from __future__ import with_statement
-
import os, re, shlex
from waflib import Build, Utils, Task, Options, Logs, Errors, Runner
from waflib.TaskGen import after_method, feature
kw['compile_mode'] = 'cxx'
if not 'type' in kw:
- kw['type'] = 'cprogram'
+ kw['type'] = '%sprogram' % kw['compile_mode']
if not 'features' in kw:
if not 'header_name' in kw or kw.get('link_header_test', True):
if not 'msg' in kw:
self.fatal('missing "msg" in conf.check(...)')
+ if 'cflags' in kw and not 'c' in kw['features']:
+ self.fatal('Invalid cflags in non-c configuration test, specify conf.check(features=)')
+ if 'cxxflags' in kw and not 'cxx' in kw['features']:
+ self.fatal('Invalid cxxflags in non-cxx configuration test, specify conf.check(features=)')
+ if 'fcflags' in kw and not 'fc' in kw['features']:
+ self.fatal('Invalid fcflags in non-fc configuration test, specify conf.check(features=)')
+ if 'dflags' in kw and not 'd' in kw['features']:
+ self.fatal('Invalid dflags in non-d configuration test, specify conf.check(features=)')
+
@conf
def post_check(self, *k, **kw):
"""
if clang and out.find('__clang__') < 0:
conf.fatal('Not clang/clang++')
- if not clang and not icc and out.find('__clang__') >= 0:
+ if not clang and out.find('__clang__') >= 0:
conf.fatal('Could not find gcc/g++ (only Clang), if renamed try eg: CC=gcc48 CXX=g++48 waf configure')
k = {}
# 20230100
ver = k['__INTEL_CLANG_COMPILER']
conf.env.CC_VERSION = (ver[:4], ver[4:6], ver[-2:])
- conf.env.INTEL_CLANG_COMPILER = 1
else:
ver = k['__INTEL_COMPILER']
conf.env.CC_VERSION = (ver[:-2], ver[-2], ver[-1])
else:
conf.fatal('Could not determine the suncc version.')
-# ============ the --as-needed flag should added during the configuration, not at runtime =========
-
-@conf
-def add_as_needed(self):
- """
- Adds ``--as-needed`` to the *LINKFLAGS*
- On some platforms, it is a default flag. In some cases (e.g., in NS-3) it is necessary to explicitly disable this feature with `-Wl,--no-as-needed` flag.
- """
- if self.env.DEST_BINFMT == 'elf' and 'gcc' in (self.env.CXX_NAME, self.env.CC_NAME):
- self.env.append_unique('LINKFLAGS', '-Wl,--as-needed')
-
# ============ parallel configuration
class cfgtask(Task.Task):
def check_msg(self):
return tmp[0]
- self.check(fragment=ENDIAN_FRAGMENT, features='c cshlib grep_for_endianness',
+ self.check(fragment=ENDIAN_FRAGMENT, features='c cstlib grep_for_endianness',
msg='Checking for endianness', define='ENDIANNESS', tmp=tmp,
okmsg=check_msg, confcache=None)
return tmp[0]
lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env.INCLUDES)
self.includes_nodes = lst
cwd = self.get_cwd()
- self.env.INCPATHS = [x.path_from(cwd) for x in lst]
+ if Utils.is_win32:
+ # Visual Studio limitations
+ self.env.INCPATHS = [x.path_from(cwd) if x.is_child_of(self.bld.srcnode) else x.abspath() for x in lst]
+ else:
+ self.env.INCPATHS = [x.path_from(cwd) for x in lst]
class link_task(Task.Task):
"""
class stlink_task(link_task):
"""
Base for static link tasks, which use *ar* most of the time.
- The target is always removed before being written.
"""
- run_str = '${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
+ run_str = [
+ lambda task: task.remove_before_build(),
+ '${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
+ ]
chmod = Utils.O644
"""Default installation mode for the static libraries"""
+ def remove_before_build(self):
+ "Remove the library before building it"
+ try:
+ os.remove(self.outputs[0].abspath())
+ except OSError:
+ pass
+
def rm_tgt(cls):
+ # TODO obsolete code, remove in waf 2.2
old = cls.run
def wrap(self):
try:
pass
return old(self)
setattr(cls, 'run', wrap)
-rm_tgt(stlink_task)
@feature('skip_stlib_link_deps')
@before_method('process_use')
$ waf configure --with-csc-binary=/foo/bar/mcs
"""
- opt.add_option('--with-csc-binary', type='string', dest='cscbinary')
+ opt.add_option('--with-csc-binary', type=str, dest='cscbinary')
class fake_csshlib(Task.Task):
"""
# encoding: utf-8
# Thomas Nagy, 2008-2018 (ita)
-"Detect as/gas/gcc for compiling assembly files"
+"""Detect as/gas/gcc for compiling assembly files
+
+To force a specific compiler::
+
+ def configure(conf):
+ conf.find_program(['clang'], var='AS')
+ conf.load('gas')
+"""
import waflib.Tools.asm # - leave this
from waflib.Tools import ar
"""
Find the programs gas/as/gcc and set the variable *AS*
"""
- conf.find_program(['gas', 'gcc'], var='AS')
+ names = ['gas', 'gcc', 'clang']
+ if conf.env.COMPILER_CC == 'clang':
+ names = ['clang', 'gas', 'gcc']
+ conf.find_program(names, var='AS')
conf.env.AS_TGT_F = ['-c', '-o']
conf.env.ASLNK_TGT_F = ['-o']
conf.find_ar()
option = opt.parser.get_option(k)
if option:
opt.parser.remove_option(k)
- inst_dir.add_option(option)
+ inst_dir.add_argument(k, dest=option.dest, help=option.help, default=option.default)
inst_dir.add_option('--exec-prefix',
help = 'installation prefix for binaries [PREFIX]',
for name, help, default in _options:
option_name = '--' + name
+
+ opt.parser.remove_option(option_name)
+
str_default = default
str_help = '%s [%s]' % (help, re.sub(r'\$\{([^}]+)\}', r'\1', str_default))
- dirs_options.add_option(option_name, help=str_help, default='', dest=name.upper())
+ dirs_options.add_option(option_name, help=str_help, default='', dest=name)
Detects the Intel C compiler
"""
-from waflib import Utils
from waflib.Tools import ccroot, ar, gcc
from waflib.Configure import conf
-from waflib.Tools import msvc
@conf
def find_icc(conf):
"""
Finds the program icc and execute it to ensure it really is icc
"""
- if Utils.is_win32:
- conf.find_program(['icx-cl'], var='ICXCL', mandatory=False)
- if conf.env.ICXCL:
- conf.env.INTEL_CLANG_COMPILER = True
- conf.env.CC = conf.env.ICXCL
-
- if not conf.env.ICXCL:
- cc = conf.find_program(['icx', 'icc', 'ICL'], var='CC')
- conf.get_cc_version(cc, icc=True)
-
+ cc = conf.find_program(['icx', 'icc', 'ICL'], var='CC')
+ conf.get_cc_version(cc, icc=True)
conf.env.CC_NAME = 'icc'
def configure(conf):
conf.find_icc()
- if conf.env.ICXCL and Utils.is_win32:
- conf.find_msvc()
- conf.find_program('MT', var='MT')
- conf.env.MTFLAGS = ['/nologo']
- conf.env.MSVC_MANIFEST = True
-
- conf.msvc_common_flags()
-
- conf.env.CFLAGS = []
- conf.cc_load_tools()
- conf.cc_add_flags()
- conf.link_add_flags()
-
- conf.visual_studio_add_flags()
- conf.env.CC_TGT_F = ['/FC', '/c', '/Fo']
- conf.env.CPPPATH_ST = '/I%s'
- else:
- conf.find_ar()
- conf.gcc_common_flags()
- conf.gcc_modifier_platform()
- conf.cc_load_tools()
- conf.cc_add_flags()
- conf.link_add_flags()
+ conf.find_ar()
+ conf.gcc_common_flags()
+ conf.gcc_modifier_platform()
+ conf.cc_load_tools()
+ conf.cc_add_flags()
+ conf.link_add_flags()
Detects the Intel C++ compiler
"""
-from waflib import Utils
from waflib.Tools import ccroot, ar, gxx
from waflib.Configure import conf
-from waflib.Tools import msvc
@conf
def find_icpc(conf):
"""
Finds the program icpc, and execute it to ensure it really is icpc
"""
- if Utils.is_win32:
- conf.find_program(['icx-cl'], var='ICPXCL', mandatory=False)
- if conf.env.ICPXCL:
- conf.env.INTEL_CLANG_COMPILER = True
- conf.env.CXX = conf.env.ICPXCL
-
- if not conf.env.ICPXCL:
- cc = conf.find_program(['icpx', 'icpc', 'ICL'], var='CXX')
- conf.get_cc_version(cc, icc=True)
-
+ cxx = conf.find_program(['icpx', 'icpc'], var='CXX')
+ conf.get_cc_version(cxx, icc=True)
conf.env.CXX_NAME = 'icc'
def configure(conf):
conf.find_icpc()
- if conf.env.ICPXCL and Utils.is_win32:
- conf.find_msvc()
- conf.find_program('MT', var='MT')
- conf.env.MTFLAGS = ['/nologo']
- conf.env.MSVC_MANIFEST = True
-
- conf.msvc_common_flags()
-
- conf.env.CXXFLAGS = []
- conf.cc_load_tools()
- conf.cc_add_flags()
- conf.link_add_flags()
+ conf.find_ar()
+ conf.gxx_common_flags()
+ conf.gxx_modifier_platform()
+ conf.cxx_load_tools()
+ conf.cxx_add_flags()
+ conf.link_add_flags()
- conf.visual_studio_add_flags()
- conf.env.CXX_TGT_F = ['/c', '/Fo']
- conf.env.CPPPATH_ST = '/I%s'
- else:
- conf.find_ar()
- conf.gxx_common_flags()
- conf.gxx_modifier_platform()
- conf.cc_load_tools()
- conf.cc_add_flags()
- conf.link_add_flags()
conf.ifort_modifier_platform()
-all_ifort_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'), ('Itanium', 'ia64')]
+all_ifort_platforms = [('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86')]
"""List of icl platforms"""
@conf
except OSError:
pass
else:
- batch_file=os.path.join(path,'bin','ifortvars.bat')
+ batch_file = os.path.join(path, 'bin', 'ifortvars.bat')
if os.path.isfile(batch_file):
targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
+ else:
+ batch_file = os.path.join(path, 'env', 'vars.bat')
+ if os.path.isfile(batch_file):
+ targets[target] = target_compiler(conf, 'oneapi', arch, version, target, batch_file)
for target,arch in all_ifort_platforms:
try:
return dct
def _get_prog_names(self, compiler):
- if compiler=='intel':
+ if compiler == 'oneapi':
+ compiler_name = 'ifx'
+ linker_name = 'XILINK'
+ lib_name = 'XILIB'
+ elif compiler == 'intel':
compiler_name = 'ifort'
linker_name = 'XILINK'
lib_name = 'XILIB'
Usage of the :py:mod:`waflib.Tools.gnu_dirs` is recommended, but not obligatory.
"""
-from __future__ import with_statement
-
import os, re
from waflib import Context, Task, Utils, Logs
import waflib.Tools.ccroot
if getattr(self, 'recurse_use', False) or self.bld.env.RECURSE_JAVA:
self.java_use_rec(x)
- self.env.append_value('CLASSPATH', self.use_lst)
+ self.env.prepend_value('CLASSPATH', self.use_lst)
@feature('javac')
@after_method('apply_java', 'propagate_uselib_vars', 'use_javac_files')
for x in 'javac java jar javadoc'.split():
self.find_program(x, var=x.upper(), path_list=java_path, mandatory=(x not in ('javadoc')))
+ if not self.env.JAVA_HOME:
+ # needed for jni
+ if self.env.JAVAC and len(Utils.to_list(self.env.JAVAC)) == 1:
+ # heuristic to find the correct JAVA_HOME
+ javac_path = Utils.to_list(self.env.JAVAC)[0]
+ java_dir = os.path.dirname(os.path.dirname(os.path.realpath(javac_path)))
+ if os.path.exists(os.path.join(java_dir, 'lib')):
+ self.env.JAVA_HOME = [java_dir]
+
if 'CLASSPATH' in self.environ:
v.CLASSPATH = self.environ['CLASSPATH']
f = dir.ant_glob('**/*jvm.(so|dll|dylib)')
libDirs = [x.parent.abspath() for x in f] or [javaHome]
- # On windows, we need both the .dll and .lib to link. On my JDK, they are
+ # On windows, we need both the .dll and .lib to link. On my JDK, they are
# in different directories...
f = dir.ant_glob('**/*jvm.(lib)')
if f:
or::
def configure(conf):
- conf.env.MSVC_VERSIONS = ['msvc 10.0', 'msvc 9.0', 'msvc 8.0', 'msvc 7.1', 'msvc 7.0', 'msvc 6.0', 'wsdk 7.0', 'intel 11', 'PocketPC 9.0', 'Smartphone 8.0']
+ conf.env.MSVC_VERSIONS = ['msvc 10.0', 'msvc 9.0', 'msvc 8.0', 'msvc 7.1', 'msvc 7.0', 'msvc 6.0', 'wsdk 7.0', 'intel 11']
conf.env.MSVC_TARGETS = ['x64']
conf.load('msvc')
To force testing all the configurations that are not used, use the ``--no-msvc-lazy`` option
or set ``conf.env.MSVC_LAZY_AUTODETECT=False``.
-Supported platforms: ia64, x64, x86, x86_amd64, x86_ia64, x86_arm, amd64_x86, amd64_arm
+Supported platforms: ia64, x64, x86, arm64, x86_amd64, x86_ia64, x86_arm, amd64_x86, amd64_arm, arm64_amd64, arm64_x86
Compilers supported:
* msvc => Visual Studio, versions 6.0 (VC 98, VC .NET 2002) to 15 (Visual Studio 2017)
* wsdk => Windows SDK, versions 6.0, 6.1, 7.0, 7.1, 8.0
* icl => Intel compiler, versions 9, 10, 11, 13
-* winphone => Visual Studio to target Windows Phone 8 native (version 8.0 for now)
-* Smartphone => Compiler/SDK for Smartphone devices (armv4/v4i)
-* PocketPC => Compiler/SDK for PocketPC devices (armv4/v4i)
To use WAF in a VS2008 Make file project (see http://code.google.com/p/waf/issues/detail?id=894)
You may consider to set the environment variable "VS_UNICODE_OUTPUT" to nothing before calling waf.
'''.split()
"""importlibs provided by MSVC/Platform SDK. Do NOT search them"""
-all_msvc_platforms = [ ('x64', 'amd64'), ('x86', 'x86'), ('ia64', 'ia64'),
+all_msvc_platforms = [ ('x64', 'amd64'), ('x86', 'x86'), ('ia64', 'ia64'), ('arm64', 'arm64'),
('x86_amd64', 'amd64'), ('x86_ia64', 'ia64'), ('x86_arm', 'arm'), ('x86_arm64', 'arm64'),
- ('amd64_x86', 'x86'), ('amd64_arm', 'arm'), ('amd64_arm64', 'arm64') ]
+ ('amd64_x86', 'x86'), ('amd64_arm', 'arm'), ('amd64_arm64', 'arm64'),
+ ('arm64_amd64', 'amd64'), ('arm64_x86', 'x86') ]
"""List of msvc platforms"""
-all_wince_platforms = [ ('armv4', 'arm'), ('armv4i', 'arm'), ('mipsii', 'mips'), ('mipsii_fp', 'mips'), ('mipsiv', 'mips'), ('mipsiv_fp', 'mips'), ('sh4', 'sh'), ('x86', 'cex86') ]
-"""List of wince platforms"""
-
-all_icl_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'), ('Itanium', 'ia64')]
+all_icl_platforms = [('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86')]
"""List of icl platforms"""
def options(opt):
m = re.match(r'(^\d+\.\d+).*', vsver)
if m:
default_ver = 'msvc %s' % m.group(1)
- opt.add_option('--msvc_version', type='string', help = 'msvc version, eg: "msvc 10.0,msvc 9.0"', default=default_ver)
- opt.add_option('--msvc_targets', type='string', help = 'msvc targets, eg: "x64,arm"', default='')
+ opt.add_option('--msvc_version', type=str, help = 'msvc version, eg: "msvc 10.0,msvc 9.0"', default=default_ver)
+ opt.add_option('--msvc_targets', type=str, help = 'msvc targets, eg: "x64,arm"', default='')
opt.add_option('--no-msvc-lazy', action='store_false', help = 'lazily check msvc target environments', default=True, dest='msvc_lazy')
class MSVCVersion(object):
"""
platforms = getattr(Options.options, 'msvc_targets', '').split(',')
if platforms == ['']:
- platforms=Utils.to_list(conf.env.MSVC_TARGETS) or [i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
+ platforms=Utils.to_list(conf.env.MSVC_TARGETS) or [i for i,j in all_msvc_platforms + all_icl_platforms]
desired_versions = getattr(Options.options, 'msvc_version', '').split(',')
if desired_versions == ['']:
desired_versions = conf.env.MSVC_VERSIONS or list(sorted(versiondict.keys(), key=MSVCVersion, reverse=True))
return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
-def gather_wince_supported_platforms():
- """
- Checks SmartPhones SDKs
-
- :param versions: list to modify
- :type versions: list
- """
- supported_wince_platforms = []
- try:
- ce_sdk = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs')
- except OSError:
- try:
- ce_sdk = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs')
- except OSError:
- ce_sdk = ''
- if not ce_sdk:
- return supported_wince_platforms
-
- index = 0
- while 1:
- try:
- sdk_device = Utils.winreg.EnumKey(ce_sdk, index)
- sdk = Utils.winreg.OpenKey(ce_sdk, sdk_device)
- except OSError:
- break
- index += 1
- try:
- path,type = Utils.winreg.QueryValueEx(sdk, 'SDKRootDir')
- except OSError:
- try:
- path,type = Utils.winreg.QueryValueEx(sdk,'SDKInformation')
- except OSError:
- continue
- path,xml = os.path.split(path)
- path = str(path)
- path,device = os.path.split(path)
- if not device:
- path,device = os.path.split(path)
- platforms = []
- for arch,compiler in all_wince_platforms:
- if os.path.isdir(os.path.join(path, device, 'Lib', arch)):
- platforms.append((arch, compiler, os.path.join(path, device, 'Include', arch), os.path.join(path, device, 'Lib', arch)))
- if platforms:
- supported_wince_platforms.append((device, platforms))
- return supported_wince_platforms
-
def gather_msvc_detected_versions():
#Detected MSVC versions!
version_pattern = re.compile(r'^(\d\d?\.\d\d?)(Exp)?$')
if targets:
versions['msvc %s' % version] = targets
-@conf
-def gather_wince_targets(conf, versions, version, vc_path, vsvars, supported_platforms):
- #Looking for Win CE compilers!
- for device,platforms in supported_platforms:
- targets = {}
- for platform,compiler,include,lib in platforms:
- winCEpath = os.path.join(vc_path, 'ce')
- if not os.path.isdir(winCEpath):
- continue
-
- if os.path.isdir(os.path.join(winCEpath, 'lib', platform)):
- bindirs = [os.path.join(winCEpath, 'bin', compiler), os.path.join(winCEpath, 'bin', 'x86_'+compiler)]
- incdirs = [os.path.join(winCEpath, 'include'), os.path.join(winCEpath, 'atlmfc', 'include'), include]
- libdirs = [os.path.join(winCEpath, 'lib', platform), os.path.join(winCEpath, 'atlmfc', 'lib', platform), lib]
- def combine_common(obj, compiler_env):
- # TODO this is likely broken, remove in waf 2.1
- (common_bindirs,_1,_2) = compiler_env
- return (bindirs + common_bindirs, incdirs, libdirs)
- targets[platform] = target_compiler(conf, 'msvc', platform, version, 'x86', vsvars, combine_common)
- if targets:
- versions[device + ' ' + version] = targets
-
-@conf
-def gather_winphone_targets(conf, versions, version, vc_path, vsvars):
- #Looking for WinPhone compilers
- targets = {}
- for target,realtarget in all_msvc_platforms[::-1]:
- targets[target] = target_compiler(conf, 'winphone', realtarget, version, target, vsvars)
- if targets:
- versions['winphone ' + version] = targets
-
@conf
def gather_vswhere_versions(conf, versions):
try:
else:
vc_paths.append((version, os.path.abspath(str(path))))
- wince_supported_platforms = gather_wince_supported_platforms()
-
for version,vc_path in vc_paths:
- vs_path = os.path.dirname(vc_path)
- vsvars = os.path.join(vs_path, 'Common7', 'Tools', 'vsvars32.bat')
- if wince_supported_platforms and os.path.isfile(vsvars):
- conf.gather_wince_targets(versions, version, vc_path, vsvars, wince_supported_platforms)
-
- # WP80 works with 11.0Exp and 11.0, both of which resolve to the same vc_path.
- # Stop after one is found.
- for version,vc_path in vc_paths:
- vs_path = os.path.dirname(vc_path)
- vsvars = os.path.join(vs_path, 'VC', 'WPSDK', 'WP80', 'vcvarsphoneall.bat')
- if os.path.isfile(vsvars):
- conf.gather_winphone_targets(versions, '8.0', vc_path, vsvars)
- break
-
- for version,vc_path in vc_paths:
- vs_path = os.path.dirname(vc_path)
conf.gather_msvc_targets(versions, version, vc_path)
@conf
self.link_task.outputs.append(man_node)
self.env.DO_MANIFEST = True
-def make_winapp(self, family):
- append = self.env.append_unique
- append('DEFINES', 'WINAPI_FAMILY=%s' % family)
- append('CXXFLAGS', ['/ZW', '/TP'])
- for lib_path in self.env.LIBPATH:
- append('CXXFLAGS','/AI%s'%lib_path)
-
-@feature('winphoneapp')
-@after_method('process_use')
-@after_method('propagate_uselib_vars')
-def make_winphone_app(self):
- """
- Insert configuration flags for windows phone applications (adds /ZW, /TP...)
- """
- make_winapp(self, 'WINAPI_FAMILY_PHONE_APP')
- self.env.append_unique('LINKFLAGS', ['/NODEFAULTLIB:ole32.lib', 'PhoneAppModelHost.lib'])
-
-@feature('winapp')
-@after_method('process_use')
-@after_method('propagate_uselib_vars')
-def make_windows_app(self):
- """
- Insert configuration flags for windows applications (adds /ZW, /TP...)
- """
- make_winapp(self, 'WINAPI_FAMILY_DESKTOP_APP')
"""
Add the ``--with-perl-archdir`` and ``--with-perl-binary`` command-line options.
"""
- opt.add_option('--with-perl-binary', type='string', dest='perlbinary', help = 'Specify alternate perl binary', default=None)
- opt.add_option('--with-perl-archdir', type='string', dest='perlarchdir', help = 'Specify directory where to install arch specific files', default=None)
+ opt.add_option('--with-perl-binary', type=str, dest='perlbinary', help = 'Specify alternate perl binary', default=None)
+ opt.add_option('--with-perl-archdir', type=str, dest='perlarchdir', help = 'Specify directory where to install arch specific files', default=None)
Piece of Python code used in :py:class:`waflib.Tools.python.pyo` and :py:class:`waflib.Tools.python.pyc` for byte-compiling python files
"""
-DISTUTILS_IMP = """
-try:
- from distutils.sysconfig import get_config_var, get_python_lib
-except ImportError:
- from sysconfig import get_config_var, get_path
- def get_python_lib(*k, **kw):
- keyword='platlib' if kw.get('plat_specific') else 'purelib'
- if 'prefix' in kw:
- return get_path(keyword, vars={'installed_base': kw['prefix'], 'platbase': kw['prefix']})
- return get_path(keyword)
-""".splitlines()
-
@before_method('process_source')
@feature('py')
def feature_py(self):
"""
Add signature of .py file, so it will be byte-compiled when necessary
"""
- assert(hasattr(self, 'install_path')), 'add features="py" for target "%s" in "%s/wscript".' % (self.target, self.path.nice_path())
+ assert(hasattr(self, 'install_path')), 'add features="py" for target "%s" in "%s/wscript".' % (self.target, self.path.abspath())
self.install_from = getattr(self, 'install_from', None)
relative_trick = getattr(self, 'relative_trick', True)
if self.install_from:
assert isinstance(self.install_from, Node.Node), \
- 'add features="py" for target "%s" in "%s/wscript" (%s).' % (self.target, self.path.nice_path(), type(self.install_from))
+ 'add features="py" for target "%s" in "%s/wscript" (%s).' % (self.target, self.path.abspath(), type(self.install_from))
# where to install the python file
if self.install_path:
self.uselib.append('PYEMBED')
@conf
-def get_python_variables(self, variables, imports=None):
+def get_sysconfig_variable(self, variable):
+ """
+ Spawn a new python process to dump configuration variables
+
+ :param variable: variable to print
+ :type variable: string
+ :return: the variable value
+ :rtype: string
+ """
+
+ env = dict(os.environ)
+ try:
+ del env['MACOSX_DEPLOYMENT_TARGET'] # see comments in the OSX tool
+ except KeyError:
+ pass
+
+ cmd = self.env.PYTHON + ["-c", "import sysconfig; print(sysconfig.get_config_var(%r))" % variable]
+ out = self.cmd_and_log(cmd, env=env).strip()
+
+ if out == "None":
+ return ""
+ else:
+ return out
+
+@conf
+def get_sysconfig_variables(self, variables):
"""
Spawn a new python process to dump configuration variables
:param variables: variables to print
:type variables: list of string
- :param imports: one import by element
- :type imports: list of string
:return: the variable values
:rtype: list of string
"""
- if not imports:
- try:
- imports = self.python_imports
- except AttributeError:
- imports = DISTUTILS_IMP
-
- program = list(imports) # copy
- program.append('')
- for v in variables:
- program.append("print(repr(%s))" % v)
- os_env = dict(os.environ)
+ return [self.get_sysconfig_variable(variable=v) for v in variables]
+
+@conf
+def get_sysconfig_path(self, name):
+ """
+ Spawn a new python process to dump configuration paths
+
+ :param name: path to print
+	:type name: string
+ :return: the path value
+ :rtype: string
+ """
+
+ env = dict(os.environ)
try:
- del os_env['MACOSX_DEPLOYMENT_TARGET'] # see comments in the OSX tool
+ del env['MACOSX_DEPLOYMENT_TARGET'] # see comments in the OSX tool
except KeyError:
pass
- try:
- out = self.cmd_and_log(self.env.PYTHON + ['-c', '\n'.join(program)], env=os_env)
- except Errors.WafError:
- self.fatal('Could not run %r' % self.env.PYTHON)
- self.to_log(out)
- return_values = []
- for s in out.splitlines():
- s = s.strip()
- if not s:
- continue
- if s == 'None':
- return_values.append(None)
- elif (s[0] == "'" and s[-1] == "'") or (s[0] == '"' and s[-1] == '"'):
- return_values.append(eval(s))
- elif s[0].isdigit():
- return_values.append(int(s))
- else: break
- return return_values
+ if self.env.PREFIX:
+ # If project wide PREFIX is set, construct the install directory based on this
+ # Note: we could use sysconfig.get_preferred_scheme('user') but that is Python >= 3.10 only
+ pref_scheme = 'posix_user' # Default to *nix name
+ if Utils.unversioned_sys_platform() == 'darwin':
+ pref_scheme = 'osx_framework_user'
+ elif Utils.unversioned_sys_platform() == 'win32':
+ pref_scheme = 'nt_user'
+ cmd = self.env.PYTHON + ["-c", "import sysconfig; print(sysconfig.get_path(%r, %r, {'userbase': %r}))" % (name, pref_scheme, self.env.PREFIX)]
+ else:
+ cmd = self.env.PYTHON + ["-c", "import sysconfig; print(sysconfig.get_path(%r))" % name]
+ out = self.cmd_and_log(cmd, env=env).strip()
+
+ if out == "None":
+ return ""
+ else:
+ return out
@conf
def test_pyembed(self, mode, msg='Testing pyembed configuration'):
@conf
def check_python_headers(conf, features='pyembed pyext'):
"""
- Check for headers and libraries necessary to extend or embed python.
- It may use the module *distutils* or sysconfig in newer Python versions.
+ Check for headers and libraries necessary to extend or embed python by using the module *sysconfig*.
On success the environment variables xxx_PYEXT and xxx_PYEMBED are added:
* PYEXT: for compiling python extensions
# so we actually do all this for compatibility reasons and for obtaining pyext_PATTERN below
v = 'prefix SO EXT_SUFFIX LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split()
try:
- lst = conf.get_python_variables(["get_config_var('%s') or ''" % x for x in v])
+ lst = conf.get_sysconfig_variables(variables=v)
except RuntimeError:
conf.fatal("Python development headers not found (-v for details).")
env.LIBPATH_PYEXT = env.LIBPATH_PYEMBED
env.LIB_PYEXT = env.LIB_PYEMBED
- conf.to_log("Found an include path for Python extensions: %r\n" % (dct['INCLUDEPY'],))
+ conf.to_log("Include path for Python extensions (found via sysconfig module): %r\n" % (dct['INCLUDEPY'],))
env.INCLUDES_PYEXT = [dct['INCLUDEPY']]
env.INCLUDES_PYEMBED = [dct['INCLUDEPY']]
env.append_unique('CXXFLAGS_PYEXT', ['-fno-strict-aliasing'])
if env.CC_NAME == "msvc":
- try:
- from distutils.msvccompiler import MSVCCompiler
- except ImportError:
- # From https://github.com/python/cpython/blob/main/Lib/distutils/msvccompiler.py
- env.append_value('CFLAGS_PYEXT', [ '/nologo', '/Ox', '/MD', '/W3', '/GX', '/DNDEBUG'])
- env.append_value('CXXFLAGS_PYEXT', [ '/nologo', '/Ox', '/MD', '/W3', '/GX', '/DNDEBUG'])
- env.append_value('LINKFLAGS_PYEXT', ['/DLL', '/nologo', '/INCREMENTAL:NO'])
- else:
- dist_compiler = MSVCCompiler()
- dist_compiler.initialize()
- env.append_value('CFLAGS_PYEXT', dist_compiler.compile_options)
- env.append_value('CXXFLAGS_PYEXT', dist_compiler.compile_options)
- env.append_value('LINKFLAGS_PYEXT', dist_compiler.ldflags_shared)
+ # From https://github.com/python/cpython/blob/main/Lib/distutils/msvccompiler.py
+ env.append_value('CFLAGS_PYEXT', [ '/nologo', '/Ox', '/MD', '/W3', '/EHsc', '/DNDEBUG'])
+ env.append_value('CXXFLAGS_PYEXT', [ '/nologo', '/Ox', '/MD', '/W3', '/EHsc', '/DNDEBUG'])
+ env.append_value('LINKFLAGS_PYEXT', ['/DLL', '/nologo', '/INCREMENTAL:NO'])
- conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', uselib='PYEMBED', fragment=FRAG, errmsg='Could not build a Python embedded interpreter')
+ # See if it compiles
+ conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', uselib='PYEMBED', fragment=FRAG, errmsg='Broken python installation? Get python-config now!')
@conf
def check_python_version(conf, minver=None):
# Check environment for PYTHONDIR
pydir = conf.environ['PYTHONDIR']
else:
- # Finally, try to guess
- if Utils.is_win32:
- (pydir,) = conf.get_python_variables(["get_python_lib(standard_lib=0) or ''"])
- else:
- (pydir,) = conf.get_python_variables(["get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX])
+ pydir = conf.get_sysconfig_path('purelib')
if 'PYTHONARCHDIR' in conf.env:
# Check if --pythonarchdir was specified
pyarchdir = conf.environ['PYTHONARCHDIR']
else:
# Finally, try to guess
- (pyarchdir, ) = conf.get_python_variables(["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX])
+ pyarchdir = conf.get_sysconfig_path('platlib')
+
if not pyarchdir:
pyarchdir = pydir
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
-# Rafaël Kooi, 2023 (RA-Kooi)
+# vim: ts=4 noexpandtab
+# Thomas Nagy, 2006-2024 (ita)
+# Rafaël Kooi, 2023-2024 (RA-Kooi)
"""
This tool helps with finding Qt5 and Qt6 tools and libraries,
include the .moc file.
This is regarded as the best practice (and provides much faster
compilations).
- It also implies that the include paths have beenset properly.
+ It also implies that the include paths have been set properly.
- to have the include paths added automatically, use the following::
The detection uses pkg-config on Linux by default. The list of
libraries to be requested to pkg-config is formulated by scanning
-in the QTLIBS directory (that can be passed via --qtlibs or by
-setting the environment variable QT5_LIBDIR or QT6_LIBDIR otherwise is
-derived by querying qmake for QT_INSTALL_LIBS directory) for
-shared/static libraries present.
-Alternatively the list of libraries to be requested via pkg-config
-can be set using the qt5_vars attribute, ie:
+in the 'mkspecs/modules' directory of the detected Qt installation
+for shared/static libraries present.
+Alternatively the list of libraries to be requested can be set using
+the qt5_vars attribute, ie:
conf.qt5_vars = ['Qt5Core', 'Qt5Gui', 'Qt5Widgets', 'Qt5Test'];
This can speed up configuration phase if needed libraries are
known beforehand, can improve detection on systems with a
-sparse QT5/Qt6 libraries installation (ie. NIX) and can improve
-detection of some header-only Qt modules (ie. Qt5UiPlugin).
+sparse QT5/Qt6 libraries installation (ie. NIX). The libraries
+requested are required to be found unless they're also specified in
+the qt5_vars_opt attribute (qt6_vars_opt when using Qt6).
To force static library detection use:
-QT5_XCOMPILE=1 QT5_FORCE_STATIC=1 waf configure
+
+ QT5_FORCE_STATIC=1 waf configure
+
+This should not be needed if your Qt installation is built without
+shared libraries. If you still need this when no shared libraries
+are present, please file a bug report.
+
+To disable pkg-config use:
+
+ QT5_XCOMPILE=1 waf configure
To use Qt6 set the want_qt6 attribute, ie:
conf.want_qt6 = True;
"""
-from __future__ import with_statement
-
try:
from xml.sax import make_parser
from xml.sax.handler import ContentHandler
else:
has_xml = True
-import os, sys, re
+import os, re
from waflib.Tools import cxx
from waflib import Build, Task, Utils, Options, Errors, Context
from waflib.TaskGen import feature, after_method, extension, before_method
File extension for the user interface (.ui) files
"""
-EXT_QT5 = ['.cpp', '.cc', '.cxx', '.C']
+EXT_QT5 = ['.cpp', '.cc', '.cxx', '.C', '.c++']
"""
File extensions of C++ files that may require a .moc processing
"""
else:
self.qt_vars = Utils.to_list(getattr(self, 'qt5_vars', []))
+ if self.want_qt6:
+ self.qt_vars_opt = Utils.to_list(getattr(self, 'qt6_vars_opt', []))
+ else:
+ self.qt_vars_opt = Utils.to_list(getattr(self, 'qt5_vars_opt', []))
+
+ qt_ver = '6' if self.want_qt6 else '5'
+
+ if len(self.qt_vars) > 0:
+ core = 'Qt%sCore' % qt_ver
+ if not core in self.qt_vars:
+ self.fatal('%s not found in qt%s_vars, Qt will not work without it.' % (core, qt_ver))
+
self.find_qt5_binaries()
+ self.set_qt_env()
self.set_qt5_libs_dir()
+ self.set_qt_makespecs_dir()
+ self.set_qt_makespec()
+ self.qt_check_static()
self.set_qt5_libs_to_check()
- self.set_qt5_defines()
self.find_qt5_libraries()
self.add_qt5_rpath()
self.simplify_qt5_libs()
feature = 'qt6' if self.want_qt6 else 'qt5'
- # Qt5 may be compiled with '-reduce-relocations' which requires dependent programs to have -fPIE or -fPIC?
- frag = '#include <QMap>\nint main(int argc, char **argv) {QMap<int,int> m;return m.keys().size();}\n'
- uses = 'QT6CORE' if self.want_qt6 else 'QT5CORE'
-
# Qt6 requires C++17 (https://www.qt.io/blog/qt-6.0-released)
- flag_list = []
+ flags_candidates = []
if self.env.CXX_NAME == 'msvc':
stdflag = '/std:c++17' if self.want_qt6 else '/std:c++11'
- flag_list = [[], ['/Zc:__cplusplus', '/permissive-', stdflag]]
+ flags_candidates = [[], ['/Zc:__cplusplus', '/permissive-', stdflag]]
else:
+ # Qt5 and fallback: guess the flags
stdflag = '-std=c++17' if self.want_qt6 else '-std=c++11'
- flag_list = [[], '-fPIE', '-fPIC', stdflag, [stdflag, '-fPIE'], [stdflag, '-fPIC']]
- for flag in flag_list:
+ flags_candidates = [[], ['-fPIE'], ['-fPIC'], [stdflag], [stdflag, '-fPIE'], [stdflag, '-fPIC']]
+
+ # Qt6 has a new build option called 'FEATURE_no_direct_extern_access',
+ # which some distros might use. There's no need to do this on Windows
+ # as Windows doesn't have this issue by nature of dllexport and dllimport.
+ #
+ # Qt6 does not raise any build error when PIC and PIE are both
+ # used at the same time which is the default for some compilers
+ if self.want_qt6 and self.env.DEST_BINFMT == 'elf':
+ mkspecsdir = self.env.QTMKSPECSDIR
+
+ qt6_flags = []
+ qconfig_pri = os.path.join(mkspecsdir, 'qconfig.pri')
+
+ qt_config = {}
+ self.start_msg('Reading qconfig.pri')
+ try:
+ qt_config = self.read_pri(qconfig_pri)
+ self.end_msg('ok')
+ except OSError as e:
+ self.end_msg('unavailable (incomplete detection)', 'YELLOW')
+ self.to_log('File %r is unreadable %r' % (qconfig_pri, e))
+ else:
+ if 'no_direct_extern_access' in qt_config['QT_CONFIG']:
+ if self.env.CXX_NAME == 'gcc':
+ qt6_flags.append('-mno-direct-extern-access')
+ elif self.env.CXX_NAME == 'clang':
+ qt6_flags.append('-fno-direct-access-external-data')
+ self.to_log('Qt has been built with `no_direct_extern_access` enabled, this feature has only been tested with ld.bfd as linker.\nUse ld.gold/ld.mold/ld.lld at your own risk. If you do not know what linker you are using, you are most likely using ld.bfd.')
+
+ if 'reduce_relocations' in qt_config['QT_CONFIG']:
+ if self.env.CXX_NAME in ('gcc', 'clang'):
+ qt6_flags.append('-fPIC')
+
+ if qt6_flags:
+ # Try this configuration first
+ qt6_flags.append(stdflag)
+ flags_candidates.insert(0, qt6_flags)
+
+ frag = '#include <QMap>\nint main(int argc, char **argv) {QMap<int,int> m;return m.keys().size();}\n'
+ uses = 'QT6CORE' if self.want_qt6 else 'QT5CORE'
+
+ for flags in flags_candidates:
msg = 'See if Qt files compile '
- if flag:
- msg += 'with %s' % flag
+ if flags:
+ msg += 'with %r' % (' '.join(flags))
try:
- self.check(features=feature + ' cxx', use=uses, uselib_store=feature, cxxflags=flag, fragment=frag, msg=msg)
+ self.check(features=feature + ' cxx', use=uses, uselib_store=feature, cxxflags=flags, fragment=frag, msg=msg)
except self.errors.ConfigurationError:
pass
else:
env = self.env
qt_ver = '6' if self.want_qt6 else '5'
- qtlibs = getattr(Options.options, 'qtlibs', None) or self.environ.get('QT' + qt_ver + '_LIBDIR')
+ qtlibs = ""
+ try:
+ qtlibs = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip()
+ except Errors.WafError:
+ # CHECK (Rafaël Kooi): Is this still necessary?
+ qtdir = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_PREFIX']).strip()
+ qtlibs = os.path.join(qtdir, 'lib')
- if not qtlibs:
- try:
- qtlibs = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip()
- except Errors.WafError:
- qtdir = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_PREFIX']).strip()
- qtlibs = os.path.join(qtdir, 'lib')
+ if not os.path.exists(qtlibs):
+ self.fatal('Unable to find Qt lib directory.')
- self.msg('Found the Qt' + qt_ver + ' library path', qtlibs)
+ self.msg('Checking for Qt' + qt_ver + ' library path', qtlibs)
env.QTLIBS = qtlibs
@conf
-def find_single_qt5_lib(self, name, uselib, qtlibs, qtincludes, force_static):
+def configure_single_qt_lib(self, name, uselib):
env = self.env
- qt_ver = '6' if self.want_qt6 else '5'
- if force_static:
- exts = ('.a', '.lib')
+ if self.qt_static:
prefix = 'STLIB'
else:
- exts = ('.so', '.lib')
prefix = 'LIB'
- def lib_names():
- for x in exts:
- for k in ('', qt_ver) if Utils.is_win32 else ['']:
- for p in ('lib', ''):
- yield (p, name, k, x)
-
- for tup in lib_names():
- k = ''.join(tup)
- path = os.path.join(qtlibs, k)
- if os.path.exists(path):
- if env.DEST_OS == 'win32':
- libval = ''.join(tup[:-1])
- else:
- libval = name
- env.append_unique(prefix + '_' + uselib, libval)
- env.append_unique('%sPATH_%s' % (prefix, uselib), qtlibs)
- env.append_unique('INCLUDES_' + uselib, qtincludes)
- env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, name.replace('Qt' + qt_ver, 'Qt')))
- return k
- return False
+ modules_dir = os.path.join(self.env.QTMKSPECSDIR, 'modules')
+
+ filename = os.path.join(modules_dir, 'qt_lib_%s.pri' % name)
+ if not os.path.exists(filename):
+ return False
+
+ this_module = self.read_pri(filename)
+
+ def parse_info(module):
+ deps = list(set(module['depends'])) # Some dependencies can be listed twice
+ includes = module['includes'] if 'includes' in module else []
+ defines = module['DEFINES'] if 'DEFINES' in module else []
+
+ if 'CONFIG' in module and 'no_link' in module['CONFIG']:
+ libs = []
+ else:
+ libs = module['module']
+
+ for dep in deps:
+ filename = os.path.join(modules_dir, 'qt_lib_%s.pri' % dep)
+ dep_mod = self.read_pri(filename)
+
+ dep_info = parse_info(dep_mod)
+ includes += dep_info['includes']
+ libs += dep_info['libs']
+ defines += dep_info['defines']
+
+ info = {}
+ info['includes'] = includes
+ info['libs'] = libs
+ info['defines'] = defines
+
+ return info
+
+ info = parse_info(this_module)
+ includes = [self.env.QTMKSPECPATH] + list(set(info['includes']))
+ libs = list(set(info['libs']))
+ defines = list(set(info['defines']))
+
+ env['HAVE_' + uselib] = 1
+
+ if len(libs) > 0:
+ env.append_unique(prefix + '_' + uselib, libs)
+
+ env.append_unique('INCLUDES_' + uselib, includes)
+ env.append_unique('%sPATH_%s' % (prefix, uselib), this_module['libs'][0])
+ env.append_unique('DEFINES_' + uselib, defines)
+ env.append_unique('DEFINES', 'HAVE_%s=1' % uselib)
+
+ return 'yes'
@conf
-def find_qt5_libraries(self):
+def qt_pkg_config_path(self):
env = self.env
qt_ver = '6' if self.want_qt6 else '5'
- qtincludes = self.environ.get('QT' + qt_ver + '_INCLUDES') or self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip()
- force_static = self.environ.get('QT' + qt_ver + '_FORCE_STATIC')
+ path = '%s:%s:%s/pkgconfig:/usr/lib/qt%s/lib/pkgconfig:/opt/qt%s/lib/pkgconfig:/usr/lib/qt%s/lib:/opt/qt%s/lib' % (
+ self.environ.get('PKG_CONFIG_PATH', ''),
+ env.QTLIBS,
+ env.QTLIBS,
+ qt_ver,
+ qt_ver,
+ qt_ver,
+ qt_ver
+ )
+
+ return path
+@conf
+def find_qt5_libraries(self):
+ env = self.env
+ qt_ver = '6' if self.want_qt6 else '5'
try:
if self.environ.get('QT' + qt_ver + '_XCOMPILE'):
self.fatal('QT' + qt_ver + '_XCOMPILE Disables pkg-config detection')
self.check_cfg(atleast_pkgconfig_version='0.1')
except self.errors.ConfigurationError:
+ pass
+
+ qconfig_pri = os.path.join(self.env.QTMKSPECSDIR, 'qconfig.pri')
+ qt_config = self.read_pri(qconfig_pri)
+
+ if 'pkg-config' in qt_config['enabled_features'] and 'PKGCONFIG' in self.env:
+ self.qt_use_pkg_config = True
+ else:
+ self.qt_use_pkg_config = False
+
+ if not self.qt_use_pkg_config:
for i in self.qt_vars:
uselib = i.upper()
+
+ if not i in self.qt_var2mod:
+ self.msg('Checking for %s' % i, False)
+ continue
+
+ modname = self.qt_var2mod[i]
+
if Utils.unversioned_sys_platform() == 'darwin':
# Since at least qt 4.7.3 each library locates in separate directory
fwk = i.replace('Qt' + qt_ver, 'Qt')
self.msg('Checking for %s' % i, False, 'YELLOW')
env.append_unique('INCLUDES_' + uselib, os.path.join(env.QTLIBS, frameworkName, 'Headers'))
else:
- ret = self.find_single_qt5_lib(i, uselib, env.QTLIBS, qtincludes, force_static)
- if not force_static and not ret:
- ret = self.find_single_qt5_lib(i, uselib, env.QTLIBS, qtincludes, True)
+ ret = self.configure_single_qt_lib(modname, uselib)
self.msg('Checking for %s' % i, ret, 'GREEN' if ret else 'YELLOW')
else:
- path = '%s:%s:%s/pkgconfig:/usr/lib/qt%s/lib/pkgconfig:/opt/qt%s/lib/pkgconfig:/usr/lib/qt%s/lib:/opt/qt%s/lib' % (
- self.environ.get('PKG_CONFIG_PATH', ''), env.QTLIBS, env.QTLIBS, qt_ver, qt_ver, qt_ver, qt_ver)
+ path = self.qt_pkg_config_path()
for i in self.qt_vars:
- self.check_cfg(package=i, args='--cflags --libs', mandatory=False, force_static=force_static, pkg_config_path=path)
+ self.check_cfg(
+ package=i,
+ args='--cflags --libs',
+ mandatory=False,
+ force_static=self.qt_static,
+ pkg_config_path=path)
@conf
def simplify_qt5_libs(self):
Since library paths make really long command-lines,
and since everything depends on qtcore, remove the qtcore ones from qtgui, etc
"""
+ qt_ver = '6' if self.want_qt6 else '5'
env = self.env
- def process_lib(vars_, coreval):
+
+ def process(vars_, prefix, coreval):
for d in vars_:
var = d.upper()
- if var == 'QTCORE':
+ if var == 'QT%sCORE' % qt_ver:
continue
- value = env['LIBPATH_'+var]
+ value = env[prefix + var]
if value:
core = env[coreval]
accu = []
if lib in core:
continue
accu.append(lib)
- env['LIBPATH_'+var] = accu
- process_lib(self.qt_vars, 'LIBPATH_QTCORE')
+ env[prefix + var] = accu
+
+ pre = ''
+ if self.qt_static:
+ pre = 'ST'
+
+ process(self.qt_vars, pre + 'LIBPATH_', '%sLIBPATH_QT%sCORE' % (pre, qt_ver))
+ process(self.qt_vars, pre + 'LIB_', '%sLIB_QT%sCORE' % (pre, qt_ver))
+ process(self.qt_vars, 'DEFINES_', 'DEFINES_QT%sCORE' % qt_ver)
+ process(self.qt_vars, 'INCLUDES_', 'INCLUDES_QT%sCORE' % qt_ver)
@conf
def add_qt5_rpath(self):
"""
Defines rpath entries for Qt libraries
"""
+ qt_ver = '6' if self.want_qt6 else '5'
env = self.env
+
+ if self.qt_static:
+ return
+
if getattr(Options.options, 'want_rpath', False):
def process_rpath(vars_, coreval):
for d in vars_:
core = env[coreval]
accu = []
for lib in value:
- if var != 'QTCORE':
+ if var != 'QT%sCORE' % qt_ver:
if lib in core:
continue
accu.append('-Wl,--rpath='+lib)
env['RPATH_' + var] = accu
- process_rpath(self.qt_vars, 'LIBPATH_QTCORE')
+ process_rpath(self.qt_vars, 'LIBPATH_QT%sCORE' % qt_ver)
@conf
def set_qt5_libs_to_check(self):
qt_ver = '6' if self.want_qt6 else '5'
+ # We are forced to find all modules that are installed, because some modules
+ # will have a different name from their actual library. Like Qt5's qmltest
+ # module being the library Qt5QuickTest. Without doing this libraries like
+ # those will be unfindable. This is also the case for Qt6.
+
+ self.qt_var2mod = {}
+
+ populate = False
if not self.qt_vars:
- dirlst = Utils.listdir(self.env.QTLIBS)
+ populate = True
- pat = self.env.cxxshlib_PATTERN
- if Utils.is_win32:
- pat = pat.replace('.dll', '.lib')
- if self.environ.get('QT' + qt_ver + '_FORCE_STATIC'):
- pat = self.env.cxxstlib_PATTERN
- if Utils.unversioned_sys_platform() == 'darwin':
- pat = r"%s\.framework"
+ modules_dir = os.path.join(self.env.QTMKSPECSDIR, 'modules')
+ dirlst = Utils.listdir(modules_dir)
- if self.want_qt6:
- # match Qt6Name or QtName but not Qt5Name
- mid_pattern = pat % 'Qt6?(?P<name>[^5]\\w+)'
- else:
- # match Qt5Name or QtName but not Qt6Name
- mid_pattern = pat % 'Qt5?(?P<name>[^6]\\w+)'
- re_qt = re.compile('^%s$' % mid_pattern)
+ for x in sorted(dirlst):
+ if x.startswith('qt_lib_') and (x.endswith('_private.pri') or x.endswith('impl.pri')):
+ continue
+ if not x.startswith('qt_lib_'):
+ continue
+
+ module = self.read_pri(os.path.join(modules_dir, x))
+ var = module['module'][0]
+ mod = module['QT_MODULES'][0]
+
+ self.qt_var2mod[var] = mod
+
+ if populate:
+ self.qt_vars.append(var)
- for x in sorted(dirlst):
- m = re_qt.match(x)
- if m:
- self.qt_vars.append("Qt%s%s" % (qt_ver, m.group('name')))
- if not self.qt_vars:
- self.fatal('cannot find any Qt%s library (%r)' % (qt_ver, self.env.QTLIBS))
+ if not self.qt_var2mod:
+ self.fatal('cannot find any Qt%s library (%r)' % (qt_ver, modules_dir))
qtextralibs = getattr(Options.options, 'qtextralibs', None)
if qtextralibs:
self.qt_vars.extend(qtextralibs.split(','))
@conf
-def set_qt5_defines(self):
+def set_qt_env(self):
+ env = self.env
+ env.QTARCHDATA = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_ARCHDATA']).strip()
+ env.QTINCLUDES = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip()
+ env.QTBINS = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_BINS']).strip()
+
+@conf
+def qt_check_static(self):
qt_ver = '6' if self.want_qt6 else '5'
- if sys.platform != 'win32':
- return
+ qconfig_pri = os.path.join(self.env.QTMKSPECSDIR, 'qconfig.pri')
+ qt_config = self.read_pri(qconfig_pri)
+
+ static = False
+ if 'static' in qt_config['enabled_features']:
+ static = True
+
+ dynamic = False
+ if 'shared' in qt_config['enabled_features']:
+ dynamic = True
+
+ force_static = self.environ.get('QT%s_FORCE_STATIC' % qt_ver)
+
+	if force_static and not static:
+ self.fatal('Qt libraries are forced static, but Qt has not been built statically.')
+
+ if force_static or (static and not dynamic):
+ self.qt_static = True
+ else:
+ self.qt_static = False
+
+@conf
+def set_qt_makespecs_dir(self):
+ ver = '6' if self.want_qt6 else '5'
+
+ if self.want_qt6 and 'PKGCONFIG' in self.env:
+ path = self.qt_pkg_config_path()
+
+		mkspecsdir = self.check_cfg(
+			package = 'Qt6Platform',
+			args = ['--variable', 'mkspecsdir'],
+			pkg_config_path = path,
+			quiet = True,
+			mandatory = False)
+		# check_cfg returns None when the package is absent (mandatory=False),
+		# so guard before stripping
+		mkspecsdir = (mkspecsdir or '').strip()
+
+		found = mkspecsdir != ''
+ if found:
+ self.msg(
+ 'Checking for Qt%s mkspecs path' % ver,
+ mkspecsdir if found else False)
+
+ self.env.QTMKSPECSDIR = mkspecsdir
+ return
+
+ mkspecsdir = os.path.join(self.env.QTARCHDATA, 'mkspecs')
+
+ found = os.path.exists(mkspecsdir)
+ self.msg(
+ 'Checking for Qt%s mkspecs path' % ver,
+ mkspecsdir if found else 'not found',
+ color = 'GREEN' if found else 'YELLOW')
+
+ if not found:
+ self.fatal('Unable to find the Qt%s mkspecs directory' % ver)
+
+ self.env.QTMKSPECSDIR = mkspecsdir
- for x in self.qt_vars:
- y=x.replace('Qt' + qt_ver, 'Qt')[2:].upper()
- self.env.append_unique('DEFINES_%s' % x.upper(), 'QT_%s_LIB' % y)
+@conf
+def set_qt_makespec(self):
+ # qmake makes no effort to detect what the actual mkspec is, by which I mean,
+ # it gives back the makespec Qt has been built with. We don't detect it and
+ # shouldn't either. Attempts at detecting the correct mkspec ended up being
+ # way too complex. The user will know what the correct mkspec is.
+ ver = '6' if self.want_qt6 else '5'
+
+ mkspec = getattr(Options.options, 'mkspec', None) or self.environ.get('QMAKESPEC')
+
+ if not mkspec:
+ try:
+ # We want XSPEC, as that is the spec used to build the target libraries.
+ mkspec = self.cmd_and_log(self.env.QMAKE + ['-query', 'QMAKE_XSPEC']).strip()
+ self.msg('Determining Qt%s makespec' % ver, mkspec)
+
+ mkspecdir = os.path.join(self.env.QTMKSPECSDIR, mkspec)
+
+ if not os.path.exists(mkspecdir):
+ mkspecdir = os.path.join(self.env.QTMKSPECSDIR, 'unsupported', mkspec)
+ except Errors.WafError:
+ self.fatal('Unable to determine Qt%s makespec' % ver)
+ else:
+ mkspecdir = os.path.join(self.env.QTMKSPECSDIR, mkspec)
+
+ if not os.path.exists(mkspecdir):
+ mkspecdir = os.path.join(self.env.QTMKSPECSDIR, 'unsupported', mkspec)
+ if not os.path.exists(mkspecdir):
+ self.fatal('Unable to determine Qt%s makespec' % ver)
+
+ self.msg('Determining Qt%s makespec' % ver, mkspec)
+
+ self.env.QTMKSPEC = mkspec
+ self.env.QTMKSPECPATH = mkspecdir
+
+@conf
+def read_pri(self, path):
+ """
+ Read information from a .pri file as a dict.
+
+ :param path: Path to the pri file
+ :type path: str
+ """
+ ver = '6' if self.want_qt6 else '5'
+
+ # Lines have a format of QT.$lib_name.$key = $value, or $key = $value
+ # = can also be += or -=. This regex grabs the $key and $value components.
+ keyval_re = re.compile(r'^(QT\.\w+\.){0,1}(?P<key>\w+) *\+{0,1}= *(?P<val>.+)?')
+
+ result = {}
+ with open(path, 'r') as f:
+ for line in f:
+ if line.strip() == '':
+ continue
+
+ matches = keyval_re.match(line)
+			# match() returns None for lines that are not assignments
+			if matches:
+ match = matches.groupdict()
+ values = Utils.to_list(match['val'])
+
+ def replace(value):
+ value = value.replace('$$QT_MODULE_LIB_BASE', self.env.QTLIBS)
+ value = value.replace('$$QT_MODULE_INCLUDE_BASE', self.env.QTINCLUDES)
+
+ # CHECK (Rafaël Kooi): Should this be QT_INSTALL_LIBEXEC instead?
+ value = value.replace('$$QT_MODULE_BIN_BASE', self.env.QTBINS)
+
+ return value
+
+ if values != None:
+ result[match['key']] = list(map(replace, values))
+ else:
+ result[match['key']] = list()
+
+ if 'module' in result and len(result['module']) == 0:
+ result['module'] = ['Qt' + ver + result['name'][0][2:]]
+
+ # Qt5 does not specify CONFIG in (some of?) its modules.
+ if not 'CONFIG' in result:
+ result['CONFIG'] = ['no_link']
+
+ return result
def options(opt):
"""
Command-line options
"""
opt.add_option('--want-rpath', action='store_true', default=False, dest='want_rpath', help='enable the rpath for qt libraries')
- for i in 'qtdir qtbin qtlibs'.split():
- opt.add_option('--'+i, type='string', default='', dest=i)
+
+ opt.add_option('--qtdir', type=str, default='', dest='qtdir', help='path to the root of the qt installation; to aid finding qmake')
+ opt.add_option('--qtbin', type=str, default='', dest='qtbin', help='path to the bin folder of the qt installation; to aid finding qmake')
opt.add_option('--translate', action='store_true', help='collect translation strings', dest='trans_qt5', default=False)
- opt.add_option('--qtextralibs', type='string', default='', dest='qtextralibs', help='additional qt libraries on the system to add to default ones, comma separated')
+ opt.add_option('--qtextralibs', type=str, default='', dest='qtextralibs', help='additional qt libraries on the system to add to default ones, comma separated')
+ opt.add_option('--makespec', type=str, default=None, dest='mkspec', help='override the qt makespec')
"""
Add the ``--with-ruby-archdir``, ``--with-ruby-libdir`` and ``--with-ruby-binary`` options
"""
- opt.add_option('--with-ruby-archdir', type='string', dest='rubyarchdir', help='Specify directory where to install arch specific files')
- opt.add_option('--with-ruby-libdir', type='string', dest='rubylibdir', help='Specify alternate ruby library path')
- opt.add_option('--with-ruby-binary', type='string', dest='rubybinary', help='Specify alternate ruby binary')
+ opt.add_option('--with-ruby-archdir', type=str, dest='rubyarchdir', help='Specify directory where to install arch specific files')
+ opt.add_option('--with-ruby-libdir', type=str, dest='rubylibdir', help='Specify alternate ruby library path')
+ opt.add_option('--with-ruby-binary', type=str, dest='rubybinary', help='Specify alternate ruby binary')
Logs.debug('tex: found the following bibunit files: %s', nodes)
return nodes
+known_tex_env_vars = ['TEXINPUTS', 'TEXFONTS', 'PKFONTS', 'TEXPKS', 'GFFONTS']
+"""Tex environment variables that are should cause rebuilds when the values change"""
+
exts_deps_tex = ['', '.ltx', '.tex', '.bib', '.pdf', '.png', '.eps', '.ps', '.sty']
"""List of typical file extensions included in latex files"""
"""
Compiles a tex/latex file.
+ A series of applications need to be run by setting certain environmental variables;
+ these variables are repeatedly regenerated during processing (self.env.env).
+
.. inheritance-diagram:: waflib.Tools.tex.latex waflib.Tools.tex.xelatex waflib.Tools.tex.pdflatex
:top-classes: waflib.Tools.tex.tex
"""
Execute the program **makeglossaries**
"""
+ def make_os_env_again(self):
+ if self.generator.env.env:
+ self.env.env = dict(self.generator.env.env)
+ else:
+ self.env.env = dict(os.environ)
+
def exec_command(self, cmd, **kw):
"""
Executes TeX commands without buffering (latex may prompt for inputs)
if g_bibtex_re.findall(ct):
self.info('calling bibtex')
- self.env.env = {}
- self.env.env.update(os.environ)
+ self.make_os_env_again()
self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
self.env.SRCFILE = aux_node.name[:-4]
self.check_status('error when calling bibtex', self.bibtex_fun())
for node in getattr(self, 'multibibs', []):
- self.env.env = {}
- self.env.env.update(os.environ)
+ self.make_os_env_again()
self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
self.env.SRCFILE = node.name[:-4]
self.check_status('error when calling bibtex', self.bibtex_fun())
self.info('calling makeindex')
self.env.SRCFILE = self.idx_node.name
- self.env.env = {}
+ self.make_os_env_again()
self.check_status('error when calling makeindex %s' % idx_path, self.makeindex_fun())
def bibtopic(self):
"""
Runs the TeX compiler once
"""
- self.env.env = {}
- self.env.env.update(os.environ)
+ self.make_os_env_again()
self.env.env.update({'TEXINPUTS': self.texinputs()})
self.env.SRCFILE = self.inputs[0].abspath()
self.check_status('error when calling latex', self.texfun())
class latex(tex):
"Compiles LaTeX files"
texfun, vars = Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
+ vars.append('TEXDEPS')
class pdflatex(tex):
"Compiles PdfLaTeX files"
texfun, vars = Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
+ vars.append('TEXDEPS')
class xelatex(tex):
"XeLaTeX files"
texfun, vars = Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}', shell=False)
+ vars.append('TEXDEPS')
class dvips(Task.Task):
"Converts dvi files to postscript"
elif self.type == 'xelatex':
task = self.create_task('xelatex', node, node.change_ext('.pdf'))
- task.env = self.env
+ # rebuild when particular environment variables changes are detected
+ task.make_os_env_again()
+ task.env.TEXDEPS = Utils.h_list([task.env.env.get(x, '') for x in known_tex_env_vars])
# add the manual dependencies
if deps_lst:
self.create_task('pdf2ps', task.outputs, node.change_ext('.ps'))
self.source = []
+
def configure(self):
"""
Find the programs tex, latex and others without raising errors.
except AttributeError:
self.bld.utest_results = [tup]
+class test_result(object):
+ def __init__(self, test_path, exit_code, out, err, task):
+ self.task = task
+ self.generator = task.generator
+ self.out = out
+ self.err = err
+ self.exit_code = exit_code
+ self.test_path = test_path
+
+ def __iter__(self):
+ yield self.test_path
+ yield self.exit_code
+ yield self.out
+ yield self.err
+
+ def __getitem__(self, idx):
+ return list(self)[idx]
+
@Task.deep_inputs
class utest(Task.Task):
"""
proc = Utils.subprocess.Popen(cmd, cwd=self.get_cwd().abspath(), env=self.get_test_env(),
stderr=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE, shell=isinstance(cmd,str))
(stdout, stderr) = proc.communicate()
- self.waf_unit_test_results = tup = (self.inputs[0].abspath(), proc.returncode, stdout, stderr)
+ self.waf_unit_test_results = tup = test_result(self.inputs[0].abspath(), proc.returncode, stdout, stderr, self)
testlock.acquire()
try:
return self.generator.add_test_results(tup)
tfail = len([x for x in lst if x[1]])
Logs.pprint('GREEN', ' tests that pass %d/%d' % (total-tfail, total))
- for (f, code, out, err) in lst:
- if not code:
- Logs.pprint('GREEN', ' %s' % f)
+ for result in lst:
+ if not result.exit_code:
+ Logs.pprint('GREEN', ' %s' % result.test_path)
Logs.pprint('GREEN' if tfail == 0 else 'RED', ' tests that fail %d/%d' % (tfail, total))
- for (f, code, out, err) in lst:
- if code:
- Logs.pprint('RED', ' %s' % f)
+ for result in lst:
+ if result.exit_code:
+ Logs.pprint('RED', ' %s' % result.test_path)
def set_exit_code(bld):
"""
bld.add_post_fun(waf_unit_test.set_exit_code)
"""
lst = getattr(bld, 'utest_results', [])
- for (f, code, out, err) in lst:
- if code:
+ for result in lst:
+ if result.exit_code:
msg = []
- if out:
- msg.append('stdout:%s%s' % (os.linesep, out.decode('utf-8')))
- if err:
- msg.append('stderr:%s%s' % (os.linesep, err.decode('utf-8')))
+ if result.out:
+ msg.append('stdout:%s%s' % (os.linesep, result.out.decode('utf-8')))
+ if result.err:
+ msg.append('stderr:%s%s' % (os.linesep, result.err.decode('utf-8')))
bld.fatal(os.linesep.join(msg))
opt.add_option('--clear-failed', action='store_true', default=False,
help='Force failed unit tests to run again next time', dest='clear_failed_tests')
opt.add_option('--testcmd', action='store', default=False, dest='testcmd',
- help='Run the unit tests using the test-cmd string example "--testcmd="valgrind --error-exitcode=1 %s" to run under valgrind')
+ help='Run the unit tests using the test-cmd string. For example --testcmd="valgrind --error-exitcode=1 %%s" to run under valgrind')
opt.add_option('--dump-test-scripts', action='store_true', default=False,
help='Create python scripts to help debug tests', dest='dump_test_scripts')
Cache access operations (copy to and from) are delegated to
independent pre-forked worker subprocesses.
-The following environment variables may be set:
-* WAFCACHE: several possibilities:
- - File cache:
- absolute path of the waf cache (~/.cache/wafcache_user,
- where `user` represents the currently logged-in user)
- - URL to a cache server, for example:
- export WAFCACHE=http://localhost:8080/files/
- in that case, GET/POST requests are made to urls of the form
- http://localhost:8080/files/000000000/0 (cache management is delegated to the server)
- - GCS, S3 or MINIO bucket
- gs://my-bucket/ (uses gsutil command line tool or WAFCACHE_CMD)
- s3://my-bucket/ (uses aws command line tool or WAFCACHE_CMD)
- minio://my-bucket/ (uses mc command line tool or WAFCACHE_CMD)
-* WAFCACHE_CMD: bucket upload/download command, for example:
+Usage::
+
+ def build(bld):
+ bld.load('wafcache')
+ ...
+
+To troubleshoot::
+
+ waf clean build --zone=wafcache
+
+General parameters
+^^^^^^^^^^^^^^^^^^
+
+Use the following environment variables
+
+* WAFCACHE, which can be
+
+ - An absolute path to the waf cache folder, by default::
+
+ ~/.cache/wafcache_user # `user` represents the currently logged-in user
+
+ - A URL to a cache server, for example::
+
+ export WAFCACHE=http://localhost:8080/files/
+ #in that case, `GET/POST` requests are made to urls of the form
+ #`http://localhost:8080/files/000000000/0` (cache management is delegated to the server)
+
+ - A GCS, S3 or MINIO bucket::
+
+ gs://my-bucket/ # (uses gsutil command line tool or WAFCACHE_CMD)
+ s3://my-bucket/ # (uses aws command line tool or WAFCACHE_CMD)
+ minio://my-bucket/ # (uses mc command line tool or WAFCACHE_CMD)
+
+* WAFCACHE_CMD: bucket upload/download command, for example::
+
WAFCACHE_CMD="gsutil cp %{SRC} %{TGT}"
- Note that the WAFCACHE bucket value is used for the source or destination
- depending on the operation (upload or download). For example, with:
- WAFCACHE="gs://mybucket/"
- the following commands may be run:
- gsutil cp build/myprogram gs://mybucket/aa/aaaaa/1
- gsutil cp gs://mybucket/bb/bbbbb/2 build/somefile
+
* WAFCACHE_NO_PUSH: if set, disables pushing to the cache
* WAFCACHE_VERBOSITY: if set, displays more detailed cache operations
* WAFCACHE_STATS: if set, displays cache usage statistics on exit
-File cache specific options:
- Files are copied using hard links by default; if the cache is located
- onto another partition, the system switches to file copies instead.
+Remote buckets
+^^^^^^^^^^^^^^
+
+The WAFCACHE bucket value is used for the source or destination
+depending on the operation (upload or download). For example, with::
+
+ WAFCACHE="gs://mybucket/"
+
+the following commands may be run::
+
+ gsutil cp build/myprogram gs://mybucket/aa/aaaaa/1
+ gsutil cp gs://mybucket/bb/bbbbb/2 build/somefile
+
+File cache
+^^^^^^^^^^
+
+Files are copied using hard links by default; if the cache is located
+onto another partition, the system switches to file copies instead.
+
+Additional environments can be set:
+
* WAFCACHE_TRIM_MAX_FOLDER: maximum amount of tasks to cache (1M)
* WAFCACHE_EVICT_MAX_BYTES: maximum amount of cache size in bytes (10GB)
-* WAFCACHE_EVICT_INTERVAL_MINUTES: minimum time interval to try
- and trim the cache (3 minutes)
+* WAFCACHE_EVICT_INTERVAL_MINUTES: minimum time interval to try and trim the cache (3 minutes)
+
+Asynchronous transfers
+^^^^^^^^^^^^^^^^^^^^^^
-Upload specific options:
* WAFCACHE_ASYNC_WORKERS: define a number of workers to upload results asynchronously
this may improve build performance with many/long file uploads
the default is unset (synchronous uploads)
* WAFCACHE_ASYNC_NOWAIT: do not wait for uploads to complete (default: False)
this requires asynchonous uploads to have an effect
-
-Usage::
-
- def build(bld):
- bld.load('wafcache')
- ...
-
-To troubleshoot::
-
- waf clean build --zone=wafcache
"""
-import atexit, base64, errno, fcntl, getpass, os, re, shutil, sys, time, threading, traceback, urllib3, shlex
+import atexit, base64, errno, getpass, os, re, shutil, sys, time, threading, traceback, shlex
try:
import subprocess32 as subprocess
except ImportError:
if e.errno == errno.ENOENT:
with open(lockfile, 'w') as f:
f.write('')
- return
else:
+ # any other errors such as permissions
raise
if st.st_mtime < time.time() - EVICT_INTERVAL_MINUTES * 60:
# check every EVICT_INTERVAL_MINUTES minutes if the cache is too big
- # OCLOEXEC is unnecessary because no processes are spawned
+ # OCLOEXEC is unnecessary because no cleaning processes are spawned
fd = os.open(lockfile, os.O_RDWR | os.O_CREAT, 0o755)
try:
try:
- fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
- except EnvironmentError:
- if WAFCACHE_VERBOSITY:
- sys.stderr.write('wafcache: another cleaning process is running\n')
+ import fcntl
+ except ImportError:
+ import msvcrt, ctypes, ctypes.wintypes
+ handle = msvcrt.get_osfhandle(fd)
+
+			kernel32 = ctypes.WinDLL('kernel32', use_last_error=True)
+			DWORD = ctypes.wintypes.DWORD
+			HANDLE = ctypes.wintypes.HANDLE
+			BOOL = ctypes.wintypes.BOOL
+			class DUMMYSTRUCTNAME(ctypes.Structure):
+				_fields_ = [('Offset', ctypes.wintypes.DWORD), ('OffsetHigh', DWORD)]
+			class DUMMYUNIONNAME(ctypes.Union):
+				_fields_ = [('_dummystructname', DUMMYSTRUCTNAME), ('Pointer', ctypes.c_void_p)]
+			class OVERLAPPED(ctypes.Structure):
+				_fields_ = [('Internal', ctypes.c_void_p), ('InternalHigh', ctypes.c_void_p), ('_dummyunionname', DUMMYUNIONNAME), ('hEvent', HANDLE)]
+
+			LockFileEx = kernel32.LockFileEx
+			LockFileEx.argtypes = [HANDLE, DWORD, DWORD, DWORD, DWORD, ctypes.POINTER(OVERLAPPED)]
+			LockFileEx.restype = BOOL
+
+			UnlockFileEx = kernel32.UnlockFileEx
+			UnlockFileEx.argtypes = [HANDLE, DWORD, DWORD, DWORD, ctypes.POINTER(OVERLAPPED)]
+			UnlockFileEx.restype = BOOL
+
+ if LockFileEx(handle, 3, 0, 1, 0, ctypes.pointer(OVERLAPPED())):
+ try:
+ lru_trim()
+ os.utime(lockfile, None)
+ finally:
+					UnlockFileEx(handle, 0, 1, 0, ctypes.pointer(OVERLAPPED()))
+ else:
+				last_error = ctypes.get_last_error()
+ if last_error == 33:
+ if WAFCACHE_VERBOSITY:
+ sys.stderr.write('wafcache: another cleaning process is running\n')
+ else:
+ raise OSError(last_error)
+
else:
- # now dow the actual cleanup
- lru_trim()
- os.utime(lockfile, None)
+ try:
+ fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
+ except EnvironmentError:
+ if WAFCACHE_VERBOSITY:
+ sys.stderr.write('wafcache: another cleaning process is running\n')
+ else:
+				# now do the actual cleanup
+ lru_trim()
+ os.utime(lockfile, None)
finally:
os.close(fd)
class netcache(object):
def __init__(self):
+ import urllib3
self.http = urllib3.PoolManager()
def url_of(self, sig, i):
import os
import re
-from waflib import Task
+from waflib import Task, Utils
from waflib.TaskGen import extension
from waflib.Tools import c_preproc
from waflib import Utils
v = conf.env
if not v.WINRC:
if v.CC_NAME == 'msvc':
- conf.find_program('RC', var='WINRC', path_list=v.PATH)
+ if Utils.is_win32:
+ conf.find_program('RC', var='WINRC', path_list=v.PATH)
+ else:
+ llvm_env_path = conf.environ.get('LLVM_PATH')
+ llvm_path = None
+ if llvm_env_path:
+ llvm_path = llvm_env_path
+ elif 'LLVM_PATH' in v:
+ llvm_path = v['LLVM_PATH']
+
+ paths = v.PATH
+ if llvm_path:
+ paths = [llvm_path] + v.PATH
+ conf.find_program('llvm-rc', var='WINRC', path_list=paths)
+
v.WINRC_TGT_F = '/fo'
v.WINRC_SRC_F = ''
else:
through Python versions 2.5 to 3.X and across different platforms (win32, linux, etc)
"""
-from __future__ import with_statement
-
import atexit, os, sys, errno, inspect, re, datetime, platform, base64, signal, functools, time, shlex
try:
except IndexError:
filepath = os.path.dirname(os.path.abspath(__file__)) + os.sep + 'processor.py'
cmd = [sys.executable, '-c', readf(filepath)]
- return subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, bufsize=0, close_fds=not is_win32)
+ return subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
def run_prefork_process(cmd, kwargs, cargs):
"""
"""
if not kwargs.get('env'):
kwargs['env'] = dict(os.environ)
+
+ if hasattr(subprocess, 'DEVNULL') and 'stdin' not in kwargs:
+ # prevent accidental communication errors
+ kwargs['stdin'] = subprocess.DEVNULL
+
try:
obj = base64.b64encode(cPickle.dumps([cmd, kwargs, cargs]))
except (TypeError, AttributeError):
if not proc:
return run_regular_process(cmd, kwargs, cargs)
- proc.stdin.write(obj)
- proc.stdin.write('\n'.encode())
+ try:
+ proc.stdin.write(obj)
+ proc.stdin.write('\n'.encode())
+ except OSError:
+ proc.poll()
+ raise OSError('Preforked sub-process:%r is not receiving, status: %r' % (proc.pid, proc.returncode))
proc.stdin.flush()
obj = proc.stdout.readline()
if not obj:
- raise OSError('Preforked sub-process %r died' % proc.pid)
+ proc.poll()
+ raise OSError('Preforked sub-process:%r is not responding, status: %r' % (proc.pid, proc.returncode))
process_pool.append(proc)
lst = cPickle.loads(base64.b64decode(obj))
or falling back to subprocess.Popen. See :py:func:`waflib.Utils.run_prefork_process`
and :py:func:`waflib.Utils.run_regular_process`
"""
- if kwargs.get('stdout') and kwargs.get('stderr'):
+ if kwargs.get('stdout') and kwargs.get('stderr') and 'stdin' not in kwargs:
return run_prefork_process(cmd, kwargs, cargs)
else:
return run_regular_process(cmd, kwargs, cargs)
def atexit_pool():
for k in process_pool:
try:
- os.kill(k.pid, 9)
+ k.kill()
except OSError:
pass
- else:
- k.wait()
+
# see #1889
if (sys.hexversion<0x207000f and not is_win32) or sys.hexversion>=0x306000f:
atexit.register(atexit_pool)
-if os.environ.get('WAF_NO_PREFORK') or sys.platform == 'cli' or not sys.executable:
+if os.environ.get('WAF_NO_PREFORK') or sys.platform == 'cli' or not sys.executable or not hasattr(subprocess, 'DEVNULL'):
run_process = run_regular_process
get_process = alloc_process_pool = nada
-
wlock = Utils.threading.Lock()
try:
- from ctypes import Structure, windll, c_short, c_ushort, c_ulong, c_int, byref, c_wchar, POINTER, c_long
+ from ctypes import Structure, WinDLL, c_short, c_ushort, c_ulong, c_int, byref, c_wchar, POINTER, c_long
except ImportError:
class AnsiTerm(object):
STD_OUTPUT_HANDLE = -11
STD_ERROR_HANDLE = -12
- windll.kernel32.GetStdHandle.argtypes = [c_ulong]
- windll.kernel32.GetStdHandle.restype = c_ulong
- windll.kernel32.GetConsoleScreenBufferInfo.argtypes = [c_ulong, POINTER(CONSOLE_SCREEN_BUFFER_INFO)]
- windll.kernel32.GetConsoleScreenBufferInfo.restype = c_long
- windll.kernel32.SetConsoleTextAttribute.argtypes = [c_ulong, c_ushort]
- windll.kernel32.SetConsoleTextAttribute.restype = c_long
- windll.kernel32.FillConsoleOutputCharacterW.argtypes = [c_ulong, c_wchar, c_ulong, POINTER(COORD), POINTER(c_ulong)]
- windll.kernel32.FillConsoleOutputCharacterW.restype = c_long
- windll.kernel32.FillConsoleOutputAttribute.argtypes = [c_ulong, c_ushort, c_ulong, POINTER(COORD), POINTER(c_ulong) ]
- windll.kernel32.FillConsoleOutputAttribute.restype = c_long
- windll.kernel32.SetConsoleCursorPosition.argtypes = [c_ulong, POINTER(COORD) ]
- windll.kernel32.SetConsoleCursorPosition.restype = c_long
- windll.kernel32.SetConsoleCursorInfo.argtypes = [c_ulong, POINTER(CONSOLE_CURSOR_INFO)]
- windll.kernel32.SetConsoleCursorInfo.restype = c_long
+ kernel32 = WinDLL('kernel32')
+ kernel32.GetStdHandle.argtypes = [c_ulong]
+ kernel32.GetStdHandle.restype = c_ulong
+ kernel32.GetConsoleScreenBufferInfo.argtypes = [c_ulong, POINTER(CONSOLE_SCREEN_BUFFER_INFO)]
+ kernel32.GetConsoleScreenBufferInfo.restype = c_long
+ kernel32.SetConsoleTextAttribute.argtypes = [c_ulong, c_ushort]
+ kernel32.SetConsoleTextAttribute.restype = c_long
+ kernel32.FillConsoleOutputCharacterW.argtypes = [c_ulong, c_wchar, c_ulong, POINTER(COORD), POINTER(c_ulong)]
+ kernel32.FillConsoleOutputCharacterW.restype = c_long
+ kernel32.FillConsoleOutputAttribute.argtypes = [c_ulong, c_ushort, c_ulong, POINTER(COORD), POINTER(c_ulong) ]
+ kernel32.FillConsoleOutputAttribute.restype = c_long
+ kernel32.SetConsoleCursorPosition.argtypes = [c_ulong, POINTER(COORD) ]
+ kernel32.SetConsoleCursorPosition.restype = c_long
+ kernel32.SetConsoleCursorInfo.argtypes = [c_ulong, POINTER(CONSOLE_CURSOR_INFO)]
+ kernel32.SetConsoleCursorInfo.restype = c_long
class AnsiTerm(object):
"""
self.cursor_history = []
handle = (s.fileno() == 2) and STD_ERROR_HANDLE or STD_OUTPUT_HANDLE
- self.hconsole = windll.kernel32.GetStdHandle(handle)
+ self.hconsole = kernel32.GetStdHandle(handle)
self._sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
self._csinfo = CONSOLE_CURSOR_INFO()
- windll.kernel32.GetConsoleCursorInfo(self.hconsole, byref(self._csinfo))
+ kernel32.GetConsoleCursorInfo(self.hconsole, byref(self._csinfo))
# just to double check that the console is usable
self._orig_sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
- r = windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(self._orig_sbinfo))
+ r = kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(self._orig_sbinfo))
self._isatty = r == 1
def screen_buffer_info(self):
"""
Updates self._sbinfo and returns it
"""
- windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(self._sbinfo))
+ kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(self._sbinfo))
return self._sbinfo
def clear_line(self, param):
line_start = sbinfo.CursorPosition
line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
chars_written = c_ulong()
- windll.kernel32.FillConsoleOutputCharacterW(self.hconsole, c_wchar(' '), line_length, line_start, byref(chars_written))
- windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, line_length, line_start, byref(chars_written))
+ kernel32.FillConsoleOutputCharacterW(self.hconsole, c_wchar(' '), line_length, line_start, byref(chars_written))
+ kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, line_length, line_start, byref(chars_written))
def clear_screen(self, param):
mode = to_int(param, 0)
elif mode == 2: # Clear entire screen and return cursor to home
clear_start = COORD(0, 0)
clear_length = sbinfo.Size.X * sbinfo.Size.Y
- windll.kernel32.SetConsoleCursorPosition(self.hconsole, clear_start)
+ kernel32.SetConsoleCursorPosition(self.hconsole, clear_start)
else: # Clear from cursor position to end of screen
clear_start = sbinfo.CursorPosition
clear_length = ((sbinfo.Size.X - sbinfo.CursorPosition.X) + sbinfo.Size.X * (sbinfo.Size.Y - sbinfo.CursorPosition.Y))
chars_written = c_ulong()
- windll.kernel32.FillConsoleOutputCharacterW(self.hconsole, c_wchar(' '), clear_length, clear_start, byref(chars_written))
- windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, clear_length, clear_start, byref(chars_written))
+ kernel32.FillConsoleOutputCharacterW(self.hconsole, c_wchar(' '), clear_length, clear_start, byref(chars_written))
+ kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, clear_length, clear_start, byref(chars_written))
def push_cursor(self, param):
sbinfo = self.screen_buffer_info()
def pop_cursor(self, param):
if self.cursor_history:
old_pos = self.cursor_history.pop()
- windll.kernel32.SetConsoleCursorPosition(self.hconsole, old_pos)
+ kernel32.SetConsoleCursorPosition(self.hconsole, old_pos)
def set_cursor(self, param):
y, sep, x = param.partition(';')
min(max(0, x), sbinfo.Size.X),
min(max(0, y), sbinfo.Size.Y)
)
- windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
+ kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
def set_column(self, param):
x = to_int(param, 1) - 1
min(max(0, x), sbinfo.Size.X),
sbinfo.CursorPosition.Y
)
- windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
+ kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
def move_cursor(self, x_offset=0, y_offset=0):
sbinfo = self.screen_buffer_info()
min(max(0, sbinfo.CursorPosition.X + x_offset), sbinfo.Size.X),
min(max(0, sbinfo.CursorPosition.Y + y_offset), sbinfo.Size.Y)
)
- windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
+ kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
def move_up(self, param):
self.move_cursor(y_offset = -to_int(param, 1))
elif c == 7: # negative
attr = (attr & 0xff88) | ((attr & 0x70) >> 4) | ((attr & 0x07) << 4)
- windll.kernel32.SetConsoleTextAttribute(self.hconsole, attr)
+ kernel32.SetConsoleTextAttribute(self.hconsole, attr)
def show_cursor(self,param):
self._csinfo.bVisible = 1
- windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(self._csinfo))
+ kernel32.SetConsoleCursorInfo(self.hconsole, byref(self._csinfo))
def hide_cursor(self,param):
self._csinfo.bVisible = 0
- windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(self._csinfo))
+ kernel32.SetConsoleCursorInfo(self.hconsole, byref(self._csinfo))
ansi_command_table = {
'A': move_up,
def writeconsole(self, txt):
chars_written = c_ulong()
- writeconsole = windll.kernel32.WriteConsoleA
+ writeconsole = kernel32.WriteConsoleA
if isinstance(txt, _type):
- writeconsole = windll.kernel32.WriteConsoleW
+ writeconsole = kernel32.WriteConsoleW
# MSDN says that there is a shared buffer of 64 KB for the console
# writes. Attempt to not get ERROR_NOT_ENOUGH_MEMORY, see waf issue #746
if sys.stdout.isatty() or sys.stderr.isatty():
handle = sys.stdout.isatty() and STD_OUTPUT_HANDLE or STD_ERROR_HANDLE
- console = windll.kernel32.GetStdHandle(handle)
+ console = kernel32.GetStdHandle(handle)
sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
def get_term_cols():
- windll.kernel32.GetConsoleScreenBufferInfo(console, byref(sbinfo))
+ kernel32.GetConsoleScreenBufferInfo(console, byref(sbinfo))
# Issue 1401 - the progress bar cannot reach the last character
return sbinfo.Size.X - 1
+++ /dev/null
-#! /usr/bin/env python
-# encoding: utf-8
-# Yannick LM 2011
-
-"""
-Support for the boo programming language, for example::
-
- bld(features = "boo", # necessary feature
- source = "src.boo", # list of boo files
- gen = "world.dll", # target
- type = "library", # library/exe ("-target:xyz" flag)
- name = "world" # necessary if the target is referenced by 'use'
- )
-"""
-
-from waflib import Task
-from waflib.Configure import conf
-from waflib.TaskGen import feature, after_method, before_method, extension
-
-@extension('.boo')
-def boo_hook(self, node):
- # Nothing here yet ...
- # TODO filter the non-boo source files in 'apply_booc' and remove this method
- pass
-
-@feature('boo')
-@before_method('process_source')
-def apply_booc(self):
- """Create a booc task """
- src_nodes = self.to_nodes(self.source)
- out_node = self.path.find_or_declare(self.gen)
-
- self.boo_task = self.create_task('booc', src_nodes, [out_node])
-
- # Set variables used by the 'booc' task
- self.boo_task.env.OUT = '-o:%s' % out_node.abspath()
-
- # type is "exe" by default
- type = getattr(self, "type", "exe")
- self.boo_task.env.BOO_TARGET_TYPE = "-target:%s" % type
-
-@feature('boo')
-@after_method('apply_boo')
-def use_boo(self):
- """"
- boo applications honor the **use** keyword::
- """
- dep_names = self.to_list(getattr(self, 'use', []))
- for dep_name in dep_names:
- dep_task_gen = self.bld.get_tgen_by_name(dep_name)
- if not dep_task_gen:
- continue
- dep_task_gen.post()
- dep_task = getattr(dep_task_gen, 'boo_task', None)
- if not dep_task:
- # Try a cs task:
- dep_task = getattr(dep_task_gen, 'cs_task', None)
- if not dep_task:
- # Try a link task:
- dep_task = getattr(dep_task, 'link_task', None)
- if not dep_task:
- # Abort ...
- continue
- self.boo_task.set_run_after(dep_task) # order
- self.boo_task.dep_nodes.extend(dep_task.outputs) # dependency
- self.boo_task.env.append_value('BOO_FLAGS', '-reference:%s' % dep_task.outputs[0].abspath())
-
-class booc(Task.Task):
- """Compiles .boo files """
- color = 'YELLOW'
- run_str = '${BOOC} ${BOO_FLAGS} ${BOO_TARGET_TYPE} ${OUT} ${SRC}'
-
-@conf
-def check_booc(self):
- self.find_program('booc', 'BOOC')
- self.env.BOO_FLAGS = ['-nologo']
-
-def configure(self):
- """Check that booc is available """
- self.check_booc()
-
def options(opt):
opt = opt.add_option_group('Boost Options')
- opt.add_option('--boost-includes', type='string',
+ opt.add_option('--boost-includes', type=str,
default='', dest='boost_includes',
help='''path to the directory where the boost includes are,
e.g., /path/to/boost_1_55_0/stage/include''')
- opt.add_option('--boost-libs', type='string',
+ opt.add_option('--boost-libs', type=str,
default='', dest='boost_libs',
help='''path to the directory where the boost libs are,
e.g., path/to/boost_1_55_0/stage/lib''')
opt.add_option('--boost-mt', action='store_true',
default=False, dest='boost_mt',
help='select multi-threaded libraries')
- opt.add_option('--boost-abi', type='string', default='', dest='boost_abi',
+ opt.add_option('--boost-abi', type=str, default='', dest='boost_abi',
help='''select libraries with tags (gd for debug, static is automatically added),
see doc Boost, Getting Started, chapter 6.1''')
opt.add_option('--boost-linkage_autodetect', action="store_true", dest='boost_linkage_autodetect',
help="auto-detect boost linkage options (don't get used to it / might break other stuff)")
- opt.add_option('--boost-toolset', type='string',
+ opt.add_option('--boost-toolset', type=str,
default='', dest='boost_toolset',
help='force a toolset e.g. msvc, vc90, \
gcc, mingw, mgw45 (default: auto)')
py_version = '%d%d' % (sys.version_info[0], sys.version_info[1])
- opt.add_option('--boost-python', type='string',
+ opt.add_option('--boost-python', type=str,
default=py_version, dest='boost_python',
help='select the lib python with this version \
(default: %s)' % py_version)
# Thomas Nagy, 2015
"""
-Force files to depend on the timestamps of those located in the build directory. You may
+Force files to also depend on the timestamps of those located in the build directory. You may
want to use this to force partial rebuilds, see playground/track_output_files/ for a working example.
Note that there is a variety of ways to implement this, one may want use timestamps on source files too for example,
-or one may want to hash the files in the source directory only under certain conditions (md5_tstamp tool)
-or to hash the file in the build directory with its timestamp
+or one may want to hash the files in the source directory only under certain conditions (md5_tstamp tool)
"""
import os
if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode:
return Utils.h_file(self.abspath())
- try:
- # add the creation time to the signature
- return self.sig + str(os.stat(self.abspath()).st_mtime)
- except AttributeError:
- return None
+ val = Utils.h_file(self.abspath()) + str(os.stat(self.abspath()).st_mtime).encode('latin-1')
+ return val
Node.Node.get_bld_sig = get_bld_sig
ctx.load('build_logs')
"""
-import atexit, sys, time, os, shutil, threading
+import atexit, datetime, sys, os, shutil, threading
from waflib import ansiterm, Logs, Context
# adding the logs under the build/ directory will clash with the clean/ command
up = os.path.dirname(Context.g_module.__file__)
except AttributeError:
up = '.'
-LOGFILE = os.path.join(up, 'logs', time.strftime('%Y_%m_%d_%H_%M.log'))
+LOGFILE = os.path.join(up, 'logs', datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S_%f.log'))
wlock = threading.Lock()
class log_to_file(object):
#!/usr/bin/env python
# -*- coding: utf-8 vi:ts=4:noexpandtab
-import subprocess, shlex, sys
+import shlex
+from waflib import Errors
from waflib.Tools import ccroot, gcc, gxx
from waflib.Configure import conf
-from waflib.TaskGen import after_method, feature
from waflib.Tools.compiler_c import c_compiler
from waflib.Tools.compiler_cxx import cxx_compiler
"""
Emscripten doesn't support processing '-' like clang/gcc
"""
-
dummy = conf.cachedir.parent.make_node("waf-emscripten.c")
dummy.write("")
cmd = cc + ['-dM', '-E', '-x', 'c', dummy.abspath()]
env = conf.env.env or None
try:
- p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
- out = p.communicate()[0]
- except Exception as e:
- conf.fatal('Could not determine emscripten version %r: %s' % (cmd, e))
-
- if not isinstance(out, str):
- out = out.decode(sys.stdout.encoding or 'latin-1')
+ out, err = conf.cmd_and_log(cmd, output=0, env=env)
+ except Errors.WafError as e:
+ conf.fatal('Could not determine the emscripten version %r: %s' % (cmd, e))
k = {}
out = out.splitlines()
"""
import re
-from waflib import Utils
+from waflib import Errors, Utils
from waflib.Tools import ccroot,ar
from waflib.Configure import conf
def get_sxc_version(conf, fc):
version_re = re.compile(r"C\+\+/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
cmd = fc + ['-V']
- p = Utils.subprocess.Popen(cmd, stdin=False, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=None)
- out, err = p.communicate()
+
+ try:
+ out, err = conf.cmd_and_log(cmd, output=0)
+ except Errors.WafError:
+ conf.fatal('Could not determine an sxcc version %r' % cmd)
if out:
match = version_re(out)
+++ /dev/null
-#!/usr/bin/env python\r
-# encoding: utf-8\r
-# Anton Feldmann, 2012\r
-# "Base for cabal"\r
-\r
-from waflib import Task, Utils\r
-from waflib.TaskGen import extension\r
-from waflib.Utils import threading\r
-from shutil import rmtree\r
-\r
-lock = threading.Lock()\r
-registering = False\r
-\r
-def configure(self):\r
- self.find_program('cabal', var='CABAL')\r
- self.find_program('ghc-pkg', var='GHCPKG')\r
- pkgconfd = self.bldnode.abspath() + '/package.conf.d'\r
- self.env.PREFIX = self.bldnode.abspath() + '/dist'\r
- self.env.PKGCONFD = pkgconfd\r
- if self.root.find_node(pkgconfd + '/package.cache'):\r
- self.msg('Using existing package database', pkgconfd, color='CYAN')\r
- else:\r
- pkgdir = self.root.find_dir(pkgconfd)\r
- if pkgdir:\r
- self.msg('Deleting corrupt package database', pkgdir.abspath(), color ='RED')\r
- rmtree(pkgdir.abspath())\r
- pkgdir = None\r
-\r
- self.cmd_and_log(self.env.GHCPKG + ['init', pkgconfd])\r
- self.msg('Created package database', pkgconfd, color = 'YELLOW' if pkgdir else 'GREEN')\r
-\r
-@extension('.cabal')\r
-def process_cabal(self, node):\r
- out_dir_node = self.bld.root.find_dir(self.bld.out_dir)\r
- package_node = node.change_ext('.package')\r
- package_node = out_dir_node.find_or_declare(package_node.name)\r
- build_node = node.parent.get_bld()\r
- build_path = build_node.abspath()\r
- config_node = build_node.find_or_declare('setup-config')\r
- inplace_node = build_node.find_or_declare('package.conf.inplace')\r
-\r
- config_task = self.create_task('cabal_configure', node)\r
- config_task.cwd = node.parent.abspath()\r
- config_task.depends_on = getattr(self, 'depends_on', '')\r
- config_task.build_path = build_path\r
- config_task.set_outputs(config_node)\r
-\r
- build_task = self.create_task('cabal_build', config_node)\r
- build_task.cwd = node.parent.abspath()\r
- build_task.build_path = build_path\r
- build_task.set_outputs(inplace_node)\r
-\r
- copy_task = self.create_task('cabal_copy', inplace_node)\r
- copy_task.cwd = node.parent.abspath()\r
- copy_task.depends_on = getattr(self, 'depends_on', '')\r
- copy_task.build_path = build_path\r
-\r
- last_task = copy_task\r
- task_list = [config_task, build_task, copy_task]\r
-\r
- if (getattr(self, 'register', False)):\r
- register_task = self.create_task('cabal_register', inplace_node)\r
- register_task.cwd = node.parent.abspath()\r
- register_task.set_run_after(copy_task)\r
- register_task.build_path = build_path\r
-\r
- pkgreg_task = self.create_task('ghcpkg_register', inplace_node)\r
- pkgreg_task.cwd = node.parent.abspath()\r
- pkgreg_task.set_run_after(register_task)\r
- pkgreg_task.build_path = build_path\r
-\r
- last_task = pkgreg_task\r
- task_list += [register_task, pkgreg_task]\r
-\r
- touch_task = self.create_task('cabal_touch', inplace_node)\r
- touch_task.set_run_after(last_task)\r
- touch_task.set_outputs(package_node)\r
- touch_task.build_path = build_path\r
-\r
- task_list += [touch_task]\r
-\r
- return task_list\r
-\r
-def get_all_src_deps(node):\r
- hs_deps = node.ant_glob('**/*.hs')\r
- hsc_deps = node.ant_glob('**/*.hsc')\r
- lhs_deps = node.ant_glob('**/*.lhs')\r
- c_deps = node.ant_glob('**/*.c')\r
- cpp_deps = node.ant_glob('**/*.cpp')\r
- proto_deps = node.ant_glob('**/*.proto')\r
- return sum([hs_deps, hsc_deps, lhs_deps, c_deps, cpp_deps, proto_deps], [])\r
-\r
-class Cabal(Task.Task):\r
- def scan(self):\r
- return (get_all_src_deps(self.generator.path), ())\r
-\r
-class cabal_configure(Cabal):\r
- run_str = '${CABAL} configure -v0 --prefix=${PREFIX} --global --user --package-db=${PKGCONFD} --builddir=${tsk.build_path}'\r
- shell = True\r
-\r
- def scan(self):\r
- out_node = self.generator.bld.root.find_dir(self.generator.bld.out_dir)\r
- deps = [out_node.find_or_declare(dep).change_ext('.package') for dep in Utils.to_list(self.depends_on)]\r
- return (deps, ())\r
-\r
-class cabal_build(Cabal):\r
- run_str = '${CABAL} build -v1 --builddir=${tsk.build_path}/'\r
- shell = True\r
-\r
-class cabal_copy(Cabal):\r
- run_str = '${CABAL} copy -v0 --builddir=${tsk.build_path}'\r
- shell = True\r
-\r
-class cabal_register(Cabal):\r
- run_str = '${CABAL} register -v0 --gen-pkg-config=${tsk.build_path}/pkg.config --builddir=${tsk.build_path}'\r
- shell = True\r
-\r
-class ghcpkg_register(Cabal):\r
- run_str = '${GHCPKG} update -v0 --global --user --package-conf=${PKGCONFD} ${tsk.build_path}/pkg.config'\r
- shell = True\r
-\r
- def runnable_status(self):\r
- global lock, registering\r
-\r
- val = False \r
- lock.acquire()\r
- val = registering\r
- lock.release()\r
-\r
- if val:\r
- return Task.ASK_LATER\r
-\r
- ret = Task.Task.runnable_status(self)\r
- if ret == Task.RUN_ME:\r
- lock.acquire()\r
- registering = True\r
- lock.release()\r
-\r
- return ret\r
-\r
- def post_run(self):\r
- global lock, registering\r
-\r
- lock.acquire()\r
- registering = False\r
- lock.release()\r
-\r
- return Task.Task.post_run(self)\r
-\r
-class cabal_touch(Cabal):\r
- run_str = 'touch ${TGT}'\r
-\r
def options(opt):
def x(opt, param):
dest = name_to_dest(param)
- gr = opt.get_option_group("configure options")
+ gr = opt.get_option_group("Configuration options")
gr.add_option('--%s-root' % dest,
help="path containing include and lib subfolders for %s" \
% param,
--- /dev/null
+#!/usr/bin/env python
+# encoding: utf-8
+# vim: sw=4 ts=4 noexpandtab
+
+"""
+LLVM Clang-CL support.
+
+Clang-CL is supposed to be a drop-in replacement for MSVC CL, but also serves
+well as a cross compiler for Windows from Linux (provided you have set up the
+environment). Requires Visual Studio 2015+ to be installed.
+
+On Windows it uses (most) MSVC tools.
+
+Usage:
+ $ waf configure
+Or:
+ $ LLVM_PATH=C:\\Program Files\\LLVM\\bin waf configure
+Or:
+ def configure(self):
+ self.env.LLVM_PATH = 'C:\\Program Files\\LLVM\\bin'
+ self.load('clang_cl')
+"""
+
+import os
+
+from waflib import Utils, Errors, Logs
+from waflib.Configure import conf
+from waflib.Tools import msvc
+
+def options(opt):
+ msvc.options(opt)
+
+@conf
+def get_llvm_paths(self):
+ llvm_path = []
+ if Utils.is_win32:
+ try:
+ llvm_key = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432Node\\LLVM\\LLVM')
+ except OSError:
+ try:
+ llvm_key = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\LLVM\\LLVM')
+ except OSError:
+ llvm_key = None
+
+ if llvm_key:
+ llvm_dir, _ = Utils.winreg.QueryValueEx(llvm_key, '')
+ if llvm_dir:
+ llvm_path.append(os.path.join(llvm_dir, 'bin'))
+
+ tmp = self.environ.get('LLVM_PATH') or self.env.LLVM_PATH
+ if tmp:
+ llvm_path.append(tmp)
+ llvm_path += self.env.PATH
+ return llvm_path
+
+@conf
+def find_clang_cl(self):
+ """
+ Find the program clang-cl.
+ """
+ del(self.env.CC)
+ del(self.env.CXX)
+
+ paths = self.get_llvm_paths()
+ cc = self.find_program('clang-cl', var='CC', path_list=paths)
+ self.env.CC = self.env.CXX = cc
+ self.env.CC_NAME_SECONDARY = self.env.CXX_NAME_SECONDARY = 'clang'
+
+ if not Utils.is_win32:
+ self.env.MSVC_COMPILER = 'msvc'
+ self.env.MSVC_VERSION = 19
+
+ if not self.env.LINK_CXX:
+ self.find_program('lld-link', path_list=paths, errmsg='lld-link was not found (linker)', var='LINK_CXX')
+
+ if not self.env.LINK_CC:
+ self.env.LINK_CC = self.env.LINK_CXX
+
+@conf
+def find_llvm_tools(self):
+ """
+ Find the librarian, manifest tool, and resource compiler.
+ """
+ self.env.CC_NAME = self.env.CXX_NAME = 'msvc'
+
+ paths = self.get_llvm_paths()
+ llvm_path = self.environ.get('LLVM_PATH') or self.env.LLVM_PATH
+ if llvm_path:
+ paths = [llvm_path] + self.env.PATH
+ else:
+ paths = self.env.PATH
+
+ if not self.env.AR:
+ stliblink = self.find_program('llvm-lib', path_list=paths, var='AR')
+ if not stliblink:
+ self.fatal('Unable to find required program "llvm-lib"')
+
+ self.env.ARFLAGS = ['/nologo']
+
+ # We assume clang_cl to only be used with relatively new MSVC installations.
+ self.env.MSVC_MANIFEST = True
+ self.find_program('llvm-mt', path_list=paths, var='MT')
+ self.env.MTFLAGS = ['/nologo']
+
+ try:
+ self.load('winres')
+ except Errors.ConfigurationError:
+ Logs.warn('Resource compiler not found. Compiling resource file is disabled')
+
+def configure(self):
+ if Utils.is_win32:
+ self.autodetect(True)
+ self.find_msvc()
+ else:
+ self.find_llvm_tools()
+
+ self.find_clang_cl()
+ self.msvc_common_flags()
+ self.cc_load_tools()
+ self.cxx_load_tools()
+ self.cc_add_flags()
+ self.cxx_add_flags()
+ self.link_add_flags()
+ self.visual_studio_add_flags()
+++ /dev/null
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2021 (ita)
-
-from waflib import Utils, Runner
-
-"""
-Re-enable the classic threading system from waf 1.x
-
-def configure(conf):
- conf.load('classic_runner')
-"""
-
-class TaskConsumer(Utils.threading.Thread):
- """
- Task consumers belong to a pool of workers
-
- They wait for tasks in the queue and then use ``task.process(...)``
- """
- def __init__(self, spawner):
- Utils.threading.Thread.__init__(self)
- """
- Obtain :py:class:`waflib.Task.TaskBase` instances from this queue.
- """
- self.spawner = spawner
- self.daemon = True
- self.start()
-
- def run(self):
- """
- Loop over the tasks to execute
- """
- try:
- self.loop()
- except Exception:
- pass
-
- def loop(self):
- """
- Obtain tasks from :py:attr:`waflib.Runner.TaskConsumer.ready` and call
- :py:meth:`waflib.Task.TaskBase.process`. If the object is a function, execute it.
- """
- master = self.spawner.master
- while 1:
- if not master.stop:
- try:
- tsk = master.ready.get()
- if tsk:
- tsk.log_display(tsk.generator.bld)
- master.process_task(tsk)
- else:
- break
- finally:
- master.out.put(tsk)
-
-class Spawner(object):
- """
- Daemon thread that consumes tasks from :py:class:`waflib.Runner.Parallel` producer and
- spawns a consuming thread :py:class:`waflib.Runner.Consumer` for each
- :py:class:`waflib.Task.Task` instance.
- """
- def __init__(self, master):
- self.master = master
- """:py:class:`waflib.Runner.Parallel` producer instance"""
-
- self.pool = [TaskConsumer(self) for i in range(master.numjobs)]
-
-Runner.Spawner = Spawner
bld(features='cpplint', source=bld.path.ant_glob('**/*.hpp'))
'''
-from __future__ import absolute_import
import sys, re
import logging
from waflib import Errors, Task, TaskGen, Logs, Options, Node, Utils
def options(opt):
- opt.add_option('--cpplint-filters', type='string',
+ opt.add_option('--cpplint-filters', type=str,
default='', dest='CPPLINT_FILTERS',
help='add filters to cpplint')
- opt.add_option('--cpplint-length', type='int',
+ opt.add_option('--cpplint-length', type=int,
default=80, dest='CPPLINT_LINE_LENGTH',
help='specify the line length (default: 80)')
- opt.add_option('--cpplint-level', default=1, type='int', dest='CPPLINT_LEVEL',
+ opt.add_option('--cpplint-level', default=1, type=int, dest='CPPLINT_LEVEL',
help='specify the log level (default: 1)')
- opt.add_option('--cpplint-break', default=5, type='int', dest='CPPLINT_BREAK',
+ opt.add_option('--cpplint-break', default=5, type=int, dest='CPPLINT_BREAK',
help='break the build if error >= level (default: 5)')
- opt.add_option('--cpplint-root', type='string',
+ opt.add_option('--cpplint-root', type=str,
default='', dest='CPPLINT_ROOT',
help='root directory used to derive header guard')
opt.add_option('--cpplint-skip', action='store_true',
default=False, dest='CPPLINT_SKIP',
help='skip cpplint during build')
- opt.add_option('--cpplint-output', type='string',
+ opt.add_option('--cpplint-output', type=str,
default='waf', dest='CPPLINT_OUTPUT',
help='select output format (waf, emacs, vs7, eclipse)')
"""
Add the ``--with-diab-bindir`` command-line options.
"""
- opt.add_option('--with-diab-bindir', type='string', dest='diabbindir', help = 'Specify alternate diab bin folder', default="")
+ opt.add_option('--with-diab-bindir', type=str, dest='diabbindir', help = 'Specify alternate diab bin folder', default="")
with tarfile.open(tmpfile) as f:
temp = tempfile.mkdtemp(dir=pkgdir)
try:
- f.extractall(temp)
+ if hasattr(tarfile, 'data_filter'):
+ f.extractall(temp, filter='data')
+ else:
+ f.extractall(temp)
os.rename(temp, os.path.join(pkgdir, subdir))
finally:
try:
addTargetWrap('configure', True)
addTargetWrap('dist', False)
addTargetWrap('install', False)
- addTargetWrap('check', False)
for addTgt in self.env.ECLIPSE_EXTRA_TARGETS or []:
addTargetWrap(addTgt, False)
--- /dev/null
+#! /usr/bin/env python
+# encoding: utf-8
+# Detection of the flang Fortran compiler
+
+import re
+from waflib.Tools import fc,fc_config,fc_scan
+from waflib.Configure import conf
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['linux'].append('fc_flang')
+
+@conf
+def find_flang(conf):
+ fc = conf.find_program(['flang'], var='FC')
+ conf.get_nfort_version(fc)
+ conf.env.FC_NAME = 'FLANG'
+ conf.env.FC_MOD_CAPITALIZATION = 'lower'
+
+@conf
+def flang_flags(conf):
+ v = conf.env
+ v['_FCMODOUTFLAGS'] = []
+ v['FCFLAGS_DEBUG'] = []
+ v['FCFLAGS_fcshlib'] = []
+ v['LINKFLAGS_fcshlib'] = []
+ v['FCSTLIB_MARKER'] = ''
+ v['FCSHLIB_MARKER'] = ''
+
+@conf
+def get_flang_version(conf, fc):
+ cmd = fc + ['-dM', '-E', '-']
+ env = conf.env.env or None
+
+ try:
+ out, err = conf.cmd_and_log(cmd, output=0, input='\n'.encode(), env=env)
+ except Errors.WafError:
+ conf.fatal('Could not determine the FLANG compiler version for %r' % cmd)
+ if out.find('__clang__') < 0:
+ conf.fatal('Not a flang compiler')
+
+ k = {}
+ out = out.splitlines()
+ for line in out:
+ lst = shlex.split(line)
+ if len(lst)>2:
+ key = lst[1]
+ val = lst[2]
+ k[key] = val
+
+ def isD(var):
+ return var in k
+
+ # Some documentation is available at http://predef.sourceforge.net
+ # The names given to DEST_OS must match what Utils.unversioned_sys_platform() returns.
+ if not conf.env.DEST_OS:
+ conf.env.DEST_OS = ''
+ for i in MACRO_TO_DESTOS:
+ if isD(i):
+ conf.env.DEST_OS = MACRO_TO_DESTOS[i]
+ break
+ else:
+ if isD('__APPLE__') and isD('__MACH__'):
+ conf.env.DEST_OS = 'darwin'
+ elif isD('__unix__'): # unix must be tested last as it's a generic fallback
+ conf.env.DEST_OS = 'generic'
+
+ if isD('__ELF__'):
+ conf.env.DEST_BINFMT = 'elf'
+ elif isD('__WINNT__') or isD('__CYGWIN__') or isD('_WIN32'):
+ conf.env.DEST_BINFMT = 'pe'
+ if not conf.env.IMPLIBDIR:
+ conf.env.IMPLIBDIR = conf.env.LIBDIR # for .lib or .dll.a files
+ conf.env.LIBDIR = conf.env.BINDIR
+ elif isD('__APPLE__'):
+ conf.env.DEST_BINFMT = 'mac-o'
+
+ if not conf.env.DEST_BINFMT:
+ # Infer the binary format from the os name.
+ conf.env.DEST_BINFMT = Utils.destos_to_binfmt(conf.env.DEST_OS)
+
+ for i in MACRO_TO_DEST_CPU:
+ if isD(i):
+ conf.env.DEST_CPU = MACRO_TO_DEST_CPU[i]
+ break
+
+ Logs.debug('fc_flang: dest platform: ' + ' '.join([conf.env[x] or '?' for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')]))
+ conf.env.FC_VERSION = (k['__clang_major__'], k['__clang_minor__'], k['__clang_patchlevel__'])
+
+ return k
+
+
+def configure(conf):
+ conf.find_flang()
+ conf.find_ar()
+ conf.fc_flags()
+ conf.fc_add_flags()
+ conf.flang_flags()
def options(opt):
- opt.add_option('--fi-path', type='string', default='', dest='fi_path',
+ opt.add_option('--fi-path', type=str, default='', dest='fi_path',
help='''path to the FreeImage directory \
where the files are e.g. /FreeImage/Dist''')
opt.add_option('--fip', action='store_true', default=False, dest='fip',
+++ /dev/null
-import os
-import pipes
-import subprocess
-import sys
-
-from waflib import Logs, Task, Context
-from waflib.Tools.c_preproc import scan as scan_impl
-# ^-- Note: waflib.extras.gccdeps.scan does not work for us,
-# due to its current implementation:
-# The -MD flag is injected into the {C,CXX}FLAGS environment variable and
-# dependencies are read out in a separate step after compiling by reading
-# the .d file saved alongside the object file.
-# As the genpybind task refers to a header file that is never compiled itself,
-# gccdeps will not be able to extract the list of dependencies.
-
-from waflib.TaskGen import feature, before_method
-
-
-def join_args(args):
- return " ".join(pipes.quote(arg) for arg in args)
-
-
-def configure(cfg):
- cfg.load("compiler_cxx")
- cfg.load("python")
- cfg.check_python_version(minver=(2, 7))
- if not cfg.env.LLVM_CONFIG:
- cfg.find_program("llvm-config", var="LLVM_CONFIG")
- if not cfg.env.GENPYBIND:
- cfg.find_program("genpybind", var="GENPYBIND")
-
- # find clang reasource dir for builtin headers
- cfg.env.GENPYBIND_RESOURCE_DIR = os.path.join(
- cfg.cmd_and_log(cfg.env.LLVM_CONFIG + ["--libdir"]).strip(),
- "clang",
- cfg.cmd_and_log(cfg.env.LLVM_CONFIG + ["--version"]).strip())
- if os.path.exists(cfg.env.GENPYBIND_RESOURCE_DIR):
- cfg.msg("Checking clang resource dir", cfg.env.GENPYBIND_RESOURCE_DIR)
- else:
- cfg.fatal("Clang resource dir not found")
-
-
-@feature("genpybind")
-@before_method("process_source")
-def generate_genpybind_source(self):
- """
- Run genpybind on the headers provided in `source` and compile/link the
- generated code instead. This works by generating the code on the fly and
- swapping the source node before `process_source` is run.
- """
- # name of module defaults to name of target
- module = getattr(self, "module", self.target)
-
- # create temporary source file in build directory to hold generated code
- out = "genpybind-%s.%d.cpp" % (module, self.idx)
- out = self.path.get_bld().find_or_declare(out)
-
- task = self.create_task("genpybind", self.to_nodes(self.source), out)
- # used to detect whether CFLAGS or CXXFLAGS should be passed to genpybind
- task.features = self.features
- task.module = module
- # can be used to select definitions to include in the current module
- # (when header files are shared by more than one module)
- task.genpybind_tags = self.to_list(getattr(self, "genpybind_tags", []))
- # additional include directories
- task.includes = self.to_list(getattr(self, "includes", []))
- task.genpybind = self.env.GENPYBIND
-
- # Tell waf to compile/link the generated code instead of the headers
- # originally passed-in via the `source` parameter. (see `process_source`)
- self.source = [out]
-
-
-class genpybind(Task.Task): # pylint: disable=invalid-name
- """
- Runs genpybind on headers provided as input to this task.
- Generated code will be written to the first (and only) output node.
- """
- quiet = True
- color = "PINK"
- scan = scan_impl
-
- @staticmethod
- def keyword():
- return "Analyzing"
-
- def run(self):
- if not self.inputs:
- return
-
- args = self.find_genpybind() + self._arguments(
- resource_dir=self.env.GENPYBIND_RESOURCE_DIR)
-
- output = self.run_genpybind(args)
-
- # For debugging / log output
- pasteable_command = join_args(args)
-
- # write generated code to file in build directory
- # (will be compiled during process_source stage)
- (output_node,) = self.outputs
- output_node.write("// {}\n{}\n".format(
- pasteable_command.replace("\n", "\n// "), output))
-
- def find_genpybind(self):
- return self.genpybind
-
- def run_genpybind(self, args):
- bld = self.generator.bld
-
- kwargs = dict(cwd=bld.variant_dir)
- if hasattr(bld, "log_command"):
- bld.log_command(args, kwargs)
- else:
- Logs.debug("runner: {!r}".format(args))
- proc = subprocess.Popen(
- args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
- stdout, stderr = proc.communicate()
-
- if not isinstance(stdout, str):
- stdout = stdout.decode(sys.stdout.encoding, errors="replace")
- if not isinstance(stderr, str):
- stderr = stderr.decode(sys.stderr.encoding, errors="replace")
-
- if proc.returncode != 0:
- bld.fatal(
- "genpybind returned {code} during the following call:"
- "\n{command}\n\n{stdout}\n\n{stderr}".format(
- code=proc.returncode,
- command=join_args(args),
- stdout=stdout,
- stderr=stderr,
- ))
-
- if stderr.strip():
- Logs.debug("non-fatal warnings during genpybind run:\n{}".format(stderr))
-
- return stdout
-
- def _include_paths(self):
- return self.generator.to_incnodes(self.includes + self.env.INCLUDES)
-
- def _inputs_as_relative_includes(self):
- include_paths = self._include_paths()
- relative_includes = []
- for node in self.inputs:
- for inc in include_paths:
- if node.is_child_of(inc):
- relative_includes.append(node.path_from(inc))
- break
- else:
- self.generator.bld.fatal("could not resolve {}".format(node))
- return relative_includes
-
- def _arguments(self, genpybind_parse=None, resource_dir=None):
- args = []
- relative_includes = self._inputs_as_relative_includes()
- is_cxx = "cxx" in self.features
-
- # options for genpybind
- args.extend(["--genpybind-module", self.module])
- if self.genpybind_tags:
- args.extend(["--genpybind-tag"] + self.genpybind_tags)
- if relative_includes:
- args.extend(["--genpybind-include"] + relative_includes)
- if genpybind_parse:
- args.extend(["--genpybind-parse", genpybind_parse])
-
- args.append("--")
-
- # headers to be processed by genpybind
- args.extend(node.abspath() for node in self.inputs)
-
- args.append("--")
-
- # options for clang/genpybind-parse
- args.append("-D__GENPYBIND__")
- args.append("-xc++" if is_cxx else "-xc")
- has_std_argument = False
- for flag in self.env["CXXFLAGS" if is_cxx else "CFLAGS"]:
- flag = flag.replace("-std=gnu", "-std=c")
- if flag.startswith("-std=c"):
- has_std_argument = True
- args.append(flag)
- if not has_std_argument:
- args.append("-std=c++14")
- args.extend("-I{}".format(n.abspath()) for n in self._include_paths())
- args.extend("-D{}".format(p) for p in self.env.DEFINES)
-
- # point to clang resource dir, if specified
- if resource_dir:
- args.append("-resource-dir={}".format(resource_dir))
-
- return args
def options(opt):
opt.add_option('--dtitle', action='store', default='Parallel build representation for %r' % ' '.join(sys.argv),
help='title for the svg diagram', dest='dtitle')
- opt.add_option('--dwidth', action='store', type='int', help='diagram width', default=800, dest='dwidth')
- opt.add_option('--dtime', action='store', type='float', help='recording interval in seconds', default=0.009, dest='dtime')
- opt.add_option('--dband', action='store', type='int', help='band width', default=22, dest='dband')
- opt.add_option('--dmaxtime', action='store', type='float', help='maximum time, for drawing fair comparisons', default=0, dest='dmaxtime')
+ opt.add_option('--dwidth', action='store', type=int, help='diagram width', default=800, dest='dwidth')
+ opt.add_option('--dtime', action='store', type=float, help='recording interval in seconds', default=0.009, dest='dtime')
+ opt.add_option('--dband', action='store', type=int, help='band width', default=22, dest='dband')
+ opt.add_option('--dmaxtime', action='store', type=float, help='maximum time, for drawing fair comparisons', default=0, dest='dmaxtime')
opt.add_option('--dnotooltip', action='store_true', help='disable tooltips', default=False, dest='dnotooltip')
--- /dev/null
+#! /usr/bin/env python
+# encoding: UTF-8
+# Thomas Nagy 2020 (ita)
+
+from waflib import Utils
+from waflib.Configure import conf
+
+PTHREAD_CHECK = '''
+#include <pthread.h>
+
+static void* fun(void* params) {
+ (void)params;
+ return NULL;
+}
+
+int main(int argc, char **argv) {
+ pthread_t thread;
+ (void)argc;
+ (void)argv;
+ pthread_create(&thread, NULL, &fun, NULL);
+ pthread_join(thread, NULL);
+ return 0;
+}
+'''
+
+@conf
+def check_pthreads(self, mode=None):
+ if not mode:
+ mode = 'cxx' if self.env.CXX else 'c'
+
+ if Utils.unversioned_sys_platform() == 'sunos':
+ flags = ['-pthreads', '-lpthread', '-mt', '-pthread']
+ else:
+ flags = ['', '-lpthreads', '-Kthread', '-kthread', '-llthread', '-pthread', '-pthreads', '-mthreads', '-lpthread', '--thread-safe', '-mt']
+
+ features = mode
+ for flag in flags:
+ self.env.stash()
+
+ self.env[mode.upper() + 'FLAGS_PTHREAD'] = [flag]
+
+ if flag:
+ msg = ' -> Trying pthread compilation flag %s' % flag
+ okmsg = 'needs %s' % flag
+ else:
+ msg = 'Checking if a pthread flag is necessary for compiling'
+ okmsg = 'None'
+
+ try:
+ self.check(features=features, msg=msg, okmsg=okmsg, use='PTHREAD', fragment=PTHREAD_CHECK)
+ except self.errors.ConfigurationError:
+ self.env.revert()
+ continue
+ else:
+ break
+ else:
+ self.fatal('Could not find a suitable pthreads flag for compiling')
+
+ features = '%s %sprogram' % (mode, mode)
+ for flag in flags:
+ self.env.stash()
+
+ self.env.LINKFLAGS_PTHREAD = [flag]
+
+ if flag:
+ msg = ' -> Trying pthread link flag %s' % flag
+ okmsg = 'needs %s' % flag
+ else:
+ msg = 'Checking if a pthread flag is necessary for linking'
+ okmsg = 'None'
+
+ try:
+ self.check(features=features, msg=msg, okmsg=okmsg, use='PTHREAD', fragment=PTHREAD_CHECK)
+ except self.errors.ConfigurationError:
+ self.env.revert()
+ continue
+ else:
+ break
+ else:
+ self.fatal('Could not find a suitable pthreads flag for linking')
+
+
+def configure(self):
+ self.check_pthreads()
+
#!/usr/bin/env python
# encoding: utf-8
-# Federico Pellegrin, 2016-2022 (fedepell) adapted for Python
+# Federico Pellegrin, 2016-2024 (fedepell) adapted for Python
"""
-This tool helps with finding Python Qt5 tools and libraries,
-and provides translation from QT5 files to Python code.
+This tool helps with finding Python Qt5/Qt6 tools and libraries,
+and provides translation from QT5/QT6 files to Python code.
The following snippet illustrates the tool usage::
Add into the sources list also the qrc resources files or ui5
definition files and they will be translated into python code
-with the system tools (PyQt5, PySide2, PyQt4 are searched in this
-order) and then compiled
+with the system tools (PyQt6, PySide6, PyQt5, PySide2, PyQt4 are
+searched in this order) and then compiled
"""
try:
@conf
def find_pyqt5_binaries(self):
"""
- Detects PyQt5 or PySide2 programs such as pyuic5/pyside2-uic, pyrcc5/pyside2-rcc
+ Detects PyQt5 or PySide2/6 programs such as pyuic5/pyside2-uic, pyrcc5/pyside2-rcc
"""
env = self.env
- if getattr(Options.options, 'want_pyqt5', True):
+ if getattr(Options.options, 'want_pyqt6', True):
+ self.find_program(['pyuic6'], var='QT_PYUIC')
+ self.find_program(['pyrcc6'], var='QT_PYRCC')
+ self.find_program(['pylupdate6'], var='QT_PYLUPDATE')
+ self.find_program(['lrelease-qt6', 'lrelease'], var='QT_LRELEASE')
+ elif getattr(Options.options, 'want_pyside6', True):
+ self.find_program(['pyside6-uic','uic-qt6'], var='QT_PYUIC')
+ self.find_program(['pyside6-rcc','rcc-qt6'], var='QT_PYRCC')
+ self.find_program(['pyside6-lupdate','lupdate-qt6'], var='QT_PYLUPDATE')
+ self.find_program(['lrelease-qt6', 'lrelease'], var='QT_LRELEASE')
+ elif getattr(Options.options, 'want_pyqt5', True):
self.find_program(['pyuic5'], var='QT_PYUIC')
self.find_program(['pyrcc5'], var='QT_PYRCC')
self.find_program(['pylupdate5'], var='QT_PYLUPDATE')
+ self.find_program(['lrelease-qt5', 'lrelease'], var='QT_LRELEASE')
elif getattr(Options.options, 'want_pyside2', True):
self.find_program(['pyside2-uic','uic-qt5'], var='QT_PYUIC')
self.find_program(['pyside2-rcc','rcc-qt5'], var='QT_PYRCC')
self.find_program(['pyside2-lupdate','lupdate-qt5'], var='QT_PYLUPDATE')
+ self.find_program(['lrelease-qt5', 'lrelease'], var='QT_LRELEASE')
elif getattr(Options.options, 'want_pyqt4', True):
self.find_program(['pyuic4'], var='QT_PYUIC')
self.find_program(['pyrcc4'], var='QT_PYRCC')
self.find_program(['pylupdate4'], var='QT_PYLUPDATE')
else:
- self.find_program(['pyuic5','pyside2-uic','pyuic4','uic-qt5'], var='QT_PYUIC')
- self.find_program(['pyrcc5','pyside2-rcc','pyrcc4','rcc-qt5'], var='QT_PYRCC')
- self.find_program(['pylupdate5', 'pyside2-lupdate','pylupdate4','lupdate-qt5'], var='QT_PYLUPDATE')
+ self.find_program(['pyuic6', 'pyside6-uic', 'pyuic5','pyside2-uic','pyuic4','uic-qt5'], var='QT_PYUIC')
+ self.find_program(['pyrcc6', 'pyside6-rcc', 'pyrcc5','pyside2-rcc','pyrcc4','rcc-qt5'], var='QT_PYRCC')
+ self.find_program(['pylupdate6', 'pyside6-lupdate', 'pylupdate5', 'pyside2-lupdate','pylupdate4','lupdate-qt5'], var='QT_PYLUPDATE')
+ self.find_program(['lrelease-qt6', 'lrelease-qt5', 'lrelease'], var='QT_LRELEASE')
if not env.QT_PYUIC:
- self.fatal('cannot find the uic compiler for python for qt5')
+ self.fatal('cannot find the uic compiler for python for qt')
if not env.QT_PYRCC:
- self.fatal('cannot find the rcc compiler for python for qt5')
+ self.fatal('cannot find the rcc compiler for python for qt')
- self.find_program(['lrelease-qt5', 'lrelease'], var='QT_LRELEASE')
def options(opt):
"""
Command-line options
"""
pyqt5opt=opt.add_option_group("Python QT5 Options")
- pyqt5opt.add_option('--pyqt5-pyqt5', action='store_true', default=False, dest='want_pyqt5', help='use PyQt5 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after, PyQt4 last)')
- pyqt5opt.add_option('--pyqt5-pyside2', action='store_true', default=False, dest='want_pyside2', help='use PySide2 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after, PyQt4 last)')
- pyqt5opt.add_option('--pyqt5-pyqt4', action='store_true', default=False, dest='want_pyqt4', help='use PyQt4 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after, PyQt4 last)')
+ pyqt5opt.add_option('--pyqt5-pyqt5', action='store_true', default=False, dest='want_pyqt5', help='use PyQt5 bindings as python QT bindings (default search order: PyQt6, PySide6, PyQt5, PySide2, PyQt4)')
+ pyqt5opt.add_option('--pyqt5-pyqt6', action='store_true', default=False, dest='want_pyqt6', help='use PyQt6 bindings as python QT bindings (default search order: PyQt6, PySide6, PyQt5, PySide2, PyQt4)')
+ pyqt5opt.add_option('--pyqt5-pyside2', action='store_true', default=False, dest='want_pyside2', help='use PySide2 bindings as python QT bindings (default search order: PyQt6, PySide6, PyQt5, PySide2, PyQt4)')
+ pyqt5opt.add_option('--pyqt5-pyside6', action='store_true', default=False, dest='want_pyside6', help='use PySide6 bindings as python QT bindings (default search order: PyQt6, PySide6, PyQt5, PySide2, PyQt4)')
+ pyqt5opt.add_option('--pyqt5-pyqt4', action='store_true', default=False, dest='want_pyqt4', help='use PyQt4 bindings as python QT bindings (default search order: PyQt6, PySide6, PyQt5, PySide2, PyQt4)')
opt.add_option('--want-rpath', action='store_true', default=False, dest='want_rpath', help='enable the rpath for qt libraries')
opt.add_option('--header-ext',
- type='string',
+ type=str,
default='',
help='header extension for moc files',
dest='qt_header_ext')
for i in 'qtdir qtbin qtlibs'.split():
- opt.add_option('--'+i, type='string', default='', dest=i)
+ opt.add_option('--'+i, type=str, default='', dest=i)
opt.add_option('--translate', action="store_true", help="collect translation strings", dest="trans_qt4", default=False)
their default value from their optparse object and store them
into the review dictionaries.
"""
- gr = self.get_option_group('configure options')
+ gr = self.get_option_group('Configuration options')
for opt in gr.option_list:
if opt.action != 'store' or opt.dest in ("out", "top"):
continue
def parse_rst_node(task, node, nodes, names, seen, dirs=None):
# TODO add extensibility, to handle custom rst include tags...
if dirs is None:
- dirs = (node.parent,node.get_bld().parent)
+ dirs = (node.parent, node.parent.get_bld())
if node in seen:
return
else:
outdir = self.path.get_bld()
outdir.mkdir()
+ self.outdir = outdir
self.env['OUTDIR'] = outdir.abspath()
self.scalac_task = tsk = self.create_task('scalac')
"""
Create _moc.cpp files
-The builds are 30-40% faster when .moc files are included,
-you should NOT use this tool. If you really
-really want it:
+The builds are 30-40% faster when .moc files are directly included,
+so the usage of this tool is discouraged.
def configure(conf):
- conf.load('compiler_cxx qt4')
- conf.load('slow_qt4')
+ conf.load('compiler_cxx qt5')
+ conf.load('slow_qt')
-See playground/slow_qt/wscript for a complete example.
+See playground/slow_qt/wscript for a complete example,
+and run with "waf --zones=slow_qt" to display the moc files that should be generated
"""
from waflib.TaskGen import extension
-from waflib import Task
-import waflib.Tools.qt4
+from waflib import Task, Logs
+import waflib.Tools.qt5
import waflib.Tools.cxx
-@extension(*waflib.Tools.qt4.EXT_QT4)
+@extension(*waflib.Tools.qt5.EXT_QT5)
def cxx_hook(self, node):
return self.create_compiled_task('cxx_qt', node)
def runnable_status(self):
ret = Task.classes['cxx'].runnable_status(self)
if ret != Task.ASK_LATER and not getattr(self, 'moc_done', None):
-
try:
cache = self.generator.moc_cache
except AttributeError:
# no corresponding file, continue
continue
- # the file foo.cpp could be compiled for a static and a shared library - hence the %number in the name
- cxx_node = x.parent.get_bld().make_node(x.name.replace('.', '_') + '_%d_moc.cpp' % self.generator.idx)
+ # the file foo.cpp could be compiled for a static and a shared library
+ # one workaround is to use a %number in the name
+ #cxx_node = x.parent.get_bld().make_node(x.name.replace('.', '_') + '_%d_moc.cpp' % self.generator.idx)
+
+ # another workaround is to add the target name
+ cxx_node = x.parent.get_bld().make_node(x.name.replace('.', '_') + '_%s_moc.cpp' % self.generator.name)
if cxx_node in cache:
continue
cache[cxx_node] = self
+ Logs.debug('slow_qt: will create a file named %s', cxx_node.abspath())
+
tsk = Task.classes['moc'](env=self.env, generator=self.generator)
tsk.set_inputs(x)
tsk.set_outputs(cxx_node)
if x.name.endswith('.cpp'):
- # moc is trying to be too smart but it is too dumb:
- # why forcing the #include when Q_OBJECT is in the cpp file?
gen = self.generator.bld.producer
gen.outstanding.append(tsk)
gen.total += 1
return Task.ASK_LATER
return ret
-
def options(opt):
grp = opt.add_option_group('Softlink Libraries Options')
- grp.add_option('--exclude', default='/usr/lib,/lib', help='No symbolic links are created for libs within [%default]')
+ grp.add_option('--exclude', default='/usr/lib,/lib', help='No symbolic links are created for libs within [%(default)s]')
def configure(cnf):
cnf.find_program('ldd')
def options(opt):
- opt.add_option('--with-ti-cgt', type='string', dest='ti-cgt-dir', help = 'Specify alternate cgt root folder', default="")
- opt.add_option('--with-ti-biosutils', type='string', dest='ti-biosutils-dir', help = 'Specify alternate biosutils folder', default="")
- opt.add_option('--with-ti-dspbios', type='string', dest='ti-dspbios-dir', help = 'Specify alternate dspbios folder', default="")
- opt.add_option('--with-ti-dsplink', type='string', dest='ti-dsplink-dir', help = 'Specify alternate dsplink folder', default="")
- opt.add_option('--with-ti-xdctools', type='string', dest='ti-xdctools-dir', help = 'Specify alternate xdctools folder', default="")
+ opt.add_option('--with-ti-cgt', type=str, dest='ti-cgt-dir', help = 'Specify alternate cgt root folder', default="")
+ opt.add_option('--with-ti-biosutils', type=str, dest='ti-biosutils-dir', help = 'Specify alternate biosutils folder', default="")
+ opt.add_option('--with-ti-dspbios', type=str, dest='ti-dspbios-dir', help = 'Specify alternate dspbios folder', default="")
+ opt.add_option('--with-ti-dsplink', type=str, dest='ti-dsplink-dir', help = 'Specify alternate dsplink folder', default="")
+ opt.add_option('--with-ti-xdctools', type=str, dest='ti-xdctools-dir', help = 'Specify alternate xdctools folder', default="")
class ti_cprogram(cprogram):
"""
Link object files into a c program
-
+
Changes:
- the linked executable to have a relative path (because we can)
def options(opt):
global MAX_BATCH
- opt.add_option('--batchsize', action='store', dest='batchsize', type='int', default=MAX_BATCH,
+ opt.add_option('--batchsize', action='store', dest='batchsize', type=int, default=MAX_BATCH,
help='default unity batch size (0 disables unity builds)')
@TaskGen.taskgen_method
sys.path.append(osp.abspath(DEFAULT_DIR))
def options(self):
- group = self.add_option_group('configure options')
+ group = self.add_option_group('Configuration options')
group.add_option('--download', dest='download', default=False, action='store_true', help='try to download the tools if missing')
group.add_option('--use-config', action='store', default=None,
# encoding: utf-8
# Thomas Nagy, 2010-2018 (ita)
-from __future__ import with_statement
-
import os
all_modifs = {}
def fixdir(dir):
"""Call all substitution functions on Waf folders"""
- for k in all_modifs:
- for v in all_modifs[k]:
- modif(os.path.join(dir, 'waflib'), k, v)
-
-def modif(dir, name, fun):
- """Call a substitution function"""
- if name == '*':
- lst = []
- for y in '. Tools extras'.split():
- for x in os.listdir(os.path.join(dir, y)):
- if x.endswith('.py'):
- lst.append(y + os.sep + x)
- for x in lst:
- modif(dir, x, fun)
- return
+ for y in '. Tools extras'.split():
+ for x in os.listdir(os.path.join(dir, 'waflib', y)):
+ if x.endswith('.py'):
+ filename = os.path.join(dir, 'waflib', y, x)
+ update(filename)
- filename = os.path.join(dir, name)
+def update(filename):
with open(filename, 'r') as f:
txt = f.read()
- txt = fun(txt)
+ txt = txt.replace(".decode(sys.stdout.encoding or'latin-1',errors='replace')", '')
+ txt = txt.replace('.encode()', '')
+ txt = txt.replace('class Task(metaclass=store_task_type):', "class Task(object):%s\t__metaclass__=store_task_type" % os.linesep)
with open(filename, 'w') as f:
f.write(txt)
-def subst(*k):
- """register a substitution function"""
- def do_subst(fun):
- for x in k:
- try:
- all_modifs[x].append(fun)
- except KeyError:
- all_modifs[x] = [fun]
- return fun
- return do_subst
-
-@subst('*')
-def r1(code):
- "utf-8 fixes for python < 2.6"
- code = code.replace('as e:', ',e:')
- code = code.replace(".decode(sys.stdout.encoding or'latin-1',errors='replace')", '')
- return code.replace('.encode()', '')
-
-@subst('Runner.py')
-def r4(code):
- "generator syntax"
- return code.replace('next(self.biter)', 'self.biter.next()').replace('self.daemon = True', 'self.setDaemon(1)')
-
-@subst('Context.py')
-def r5(code):
- return code.replace("('Execution failure: %s'%str(e),ex=e)", "('Execution failure: %s'%str(e),ex=e),None,sys.exc_info()[2]")
-
+ for k in all_modifs:
+ for v in all_modifs[k]:
+ modif(os.path.join(dir, 'waflib'), k, v)
txt = sys.stdin.readline().strip()
if not txt:
# parent process probably ended
- sys.exit(1)
+ sys.exit(18)
[cmd, kwargs, cargs] = cPickle.loads(base64.b64decode(txt))
cargs = cargs or {}
run()
except KeyboardInterrupt:
break
-
+ except Exception:
+ traceback.print_exc(file=sys.stderr)
+ sys.exit(19)