    # -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
    # vim: set filetype=python:
    # This Source Code Form is subject to the terms of the Mozilla Public
    # License, v. 2.0. If a copy of the MPL was not distributed with this
    # file, You can obtain one at http://mozilla.org/MPL/2.0/.
    
    
    include("build/moz.configure/init.configure")
    
    # Note:
    # - Gecko-specific options and rules should go in toolkit/moz.configure.
    # - Firefox-specific options and rules should go in browser/moz.configure.
    # - Fennec-specific options and rules should go in
    #   mobile/android/moz.configure.
    # - Spidermonkey-specific options and rules should go in js/moz.configure.
    # - etc.
    
    option(
        "--enable-artifact-builds",
        env="MOZ_ARTIFACT_BUILDS",
        help="Download and use prebuilt binary artifacts.",
    )
    
    set_config("MOZ_ARTIFACT_BUILDS", artifact_builds)
    
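    # If artifact builds are explicitly disabled, also disable artifact build
    # symbols by default; when artifact builds are enabled (or left unset), the
    # lambda returns None and --enable-artifact-build-symbols keeps its own
    # default.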
    imply_option(
        "--enable-artifact-build-symbols",
        depends(artifact_builds)(lambda v: False if v is None else None),
        reason="--disable-artifact-builds",
    )
    
    option(
        "--enable-artifact-build-symbols",
        nargs="?",
        choices=("full",),
        help="Download symbols when artifact builds are enabled.",
    )
    
    @depends("--enable-artifact-build-symbols", "MOZ_AUTOMATION", target)
    
    def enable_artifact_build_symbols(value, automation, target):
        if len(value):
            return value[0]
        if bool(value):
    
            if target.os == "Android" and not automation:
                return "full"
    
    set_config("MOZ_ARTIFACT_BUILD_SYMBOLS", enable_artifact_build_symbols)
    
    
    @depends("--enable-artifact-builds")
    
    def imply_disable_compile_environment(value):
        if value:
            return False
    
    
    option(
        env="MOZ_COPY_PDBS",
        help="For builds that do not support symbols in the normal fashion,"
        " generate and copy them into the resulting build archive.",
    )
    
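    # depends_if only runs the function when MOZ_COPY_PDBS is set to a
    # non-empty value, so MOZ_COPY_PDBS is True when set and left unset
    # otherwise.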
    set_config("MOZ_COPY_PDBS", depends_if("MOZ_COPY_PDBS")(lambda _: True))
    
    imply_option("--enable-compile-environment", imply_disable_compile_environment)
    
    option("--disable-compile-environment", help="Disable compiler/library checks")
    
    
    @depends("--disable-compile-environment")
    
    def compile_environment(compile_env):
        if compile_env:
    
    set_config("COMPILE_ENVIRONMENT", compile_environment)
    add_old_configure_assignment("COMPILE_ENVIRONMENT", compile_environment)
    
    option("--disable-tests", help="Do not build test libraries & programs")
    
    def enable_tests(value):
        if value:
            return True
    
    
    
    set_config("ENABLE_TESTS", enable_tests)
    set_define("ENABLE_TESTS", enable_tests)
    
    
    
    @depends(enable_tests)
    def gtest_has_rtti(value):
        if value:
            return "0"
    
    
    set_define("GTEST_HAS_RTTI", gtest_has_rtti)
    
    
    
    @depends(target, enable_tests)
    def linux_gtest_defines(target, enable_tests):
        if enable_tests and target.os == "Android":
            return namespace(os_linux_android=True, use_own_tr1_tuple=True, has_clone="0")
    
    
    set_define("GTEST_OS_LINUX_ANDROID", linux_gtest_defines.os_linux_android)
    set_define("GTEST_USE_OWN_TR1_TUPLE", linux_gtest_defines.use_own_tr1_tuple)
    set_define("GTEST_HAS_CLONE", linux_gtest_defines.has_clone)
    
    option(
        "--enable-debug",
        nargs="?",
        help="Enable building with developer debug info "
        "(using the given compiler flags).",
    )
    
    
    @depends("--enable-debug")
    
    def moz_debug(debug):
        if debug:
            return bool(debug)
    
    
    
    set_config("MOZ_DEBUG", moz_debug)
    set_define("MOZ_DEBUG", moz_debug)
    
    # Override any value MOZ_DEBUG may have from the environment when passing it
    # down to old-configure.
    
    add_old_configure_assignment("MOZ_DEBUG", depends("--enable-debug")(lambda x: bool(x)))
    
    option(
        "--with-debug-label",
        nargs="+",
        help="Debug DEBUG_<value> for each comma-separated value given",
    )
    
    
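    # For example, `--enable-debug --with-debug-label=mylabel` defines DEBUG and
    # DEBUG_mylabel; a non-debug build gets NDEBUG and TRIMMED instead.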
    @depends(moz_debug, "--with-debug-label")
    
    def debug_defines(debug, labels):
        if debug:
    
            return ["DEBUG"] + ["DEBUG_%s" % label for label in labels]
        return ["NDEBUG", "TRIMMED"]
    
    
    set_config("MOZ_DEBUG_DEFINES", debug_defines)
    
    option(env="MOZ_PGO", help="Build with profile guided optimizations")
    
    set_config("MOZ_PGO", depends("MOZ_PGO")(lambda x: bool(x)))
    
    imply_option("--enable-release", mozilla_official)
    imply_option("--enable-release", depends_if("MOZ_AUTOMATION")(lambda x: True))
    
    option(
        "--enable-release",
        default=milestone.is_release_or_beta,
        help="{Build|Do not build} with more conservative, release "
        "engineering-oriented options.{ This may slow down builds.|}",
    )
    
    
    @depends("--enable-release")
    def developer_options(value):
        if not value:
            return True
    
    
    add_old_configure_assignment("DEVELOPER_OPTIONS", developer_options)
    set_config("DEVELOPER_OPTIONS", developer_options)
    
    
    
    option(
        env="MOZ_FETCHES_DIR",
        nargs=1,
        when="MOZ_AUTOMATION",
        help="Directory containing fetched artifacts",
    )
    
    
    @depends("MOZ_FETCHES_DIR", when="MOZ_AUTOMATION")
    def moz_fetches_dir(value):
        if value:
            return value[0]
    
    
    
    @depends(vcs_checkout_type, milestone.is_nightly, "MOZ_AUTOMATION")
    def bootstrap_default(vcs_checkout_type, is_nightly, automation):
        # Automation doesn't go through bootstrap; its toolchains come from
        # MOZ_FETCHES_DIR.
        if automation:
            return False
        # We only enable if building off a VCS checkout of central.
        if is_nightly and vcs_checkout_type:
            return True


    option(
        "--enable-bootstrap",
        default=bootstrap_default,
        help="{Automatically bootstrap or update some toolchains|Disable bootstrap or update of toolchains}",
    )
    
    
    @depends(developer_options, "--enable-bootstrap", moz_fetches_dir)
    def bootstrap_search_path_order(developer_options, bootstrap, moz_fetches_dir):
    
        if moz_fetches_dir:
            log.debug("Prioritizing MOZ_FETCHES_DIR in toolchain path.")
            return "prepend"
    
    
        if bootstrap:
            log.debug(
                "Prioritizing mozbuild state dir in toolchain paths because "
                "bootstrap mode is enabled."
            )
            return "prepend"
    
    
        if developer_options:
            log.debug(
                "Prioritizing mozbuild state dir in toolchain paths because "
                "you are not building in release mode."
            )
            return "prepend"
    
        log.debug(
            "Prioritizing system over mozbuild state dir in "
            "toolchain paths because you are building in "
            "release mode."
        )
        return "append"
    
    
    
    @dependable
    @imports("os")
    @imports(_from="os", _import="environ")
    def original_path():
        return environ["PATH"].split(os.pathsep)
    
    
    
    @depends(host, when="--enable-bootstrap")
    @imports("os")
    @imports(_from="mozbuild.toolchains", _import="toolchain_task_definitions")
    @imports(_from="__builtin__", _import="Exception")
    def bootstrap_toolchain_tasks(host):
        prefix = {
            ("x86_64", "GNU", "Linux"): "linux64",
            ("x86_64", "OSX", "Darwin"): "macosx64",
            ("aarch64", "OSX", "Darwin"): "macosx64-aarch64",
            ("x86_64", "WINNT", "WINNT"): "win64",
        }.get((host.cpu, host.os, host.kernel))
        try:
            return namespace(prefix=prefix, tasks=toolchain_task_definitions())
        except Exception:
            return None
    
            configure_error("bootstrap_path only takes `when` as a keyword argument")
    
            toolchains_base_dir,
            bootstrap_toolchain_tasks,
            shell,
            check_build_environment,
    
            when=when,
        )
        @imports("os")
        @imports("subprocess")
        @imports(_from="mozbuild.util", _import="ensureParentDir")
        @imports(_from="__builtin__", _import="open")
        @imports(_from="__builtin__", _import="Exception")
    
        def bootstrap_path(bootstrap, toolchains_base_dir, tasks, shell, build_env, path):
            path_parts = path.split("/")
    
    
                prefixes = [""]
                if tasks.prefix:
                    prefixes.insert(0, "{}-".format(tasks.prefix))
                for prefix in prefixes:
    
                    label = "toolchain-{}{}".format(prefix, path_parts[0])
                    task = tasks.tasks.get(label)
                    if task:
                        break
                log.debug("Trying to bootstrap %s", label)
    
                task_index = task.optimization.get("index-search")
                if not task_index:
    
                log.debug("Resolved %s to %s", label, task_index[0])
    
                task_index = task_index[0].split(".")[-1]
                artifact = task.attributes["toolchain-artifact"]
                # `mach artifact toolchain` doesn't support authentication for
                # private artifacts.
                if not artifact.startswith("public/"):
    
                    log.debug("Cannot bootstrap %s: not a public artifact", label)
    
                index_file = os.path.join(toolchains_base_dir, "indices", path_parts[0])
                try:
                    with open(index_file) as fh:
                        index = fh.read().strip()
                except Exception:
                    index = None
    
                    log.debug("%s is up-to-date", label)
    
                    "%s bootstrapped toolchain in %s",
                    "Updating" if exists else "Installing",
    
                    os.path.join(toolchains_base_dir, path_parts[0]),
                )
                subprocess.run(
                    [
                        shell,
                        os.path.join(build_env.topsrcdir, "mach"),
                        "--log-no-times",
                        "artifact",
                        "toolchain",
                        "--from-build",
                        label,
                    ],
                    cwd=toolchains_base_dir,
                    check=True,
                )
                ensureParentDir(index_file)
                with open(index_file, "w") as fh:
                    fh.write(task_index)
    
    
            path = os.path.join(toolchains_base_dir, *path_parts)
    
                    if not try_bootstrap(os.path.exists(path)):
    
                        # If there aren't toolchain artifacts to use for this build,
                        # don't return a path.
                        return None
    
                except Exception as e:
                    log.error("%s", e)
                    die("If you can't fix the above, retry with --disable-bootstrap.")
    
            # We re-test whether the path exists because it may have been created by
            # try_bootstrap. Automation will not have gone through the bootstrap
            # process, but we want to return the path if it exists.
            if os.path.exists(path):
                return path
    
        return bootstrap_path
    
    
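    # bootstrap_search_path(path) combines bootstrap_path(path) with the search
    # path order above: callers get their original search path with the
    # bootstrapped toolchain directory (under the mozbuild state directory)
    # prepended or appended as bootstrap_search_path_order dictates.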
    @template
    def bootstrap_search_path(path, paths=original_path, **kwargs):
        @depends(
            bootstrap_path(path, **kwargs),
            bootstrap_search_path_order,
            paths,
            original_path,
        )
        def bootstrap_search_path(path, order, paths, original_path):
            if paths is None:
                paths = original_path
            if not path:
                return paths
            if order == "prepend":
                return [path] + paths
            return paths + [path]

        return bootstrap_search_path


    # The execution model of the configure sandbox doesn't allow check_prog to
    # use bootstrap_search_path directly because check_prog comes first, so we
    # use a trick to allow it. Any use of check_prog before this point won't
    # support bootstrapping.
    @template
    def check_prog(*args, **kwargs):
        kwargs["bootstrap_search_path"] = bootstrap_search_path
        return check_prog(*args, **kwargs)
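
    # For example (illustrative), a later check such as
    #
    #     check_prog("WGET", ("wget",), allow_missing=True)
    #
    # now goes through the wrapper above, which forwards bootstrap_search_path
    # so the underlying check_prog template can look in bootstrapped toolchain
    # directories in addition to the original PATH.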
    
    
    
    @depends(target, host)
    def want_wine(target, host):
        return target.kernel == "WINNT" and host.kernel != "WINNT"
    
    
    
        "WINE",
        ["wine64", "wine"],
        allow_missing=True,
    
    )
    check_prog("WGET", ("wget",), allow_missing=True)
    
    include("build/moz.configure/toolchain.configure", when="--enable-compile-environment")
    
    include("build/moz.configure/pkg.configure")
    
    # Make this assignment here rather than in pkg.configure to avoid
    # requiring this file in unit tests.
    
    add_old_configure_assignment("PKG_CONFIG", pkg_config)
    
    include("build/moz.configure/memory.configure", when="--enable-compile-environment")
    include("build/moz.configure/headers.configure", when="--enable-compile-environment")
    include("build/moz.configure/warnings.configure", when="--enable-compile-environment")
    include("build/moz.configure/flags.configure", when="--enable-compile-environment")
    
    include("build/moz.configure/lto-pgo.configure", when="--enable-compile-environment")
    
    # rust.configure is included by js/moz.configure.
    
    option("--enable-valgrind", help="Enable Valgrind integration hooks")
    
    valgrind_h = check_header("valgrind/valgrind.h", when="--enable-valgrind")
    
    
    @depends("--enable-valgrind", valgrind_h)
    
    def check_valgrind(valgrind, valgrind_h):
        if valgrind:
            if not valgrind_h:
    
                die("--enable-valgrind specified but Valgrind is not installed")
    
    
    set_define("MOZ_VALGRIND", check_valgrind)
    set_config("MOZ_VALGRIND", check_valgrind)
    
    
        return target.kernel == "OpenBSD" or host.kernel == "OpenBSD"
    
    
    option(
        env="SO_VERSION",
        nargs=1,
        default="1.0",
        when=is_openbsd,
        help="Shared library version for OpenBSD systems",
    )
    
    @depends("SO_VERSION", when=is_openbsd)
    
    @template
    def library_name_info_template(host_or_target):
    
        assert host_or_target in {host, target}
    
        compiler = {
            host: host_c_compiler,
            target: c_compiler,
        }[host_or_target]
    
        @depends(host_or_target, compiler, so_version)
        def library_name_info_impl(host_or_target, compiler, so_version):
    
            if host_or_target.kernel == "WINNT":
    
                # There aren't artifacts for mingw builds, so it's OK that the
                # results are inaccurate in that case.
    
                if compiler and compiler.type != "clang-cl":
    
                        dll=namespace(prefix="", suffix=".dll"),
                        lib=namespace(prefix="lib", suffix="a"),
                        import_lib=namespace(prefix="lib", suffix="a"),
                        obj=namespace(prefix="", suffix="o"),
    
                    dll=namespace(prefix="", suffix=".dll"),
                    lib=namespace(prefix="", suffix="lib"),
                    import_lib=namespace(prefix="", suffix="lib"),
                    obj=namespace(prefix="", suffix="obj"),
    
            elif host_or_target.kernel == "Darwin":
    
                    dll=namespace(prefix="lib", suffix=".dylib"),
                    lib=namespace(prefix="lib", suffix="a"),
                    import_lib=namespace(prefix=None, suffix=""),
                    obj=namespace(prefix="", suffix="o"),
    
                dll=namespace(prefix="lib", suffix=so),
                lib=namespace(prefix="lib", suffix="a"),
                import_lib=namespace(prefix=None, suffix=""),
                obj=namespace(prefix="", suffix="o"),
    
    host_library_name_info = library_name_info_template(host)
    library_name_info = library_name_info_template(target)
    
    set_config("DLL_PREFIX", library_name_info.dll.prefix)
    set_config("DLL_SUFFIX", library_name_info.dll.suffix)
    set_config("HOST_DLL_PREFIX", host_library_name_info.dll.prefix)
    set_config("HOST_DLL_SUFFIX", host_library_name_info.dll.suffix)
    set_config("LIB_PREFIX", library_name_info.lib.prefix)
    set_config("LIB_SUFFIX", library_name_info.lib.suffix)
    set_config("OBJ_SUFFIX", library_name_info.obj.suffix)
    
    # Lots of compilation tests depend on this variable being present.
    
    add_old_configure_assignment("OBJ_SUFFIX", library_name_info.obj.suffix)
    set_config("IMPORT_LIB_SUFFIX", library_name_info.import_lib.suffix)
    set_define(
        "MOZ_DLL_PREFIX", depends(library_name_info.dll.prefix)(lambda s: '"%s"' % s)
    )
    set_define(
        "MOZ_DLL_SUFFIX", depends(library_name_info.dll.suffix)(lambda s: '"%s"' % s)
    )
    set_config("WASM_OBJ_SUFFIX", "wasm")
    
    # Make `profiling` available to this file even when js/moz.configure
    # doesn't end up included.
    profiling = dependable(False)
    
    # Same for js_standalone
    js_standalone = dependable(False)
    
    # Same for fold_libs
    fold_libs = dependable(False)
    
    
    @depends("--help")
    @imports(_from="mozbuild.backend", _import="backends")
    
    @deprecated_option("--enable-build-backend", nargs="+", choices=build_backends_choices)
    
            return tuple("+%s" % b for b in backends)
    
    imply_option("--build-backends", build_backend)
    
    
    @depends(
        "--enable-artifact-builds",
        "--disable-compile-environment",
        "--enable-build-backend",
        "--enable-project",
        "--enable-application",
        "--help",
    )
    @imports("sys")
    def build_backend_defaults(
        artifact_builds, compile_environment, requested_backends, project, application, _
    ):
    
        if application:
            project = application[0]
        elif project:
            project = project[0]

        if "Tup" in requested_backends:
            # As a special case, if Tup was requested, do not combine it with any
            # Make based backend by default.
            all_backends = []
        elif artifact_builds:
            all_backends = ["FasterMake+RecursiveMake"]
        else:
            all_backends = ["RecursiveMake", "FasterMake"]

        # Normally, we'd use target.os == 'WINNT', but a dependency on target
        # would require target to depend on --help, as well as host and shell,
        # and this is not a can of worms we can open at the moment.
        if (
            sys.platform == "win32"
            and compile_environment
            and project not in ("mobile/android", "memory", "tools/update-programs")
        ):
            all_backends.append("VisualStudio")

        return tuple(all_backends) or None

    
    option(
        "--build-backends",
        nargs="+",
        default=build_backend_defaults,
        choices=build_backends_choices,
        help="Build backends to generate",
    )
    
    
    @depends("--build-backends")
    
    
    set_config("BUILD_BACKENDS", build_backends)
    
    @depends(check_build_environment, build_backends)
    @imports("glob")
    @imports("os")
    def check_objdir_backend_reuse(build_env, backends):
        # "Make based" might be RecursiveMake or a hybrid backend, so "Make" is
        # intentionally vague for use with the substring match below.
        incompatible_backends = (("Tup", "Make"), ("Make", "Tup"))
        for backend_file in glob.iglob(
            os.path.join(build_env.topobjdir, "backend.*Backend")
        ):
            for prev, curr in incompatible_backends:
                if prev in backend_file and any(curr in b for b in backends):
                    die(
                        "The active objdir, %s, was previously "
                        "used to build with a %s based backend. "
                        "Change objdirs (by setting MOZ_OBJDIR in "
                        "your mozconfig) or clobber to continue.\n",
                        build_env.topobjdir,
                        prev,
                    )


    # Determine whether to build the gtest xul. This happens in automation
    # on Android and Desktop platforms with the exception of:
    #  - Windows PGO, where linking xul-gtest.dll takes too long;
    #  - Android other than x86_64, where gtest is not required.
    
    @depends(
        "MOZ_PGO",
        build_project,
        target,
        "MOZ_AUTOMATION",
        enable_tests,
        when="--enable-compile-environment",
    )
    
    def build_gtest(pgo, build_project, target, automation, enable_tests):
        return bool(
            enable_tests
            and automation
            and build_project in ("browser", "comm/mail", "mobile/android")
            and not (
                (pgo and target.os == "WINNT")
                or (target.os == "Android" and target.cpu != "x86_64")
            )
        )


    option(
        "--enable-gtest-in-build",
        default=build_gtest,
        help="{Enable|Force disable} building the gtest libxul during the build.",
        when="--enable-compile-environment",
    )
    
    set_config("LINK_GTEST_DURING_COMPILE", True, when="--enable-gtest-in-build")
    
    # Localization
    # ==============================================================
    
    option(
        "--enable-ui-locale",
        default="en-US",
        help="Select the user interface locale (default: en-US)",
    )
    
    set_config("MOZ_UI_LOCALE", depends("--enable-ui-locale")(lambda x: x))
    
    # clang-plugin location
    # ==============================================================
    
    @depends(host_library_name_info, check_build_environment, when="--enable-clang-plugin")
    @imports("os")
    def clang_plugin_path(library_name_info, build_env):
        topobjdir = build_env.topobjdir
        if topobjdir.endswith("/js/src"):
            topobjdir = topobjdir[:-7]
        return os.path.abspath(
            os.path.join(
                topobjdir,
                "build",
                "clang-plugin",
                "%sclang-plugin%s"
                % (library_name_info.dll.prefix, library_name_info.dll.suffix),
            )
        )


    add_old_configure_assignment("CLANG_PLUGIN", clang_plugin_path)
    
    # Awk detection
    # ==============================================================
    
    awk = check_prog("AWK", ("gawk", "mawk", "nawk", "awk"))
    
    
    # Until the AWK variable is no longer needed by old-configure.
    @depends(awk)
    def awk_for_old_configure(value):
        return value

    
    add_old_configure_assignment("AWK", awk_for_old_configure)
    
    # Perl detection
    # ==============================================================
    
    perl = check_prog("PERL", ("perl5", "perl"))
    
    
    # Until the PERL variable is no longer needed by old-configure.
    @depends(perl)
    def perl_for_old_configure(value):
        return value

    
    add_old_configure_assignment("PERL", perl_for_old_configure)
    
    
    
    @template
    def perl_version_check(min_version):
        @depends(perl)
        @checking("for minimum required perl version >= %s" % min_version)
        def get_perl_version(perl):
            return Version(
                check_cmd_output(
                    perl,
                    "-e",
                    "print $]",
                    onerror=lambda: die("Failed to get perl version."),
                )
            )
    
    
        @depends(get_perl_version)
        def check_perl_version(version):
            if version < min_version:
                die("Perl %s or higher is required.", min_version)

        @depends(perl)
        @checking("for full perl installation")
        @imports("subprocess")
        def has_full_perl_installation(perl):
            ret = subprocess.call([perl, "-e", "use Config; exit(!-d $Config{archlib})"])
            return ret == 0

        @depends(has_full_perl_installation)
        def require_full_perl_installation(has_full_perl_installation):
            if not has_full_perl_installation:
                die(
                    "Cannot find Config.pm or $Config{archlib}. "
                    "A full perl installation is required."
                )
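
    # perl_version_check is a template: the checks above only take effect once
    # it is instantiated with the minimum supported version, e.g. a call like
    # perl_version_check("5.006").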
    
    # GNU make detection
    # ==============================================================
    
    option(env="MAKE", nargs=1, help="Path to GNU make")
    
    
    @depends("MAKE", host)
    def possible_makes(make, host):
        candidates = []
        if host.kernel == "WINNT":
            candidates.append("mingw32-make")
        if make:
            candidates.append(make[0])
        if host.kernel == "WINNT":
            candidates.extend(("mozmake", "make", "gmake"))
        else:
            candidates.extend(("gmake", "make"))
        return candidates

    
    check_prog("GMAKE", possible_makes, bootstrap="mozmake")
    
    # watchman detection
    # ==============================================================
    
    
    option(env="WATCHMAN", nargs=1, help="Path to the watchman program")
    
    
    @depends(host, "WATCHMAN")
    @checking("for watchman", callback=lambda w: w.path if w else "not found")
    
    def watchman(host, prog):
        # On Windows, `watchman` is only supported on 64-bit hosts.
    
        if host.os == "WINNT" and host.cpu != "x86_64":
    
        # `watchman version` will talk to the Watchman daemon service.
        # This can hang due to permissions problems. e.g.
        # https://github.com/facebook/watchman/issues/376. So use
        # `watchman --version` to prevent a class of failures.
    
        out = check_cmd_output(prog, "--version", onerror=lambda: None)
    
        return namespace(path=prog, version=Version(out.strip()))
    
    def watchman_version(w):
        return w.version
    
    
    @depends_all(hg_version, hg_config, watchman)
    @checking("for watchman Mercurial integration")
    @imports("os")
    def watchman_hg(hg_version, hg_config, watchman):
        if hg_version < Version("3.8"):
            return "no (Mercurial 3.8+ required)"

        ext_enabled = False
        for k in ("extensions.fsmonitor", "extensions.hgext.fsmonitor"):
            if k in hg_config and hg_config[k] != "!":
                ext_enabled = True

        mode_disabled = hg_config.get("fsmonitor.mode") == "off"

        if not ext_enabled:
            return "no (fsmonitor extension not enabled)"
        if mode_disabled:
            return "no (fsmonitor.mode=off disables fsmonitor)"

        return True
    
    # Miscellaneous programs
    # ==============================================================
    
                DSYMUTIL=("dsymutil", "llvm-dsymutil"),
                MKFSHFS=("newfs_hfs", "mkfs.hfsplus"),
                HFS_TOOL=("hfsplus",),
    
        if target.os == "GNU" and target.kernel == "Linux":
            return namespace(RPMBUILD=("rpmbuild",))
    
    
    check_prog("DSYMUTIL", extra_programs.DSYMUTIL, allow_missing=True)
    check_prog("MKFSHFS", extra_programs.MKFSHFS, allow_missing=True)
    check_prog("HFS_TOOL", extra_programs.HFS_TOOL, allow_missing=True)
    check_prog("RPMBUILD", extra_programs.RPMBUILD, allow_missing=True)
    
            "makensis-3.01",
            "makensis-3.0b3",
            "makensis-3.0b1",
            "makensis",
    
        ]
    
        # Look for nsis installed by msys environment. But only the 32-bit version.
        # We use an absolute path and insert as the first entry so it is preferred
        # over a 64-bit exe that may be in PATH.
    
        if "MSYSTEM_PREFIX" in os.environ:
            prefix = os.path.dirname(os.environ["MSYSTEM_PREFIX"])
            candidates.insert(0, os.path.join(prefix, "mingw32", "bin", "makensis.exe"))
    
    
    nsis = check_prog("MAKENSISU", makensis_progs, allow_missing=True)
    
    
    # Make sure the version of makensis is up to date.
    @depends(nsis, wine)
    @checking("for NSIS version")
    @imports("re")
    def nsis_version(nsis, wine):
        if not nsis:
            return None

        nsis_min_version = "3.0b1"

        def onerror():
            return die("Failed to get nsis version.")

        if wine and nsis.lower().endswith(".exe"):
            out = check_cmd_output(wine, nsis, "-version", onerror=onerror)
        else:
            out = check_cmd_output(nsis, "-version", onerror=onerror)

        m = re.search(r"(?<=v)[0-9]+\.[0-9]+((a|b|rc)[0-9]+)?", out)

        if not m:
            raise FatalCheckError("Unknown version of makensis")

        ver = Version(m.group(0))

        # Version comparisons don't quite work well with beta versions, so ensure
        # it works for the non-beta version.
        if ver < nsis_min_version and (ver >= "3.0a" or ver < "3"):
            raise FatalCheckError(
                "To build the installer you must have NSIS"
                " version %s or greater in your path" % nsis_min_version
            )

        return ver
    
    # And that makensis is 32-bit (but only on Windows).
    @depends_if(nsis, when=depends(host)(lambda h: h.kernel == "WINNT"))
    @checking("for 32-bit NSIS")
    def nsis_binary_type(nsis):
        bin_type = windows_binary_type(nsis)
        if bin_type != "win32":
            raise FatalCheckError("%s is not a 32-bit Windows application" % nsis)

        return "yes"
    
    # And any flags we have to give to makensis
    @depends(host)
    def nsis_flags(host):
        if host.kernel != "WINNT":
            return "-nocd"
        return ""
    
    set_config("MAKENSISU_FLAGS", nsis_flags)
    
    check_prog("7Z", ("7z", "7za"), allow_missing=True, when=target_is_windows)
    check_prog("UPX", ("upx",), allow_missing=True, when=target_is_windows)
    
    @depends(host_c_compiler, c_compiler, bindgen_config_paths)
    @imports("os")
    def llvm_objdump(host_c_compiler, c_compiler, bindgen_config_paths):
        clang = None
        for compiler in (host_c_compiler, c_compiler):
            if compiler and compiler.type == "clang":
                clang = compiler.compiler
                break
            elif compiler and compiler.type == "clang-cl":
                clang = os.path.join(os.path.dirname(compiler.compiler), "clang")
                break

        if not clang and bindgen_config_paths:
            clang = bindgen_config_paths.clang_path

        llvm_objdump = "llvm-objdump"
        if clang:
            out = check_cmd_output(
                clang, "--print-prog-name=llvm-objdump", onerror=lambda: None
            )
            if out:
                llvm_objdump = out.rstrip()

        return (llvm_objdump,)


    llvm_objdump = check_prog(
        "LLVM_OBJDUMP",
        llvm_objdump,
        what="llvm-objdump",
        when="--enable-compile-environment",
    )

    add_old_configure_assignment("LLVM_OBJDUMP", llvm_objdump)
    
    option("--enable-dtrace", help="Build with dtrace support")
    
    dtrace = check_header(
        "sys/sdt.h",
        when="--enable-dtrace",
        onerror=lambda: die("dtrace enabled but sys/sdt.h not found"),