summaryrefslogtreecommitdiff
path: root/zephyr/zmake/zmake
diff options
context:
space:
mode:
Diffstat (limited to 'zephyr/zmake/zmake')
-rw-r--r--zephyr/zmake/zmake/__main__.py9
-rw-r--r--zephyr/zmake/zmake/jobserver.py149
-rw-r--r--zephyr/zmake/zmake/multiproc.py45
-rw-r--r--zephyr/zmake/zmake/zmake.py691
4 files changed, 385 insertions, 509 deletions
diff --git a/zephyr/zmake/zmake/__main__.py b/zephyr/zmake/zmake/__main__.py
index 2b7358d1f1..23fb58eca6 100644
--- a/zephyr/zmake/zmake/__main__.py
+++ b/zephyr/zmake/zmake/__main__.py
@@ -10,7 +10,6 @@ import os
import pathlib
import sys
-import zmake.jobserver as jobserver
import zmake.multiproc as multiproc
import zmake.zmake as zm
@@ -64,7 +63,7 @@ def maybe_reexec(argv):
os.execve(sys.executable, [sys.executable, "-m", "zmake", *argv], env)
-def call_with_namespace(func, namespace, **kwds):
+def call_with_namespace(func, namespace):
"""Call a function with arguments applied from a Namespace.
Args:
@@ -74,6 +73,7 @@ def call_with_namespace(func, namespace, **kwds):
Returns:
The result of calling the callable.
"""
+ kwds = {}
sig = inspect.signature(func)
names = [p.name for p in sig.parameters.values()]
for name, value in vars(namespace).items():
@@ -392,11 +392,8 @@ def main(argv=None):
multiproc.LOG_JOB_NAMES = False
logging.basicConfig(format=log_format, level=opts.log_level)
- # Create the jobserver client BEFORE any pipes get opened in LogWriter
- jobserver_client = jobserver.GNUMakeJobClient.from_environ(jobs=opts.jobs)
- multiproc.LogWriter.reset()
- zmake = call_with_namespace(zm.Zmake, opts, jobserver=jobserver_client)
+ zmake = call_with_namespace(zm.Zmake, opts)
try:
subcommand_method = getattr(zmake, opts.subcommand.replace("-", "_"))
result = call_with_namespace(subcommand_method, opts)
diff --git a/zephyr/zmake/zmake/jobserver.py b/zephyr/zmake/zmake/jobserver.py
index c1c9538b35..a3d6287da2 100644
--- a/zephyr/zmake/zmake/jobserver.py
+++ b/zephyr/zmake/zmake/jobserver.py
@@ -3,17 +3,13 @@
# found in the LICENSE file.
"""Module for job counters, limiting the amount of concurrent executions."""
-import fcntl
-import functools
import logging
import multiprocessing
import os
import re
import select
-import selectors
import shlex
import subprocess
-import sys
import zmake
@@ -30,8 +26,7 @@ class JobHandle:
pass
def __exit__(self, exc_type, exc_value, traceback):
- if self.release_func:
- self.release_func(*self.args, **self.kwargs)
+ self.release_func(*self.args, **self.kwargs)
class JobClient:
@@ -46,16 +41,6 @@ class JobClient:
"""Get the environment variables necessary to share the job server."""
return {}
- @staticmethod
- def is_sequential():
- """Returns True if the jobserver is using -j1."""
- return False
-
- @staticmethod
- def pass_fds():
- """Returns the file descriptors that should be passed to subprocesses."""
- return []
-
def popen(self, argv, **kwargs):
"""Start a process using subprocess.Popen
@@ -68,9 +53,7 @@ class JobClient:
# the bare minimum (PATH only). This prevents us from building obscure
# dependencies on the environment variables.
kwargs.setdefault("env", {"PATH": "/bin:/usr/bin"})
- kwargs.setdefault("pass_fds", [])
kwargs["env"].update(self.env())
- kwargs["pass_fds"] += self.pass_fds()
logger = logging.getLogger(self.__class__.__name__)
logger.debug(
@@ -79,53 +62,17 @@ class JobClient:
" " if kwargs["env"] else "",
zmake.util.repr_command(argv),
)
- return subprocess.Popen( # pylint:disable=consider-using-with
- argv, **kwargs
- )
+ return subprocess.Popen(argv, **kwargs)
class GNUMakeJobClient(JobClient):
- """A job client for GNU make.
+ """A job client for GNU make."""
- A client of jobserver is allowed to run 1 job without contacting the
- jobserver, so maintain an optional self._internal_pipe to hold that
- job.
- """
-
- def __init__(self, inheritable_pipe, jobs, internal_jobs=0, makeflags=None):
- self._makeflags = makeflags
- self._inheritable_pipe = inheritable_pipe
- self.jobs = jobs
- self._selector = selectors.DefaultSelector()
- if internal_jobs:
- self._internal_pipe = os.pipe()
- os.write(self._internal_pipe[1], b"+" * internal_jobs)
- os.set_blocking(self._internal_pipe[0], False)
- self._selector.register(
- self._internal_pipe[0],
- selectors.EVENT_READ,
- self._internal_pipe[1],
- )
- else:
- self._internal_pipe = None
- if self._inheritable_pipe is not None:
- os.set_blocking(self._inheritable_pipe[0], False)
- self._selector.register(
- self._inheritable_pipe[0],
- selectors.EVENT_READ,
- self._inheritable_pipe[1],
- )
-
- def __del__(self):
- if self._inheritable_pipe:
- os.close(self._inheritable_pipe[0])
- os.close(self._inheritable_pipe[1])
- if self._internal_pipe:
- os.close(self._internal_pipe[0])
- os.close(self._internal_pipe[1])
+ def __init__(self, read_fd, write_fd):
+ self._pipe = [read_fd, write_fd]
@classmethod
- def from_environ(cls, env=None, jobs=0):
+ def from_environ(cls, env=None):
"""Create a job client from an environment with the MAKEFLAGS variable.
If we are started under a GNU Make Job Server, we can search
@@ -134,57 +81,22 @@ class GNUMakeJobClient(JobClient):
respectively. If we don't find this environment variable (or
the string inside of it), this will raise an OSError.
- The specification for MAKEFLAGS is:
- * If the first char is "n", this is a dry run, just exit.
- * If the flags contains -j1, go to sequential mode.
- * If the flags contains --jobserver-auth=R,W AND those file
- descriptors are valid, use the jobserver. Otherwise output a
- warning.
-
Args:
env: Optionally, the environment to search.
- jobs: The number of jobs set by the user on the command line.
Returns:
- A GNUMakeJobClient configured appropriately or None if there is
- no MAKEFLAGS environment variable.
+ A GNUMakeJobClient configured appropriately.
"""
if env is None:
env = os.environ
makeflags = env.get("MAKEFLAGS")
if not makeflags:
- return None
+ raise OSError("MAKEFLAGS is not set in the environment")
match = re.search(r"--jobserver-auth=(\d+),(\d+)", makeflags)
- if match:
- pipe = [int(x) for x in match.groups()]
- if jobs:
- pipe = None
- logging.warning(
- "-jN forced on command line; ignoring GNU make jobserver"
- )
- else:
- try:
- # Use F_GETFD to see if file descriptors are valid
- fcntl.fcntl(pipe[0], fcntl.F_GETFD)
- fcntl.fcntl(pipe[1], fcntl.F_GETFD)
- logging.info("using GNU make jobserver")
- except OSError:
- pipe = None
- logging.warning(
- "No file descriptors; ignoring GNU make jobserver"
- )
- else:
- pipe = None
- if not jobs:
- match = re.search(r"-j(\d+)", makeflags)
- if match:
- jobs = int(match.group(1))
- if jobs == 1:
- logging.info("Running in sequential mode (-j1)")
- if makeflags[0] == "n":
- logging.info("MAKEFLAGS contained dry-run flag")
- sys.exit(0)
- return cls(pipe, jobs, internal_jobs=1, makeflags=makeflags)
+ if not match:
+ raise OSError("MAKEFLAGS did not contain jobserver flags")
+ read_fd, write_fd = map(int, match.groups())
+ return cls(read_fd, write_fd)
def get_job(self):
"""Claim a job.
@@ -192,38 +104,12 @@ class GNUMakeJobClient(JobClient):
Returns:
A JobHandle object.
"""
- while True:
- ready_items = self._selector.select()
- if len(ready_items) > 0:
- read_fd = ready_items[0][0].fd
- write_fd = ready_items[0][0].data
- try:
- byte = os.read(read_fd, 1)
- return JobHandle(
- functools.partial(os.write, write_fd, byte)
- )
- except BlockingIOError:
- pass
+ byte = os.read(self._pipe[0], 1)
+ return JobHandle(lambda: os.write(self._pipe[1], byte))
def env(self):
"""Get the environment variables necessary to share the job server."""
- if self._makeflags:
- return {"MAKEFLAGS": self._makeflags}
- flag = ""
- if self.jobs:
- flag += f" -j{self.jobs}"
- if self.jobs != 1 and self._inheritable_pipe is not None:
- flag += " --jobserver-auth={},{}".format(*self._inheritable_pipe)
- return {"MAKEFLAGS": flag}
-
- def is_sequential(self):
- return self.jobs == 1
-
- def pass_fds(self):
- """Returns the file descriptors that should be passed to subprocesses."""
- if self.jobs != 1 and self._inheritable_pipe is not None:
- return self._inheritable_pipe
- return []
+ return {"MAKEFLAGS": "--jobserver-auth={},{}".format(*self._pipe)}
class GNUMakeJobServer(GNUMakeJobClient):
@@ -234,10 +120,11 @@ class GNUMakeJobServer(GNUMakeJobClient):
"""
def __init__(self, jobs=0):
+ [read_fd, write_fd] = os.pipe()
+ super().__init__(read_fd, write_fd)
if not jobs:
jobs = multiprocessing.cpu_count()
elif jobs > select.PIPE_BUF:
jobs = select.PIPE_BUF
- super().__init__(os.pipe(), jobs)
- os.write(self._inheritable_pipe[1], b"+" * jobs)
+ os.write(self._pipe[1], b"+" * jobs)
diff --git a/zephyr/zmake/zmake/multiproc.py b/zephyr/zmake/zmake/multiproc.py
index a668bcb961..0838f5f1f8 100644
--- a/zephyr/zmake/zmake/multiproc.py
+++ b/zephyr/zmake/zmake/multiproc.py
@@ -43,8 +43,7 @@ class LogWriter:
# A local pipe use to signal the look that a new file descriptor was added and
# should be included in the select statement.
- _logging_interrupt_pipe = []
-
+ _logging_interrupt_pipe = os.pipe()
# A condition variable used to synchronize logging operations.
_logging_cv = threading.Condition()
# A map of file descriptors to their LogWriter
@@ -55,14 +54,7 @@ class LogWriter:
@classmethod
def reset(cls):
"""Reset this module to its starting state (useful for tests)"""
- with LogWriter._logging_cv:
- LogWriter._logging_map.clear()
- if len(LogWriter._logging_interrupt_pipe) > 1:
- os.write(LogWriter._logging_interrupt_pipe[1], b"x")
- else:
- cls._logging_interrupt_pipe = os.pipe()
- LogWriter._logging_thread = None
- LogWriter._logging_cv.notify_all()
+ LogWriter._logging_map.clear()
def __init__(
self,
@@ -143,21 +135,20 @@ class LogWriter:
removed from the map as it is no longer valid.
"""
with LogWriter._logging_cv:
- if file_descriptor in LogWriter._logging_map:
- writer = LogWriter._logging_map[file_descriptor]
- if file_descriptor.closed:
- del LogWriter._logging_map[file_descriptor]
- LogWriter._logging_cv.notify_all()
- return
- line = file_descriptor.readline()
- if not line:
- # EOF
- del LogWriter._logging_map[file_descriptor]
- LogWriter._logging_cv.notify_all()
- return
- line = line.rstrip("\n")
- if line:
- writer.log_line(line)
+ writer = LogWriter._logging_map[file_descriptor]
+ if file_descriptor.closed:
+ del LogWriter._logging_map[file_descriptor]
+ LogWriter._logging_cv.notify_all()
+ return
+ line = file_descriptor.readline()
+ if not line:
+ # EOF
+ del LogWriter._logging_map[file_descriptor]
+ LogWriter._logging_cv.notify_all()
+ return
+ line = line.rstrip("\n")
+ if line:
+ writer.log_line(line)
@classmethod
def _prune_logging_fds(cls):
@@ -304,9 +295,6 @@ class Executor:
Args:
func: A function which returns an int result code or throws an
exception.
-
- Returns:
- A join function which will wait until this task is finished.
"""
with self.lock:
thread = threading.Thread(
@@ -314,7 +302,6 @@ class Executor:
)
thread.start()
self.threads.append(thread)
- return thread.join
def wait(self):
"""Wait for a result to be available.
diff --git a/zephyr/zmake/zmake/zmake.py b/zephyr/zmake/zmake/zmake.py
index fb88dae7e9..f81f157054 100644
--- a/zephyr/zmake/zmake/zmake.py
+++ b/zephyr/zmake/zmake/zmake.py
@@ -187,10 +187,13 @@ class Zmake:
if jobserver:
self.jobserver = jobserver
else:
- self.jobserver = zmake.jobserver.GNUMakeJobServer(jobs=jobs)
+ try:
+ self.jobserver = zmake.jobserver.GNUMakeJobClient.from_environ()
+ except OSError:
+ self.jobserver = zmake.jobserver.GNUMakeJobServer(jobs=jobs)
self.executor = zmake.multiproc.Executor()
- self._sequential = self.jobserver.is_sequential() and not goma
+ self._sequential = jobs == 1 and not goma
self.failed_projects = []
@property
@@ -240,7 +243,6 @@ class Zmake:
delete_intermediates=False,
static_version=False,
save_temps=False,
- wait_for_executor=True,
):
"""Locate and configure the specified projects."""
# Resolve build_dir if needed.
@@ -267,6 +269,7 @@ class Zmake:
coverage=coverage,
allow_warnings=allow_warnings,
extra_cflags=extra_cflags,
+ multiproject=len(projects) > 1,
delete_intermediates=delete_intermediates,
static_version=static_version,
save_temps=save_temps,
@@ -276,11 +279,11 @@ class Zmake:
result = self.executor.wait()
if result:
return result
+ result = self.executor.wait()
+ if result:
+ return result
non_test_projects = [p for p in projects if not p.config.is_test]
if len(non_test_projects) > 1 and coverage and build_after_configure:
- result = self.executor.wait()
- if result:
- return result
result = self._merge_lcov_files(
projects=non_test_projects,
build_dir=build_dir,
@@ -289,11 +292,6 @@ class Zmake:
if result:
self.failed_projects.append(str(build_dir / "all_builds.info"))
return result
- elif wait_for_executor:
- result = self.executor.wait()
- if result:
- return result
-
return 0
def build(
@@ -387,10 +385,8 @@ class Zmake:
delete_intermediates=False,
static_version=True,
save_temps=False,
- wait_for_executor=False,
)
- if not result:
- result = self.executor.wait()
+
if result:
self.logger.error(
"compare-builds failed to build all projects at %s",
@@ -444,364 +440,372 @@ class Zmake:
coverage=False,
allow_warnings=False,
extra_cflags=None,
+ multiproject=False,
delete_intermediates=False,
static_version=False,
save_temps=False,
):
"""Set up a build directory to later be built by "zmake build"."""
try:
- with self.jobserver.get_job():
- # Clobber build directory if requested.
- if clobber and build_dir.exists():
- self.logger.info(
- "Clearing build directory %s due to --clobber",
- build_dir,
- )
- shutil.rmtree(build_dir)
+ # Clobber build directory if requested.
+ if clobber and build_dir.exists():
+ self.logger.info(
+ "Clearing build directory %s due to --clobber", build_dir
+ )
+ shutil.rmtree(build_dir)
+
+ generated_include_dir = (build_dir / "include").resolve()
+ base_config = zmake.build_config.BuildConfig(
+ cmake_defs={
+ "CMAKE_EXPORT_COMPILE_COMMANDS": "ON",
+ "DTS_ROOT": str(self.module_paths["ec"] / "zephyr"),
+ "SYSCALL_INCLUDE_DIRS": str(
+ self.module_paths["ec"]
+ / "zephyr"
+ / "include"
+ / "drivers"
+ ),
+ "USER_CACHE_DIR": str(
+ self.module_paths["ec"]
+ / "build"
+ / "zephyr"
+ / "user-cache"
+ ),
+ "ZEPHYR_BASE": str(self.zephyr_base),
+ "ZMAKE_INCLUDE_DIR": str(generated_include_dir),
+ "ZMAKE_PROJECT_NAME": project.config.project_name,
+ **(
+ {"EXTRA_EC_VERSION_FLAGS": "--static"}
+ if static_version
+ else {}
+ ),
+ **(
+ {"EXTRA_CFLAGS": "-save-temps=obj"}
+ if save_temps
+ else {}
+ ),
+ },
+ )
+
+ # Prune the module paths to just those required by the project.
+ module_paths = project.prune_modules(self.module_paths)
+
+ module_config = zmake.modules.setup_module_symlinks(
+ build_dir / "modules", module_paths
+ )
+
+ # Symlink the Zephyr base into the build directory so it can
+ # be used in the build phase.
+ util.update_symlink(self.zephyr_base, build_dir / "zephyr_base")
+
+ dts_overlay_config = project.find_dts_overlays(module_paths)
- generated_include_dir = (build_dir / "include").resolve()
- base_config = zmake.build_config.BuildConfig(
+ toolchain_support = project.get_toolchain(
+ module_paths, override=toolchain
+ )
+ toolchain_config = toolchain_support.get_build_config()
+
+ if bringup:
+ base_config |= zmake.build_config.BuildConfig(
+ kconfig_defs={"CONFIG_PLATFORM_EC_BRINGUP": "y"}
+ )
+ if coverage:
+ base_config |= zmake.build_config.BuildConfig(
+ kconfig_defs={"CONFIG_COVERAGE": "y"}
+ )
+ if allow_warnings:
+ base_config |= zmake.build_config.BuildConfig(
+ cmake_defs={"ALLOW_WARNINGS": "ON"}
+ )
+ if extra_cflags:
+ base_config |= zmake.build_config.BuildConfig(
+ cmake_defs={"EXTRA_CFLAGS": extra_cflags},
+ )
+ if self.goma:
+ base_config |= zmake.build_config.BuildConfig(
cmake_defs={
- "CMAKE_EXPORT_COMPILE_COMMANDS": "ON",
- "DTS_ROOT": str(self.module_paths["ec"] / "zephyr"),
- "SYSCALL_INCLUDE_DIRS": str(
- self.module_paths["ec"]
- / "zephyr"
- / "include"
- / "drivers"
- ),
- "USER_CACHE_DIR": str(
- self.module_paths["ec"]
- / "build"
- / "zephyr"
- / "user-cache"
- ),
- "ZEPHYR_BASE": str(self.zephyr_base),
- "ZMAKE_INCLUDE_DIR": str(generated_include_dir),
- "ZMAKE_PROJECT_NAME": project.config.project_name,
- **(
- {"EXTRA_EC_VERSION_FLAGS": "--static"}
- if static_version
- else {}
- ),
- **(
- {"EXTRA_CFLAGS": "-save-temps=obj"}
- if save_temps
- else {}
- ),
+ "CMAKE_C_COMPILER_LAUNCHER": self.gomacc,
+ "CMAKE_CXX_COMPILER_LAUNCHER": self.gomacc,
},
)
- # Prune the module paths to just those required by the project.
- module_paths = project.prune_modules(self.module_paths)
-
- module_config = zmake.modules.setup_module_symlinks(
- build_dir / "modules", module_paths
+ if not build_dir.exists():
+ build_dir.mkdir()
+ if not generated_include_dir.exists():
+ generated_include_dir.mkdir()
+ processes = []
+ files_to_write = []
+ self.logger.info(
+ "Building %s in %s.", project.config.project_name, build_dir
+ )
+ for build_name, build_config in project.iter_builds():
+ config: zmake.build_config.BuildConfig = (
+ base_config
+ | toolchain_config
+ | module_config
+ | dts_overlay_config
+ | build_config
)
- # Symlink the Zephyr base into the build directory so it can
- # be used in the build phase.
- util.update_symlink(self.zephyr_base, build_dir / "zephyr_base")
-
- dts_overlay_config = project.find_dts_overlays(module_paths)
+ config_json = config.as_json()
+ config_json_file = build_dir / f"cfg-{build_name}.json"
+ if config_json_file.is_file():
+ if config_json_file.read_text() == config_json:
+ self.logger.info(
+ "Skip reconfiguring %s:%s due to previous cmake run of "
+ "equivalent configuration. Run with --clobber if this "
+ "optimization is undesired.",
+ project.config.project_name,
+ build_name,
+ )
+ continue
+ config_json_file.unlink()
- toolchain_support = project.get_toolchain(
- module_paths, override=toolchain
- )
- toolchain_config = toolchain_support.get_build_config()
+ files_to_write.append((config_json_file, config_json))
- if bringup:
- base_config |= zmake.build_config.BuildConfig(
- kconfig_defs={"CONFIG_PLATFORM_EC_BRINGUP": "y"}
- )
- if coverage:
- base_config |= zmake.build_config.BuildConfig(
- kconfig_defs={"CONFIG_COVERAGE": "y"}
- )
- if allow_warnings:
- base_config |= zmake.build_config.BuildConfig(
- cmake_defs={"ALLOW_WARNINGS": "ON"}
- )
- if extra_cflags:
- base_config |= zmake.build_config.BuildConfig(
- cmake_defs={"EXTRA_CFLAGS": extra_cflags},
- )
- if self.goma:
- base_config |= zmake.build_config.BuildConfig(
- cmake_defs={
- "CMAKE_C_COMPILER_LAUNCHER": self.gomacc,
- "CMAKE_CXX_COMPILER_LAUNCHER": self.gomacc,
- },
+ output_dir = build_dir / "build-{}".format(build_name)
+ if output_dir.exists():
+ self.logger.info(
+ "Clobber %s due to configuration changes.", output_dir
)
+ shutil.rmtree(output_dir)
- if not build_dir.exists():
- build_dir.mkdir()
- if not generated_include_dir.exists():
- generated_include_dir.mkdir()
self.logger.info(
- "Building %s in %s.", project.config.project_name, build_dir
+ "Configuring %s:%s.",
+ project.config.project_name,
+ build_name,
)
- # To reconstruct a Project object later, we need to know the
- # name and project directory.
- (build_dir / "project_name.txt").write_text(
- project.config.project_name
+
+ kconfig_file = build_dir / "kconfig-{}.conf".format(build_name)
+ proc = config.popen_cmake(
+ self.jobserver,
+ project.config.project_dir,
+ output_dir,
+ kconfig_file,
+ stdin=subprocess.DEVNULL,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ encoding="utf-8",
+ errors="replace",
)
- util.update_symlink(
- project.config.project_dir, build_dir / "project"
+ job_id = "{}:{}".format(project.config.project_name, build_name)
+ zmake.multiproc.LogWriter.log_output(
+ self.logger,
+ logging.DEBUG,
+ proc.stdout,
+ log_level_override_func=cmake_log_level_override,
+ job_id=job_id,
)
+ zmake.multiproc.LogWriter.log_output(
+ self.logger,
+ logging.ERROR,
+ proc.stderr,
+ log_level_override_func=cmake_log_level_override,
+ job_id=job_id,
+ )
+ if self._sequential:
+ if proc.wait():
+ raise OSError(get_process_failure_msg(proc))
+ else:
+ processes.append(proc)
+ for proc in processes:
+ if proc.wait():
+ raise OSError(get_process_failure_msg(proc))
+
+ for path, contents in files_to_write:
+ path.write_text(contents)
+
+ # To reconstruct a Project object later, we need to know the
+ # name and project directory.
+ (build_dir / "project_name.txt").write_text(
+ project.config.project_name
+ )
+ util.update_symlink(
+ project.config.project_dir, build_dir / "project"
+ )
- wait_funcs = []
- for build_name, build_config in project.iter_builds():
- config: zmake.build_config.BuildConfig = (
- base_config
- | toolchain_config
- | module_config
- | dts_overlay_config
- | build_config
- )
-
- wait_func = self.executor.append(
- func=functools.partial(
- self._configure_one_build,
- config=config,
- build_dir=build_dir,
- build_name=build_name,
- project=project,
- )
- )
- wait_funcs.append(wait_func)
- # Outside the with...get_job above.
- for wait_func in wait_funcs:
- wait_func()
-
+ output_files = []
if build_after_configure:
- self._build(
+ result = self._build(
build_dir=build_dir,
project=project,
coverage=coverage,
+ output_files_out=output_files,
+ multiproject=multiproject,
static_version=static_version,
- delete_intermediates=delete_intermediates,
)
+ if result:
+ self.failed_projects.append(project.config.project_name)
+ return result
+
+ if delete_intermediates:
+ outdir = build_dir / "output"
+ for child in build_dir.iterdir():
+ if child != outdir:
+ logging.debug("Deleting %s", child)
+ if not child.is_symlink() and child.is_dir():
+ shutil.rmtree(child)
+ else:
+ child.unlink()
return 0
except Exception:
self.failed_projects.append(project.config.project_name)
raise
- def _configure_one_build(
- self,
- config,
- build_dir,
- build_name,
- project,
- ):
- """Run cmake and maybe ninja on one build dir."""
- with self.jobserver.get_job():
- config_json = config.as_json()
- config_json_file = build_dir / f"cfg-{build_name}.json"
- if config_json_file.is_file():
- if config_json_file.read_text() == config_json:
- self.logger.info(
- "Skip reconfiguring %s:%s due to previous cmake run of "
- "equivalent configuration. Run with --clobber if this "
- "optimization is undesired.",
- project.config.project_name,
- build_name,
- )
- return 0
- config_json_file.unlink()
-
- output_dir = build_dir / "build-{}".format(build_name)
- if output_dir.exists():
- self.logger.info(
- "Clobber %s due to configuration changes.",
- output_dir,
- )
- shutil.rmtree(output_dir)
-
- self.logger.info(
- "Configuring %s:%s.",
- project.config.project_name,
- build_name,
- )
-
- kconfig_file = build_dir / "kconfig-{}.conf".format(build_name)
- proc = config.popen_cmake(
- self.jobserver,
- project.config.project_dir,
- output_dir,
- kconfig_file,
- stdin=subprocess.DEVNULL,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- encoding="utf-8",
- errors="replace",
- )
- job_id = "{}:{}".format(project.config.project_name, build_name)
- zmake.multiproc.LogWriter.log_output(
- self.logger,
- logging.DEBUG,
- proc.stdout,
- log_level_override_func=cmake_log_level_override,
- job_id=job_id,
- )
- zmake.multiproc.LogWriter.log_output(
- self.logger,
- logging.ERROR,
- proc.stderr,
- log_level_override_func=cmake_log_level_override,
- job_id=job_id,
- )
- if proc.wait():
- raise OSError(get_process_failure_msg(proc))
- config_json_file.write_text(config_json)
- return 0
-
def _build(
self,
build_dir,
project: zmake.project.Project,
+ output_files_out=None,
coverage=False,
+ multiproject=False,
static_version=False,
- delete_intermediates=False,
):
"""Build a pre-configured build directory."""
- with self.jobserver.get_job():
- dirs: Dict[str, pathlib.Path] = {}
-
- build_dir = build_dir.resolve()
+ def wait_and_check_success(procs, writers):
+ """Wait for processes to complete and check for errors
+
+ Args:
+ procs: List of subprocess.Popen objects to check
+ writers: List of LogWriter objects to check
+
+ Returns:
+            True if all is OK
+ False if an error was found (so that zmake should exit)
+ """
+ bad = None
+ for proc in procs:
+ if proc.wait() and not bad:
+ bad = proc
+ if bad:
+ # Just show the first bad process for now. Both builds likely
+ # produce the same error anyway. If they don't, the user can
+ # still take action on the errors/warnings provided. Showing
+ # multiple 'Execution failed' messages is not very friendly
+ # since it exposes the fragmented nature of the build.
+ raise OSError(get_process_failure_msg(bad))
- # Compute the version string.
- version_string = zmake.version.get_version_string(
- project.config.project_name,
- build_dir / "zephyr_base",
- zmake.modules.locate_from_directory(build_dir / "modules"),
- static=static_version,
- )
+ # Let all output be produced before exiting
+ for writer in writers:
+ writer.wait()
+ return True
+
+ procs = []
+ log_writers = []
+ dirs: Dict[str, pathlib.Path] = {}
+
+ build_dir = build_dir.resolve()
+
+ # Compute the version string.
+ version_string = zmake.version.get_version_string(
+ project.config.project_name,
+ build_dir / "zephyr_base",
+ zmake.modules.locate_from_directory(build_dir / "modules"),
+ static=static_version,
+ )
- # The version header needs to generated during the build phase
- # instead of configure, as the tree may have changed since
- # configure was run.
- zmake.version.write_version_header(
- version_string,
- build_dir / "include" / "ec_version.h",
- "zmake",
- static=static_version,
- )
+        # The version header needs to be generated during the build phase
+ # instead of configure, as the tree may have changed since
+ # configure was run.
+ zmake.version.write_version_header(
+ version_string,
+ build_dir / "include" / "ec_version.h",
+ "zmake",
+ static=static_version,
+ )
- gcov = "gcov.sh-not-found"
- wait_funcs = []
- for build_name, _ in project.iter_builds():
+ gcov = "gcov.sh-not-found"
+ for build_name, _ in project.iter_builds():
+ with self.jobserver.get_job():
dirs[build_name] = build_dir / "build-{}".format(build_name)
gcov = dirs[build_name] / "gcov.sh"
- wait_func = self.executor.append(
- func=functools.partial(
- self._build_one_dir,
- build_name=build_name,
- dirs=dirs,
- coverage=coverage,
- project=project,
- )
+ cmd = ["/usr/bin/ninja", "-C", dirs[build_name].as_posix()]
+ if self.goma:
+ # Go nuts ninja, goma does the heavy lifting!
+ cmd.append("-j1024")
+ elif multiproject:
+ cmd.append("-j1")
+ # Only tests will actually build with coverage enabled.
+ if coverage and not project.config.is_test:
+ cmd.append("all.libraries")
+ self.logger.info(
+ "Building %s:%s: %s",
+ project.config.project_name,
+ build_name,
+ util.repr_command(cmd),
+ )
+ proc = self.jobserver.popen(
+ cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ encoding="utf-8",
+ errors="replace",
+ # TODO(b/239619222): Filter os.environ for ninja.
+ env=os.environ,
+ )
+ job_id = "{}:{}".format(project.config.project_name, build_name)
+ dirs[build_name].mkdir(parents=True, exist_ok=True)
+ build_log = open( # pylint:disable=consider-using-with
+ dirs[build_name] / "build.log",
+ "w",
+ )
+ out = zmake.multiproc.LogWriter.log_output(
+ logger=self.logger,
+ log_level=logging.INFO,
+ file_descriptor=proc.stdout,
+ log_level_override_func=ninja_stdout_log_level_override,
+ job_id=job_id,
+ tee_output=build_log,
+ )
+ err = zmake.multiproc.LogWriter.log_output(
+ self.logger,
+ logging.ERROR,
+ proc.stderr,
+ job_id=job_id,
)
- wait_funcs.append(wait_func)
- # Outside the with...get_job above.
- for wait_func in wait_funcs:
- wait_func()
- with self.jobserver.get_job():
- # Run the packer.
- packer_work_dir = build_dir / "packer"
- output_dir = build_dir / "output"
- for newdir in output_dir, packer_work_dir:
- if not newdir.exists():
- newdir.mkdir()
-
- # For non-tests, they won't link with coverage, so don't pack the
- # firmware. Also generate a lcov file.
- if coverage and not project.config.is_test:
+ if self._sequential:
+ if not wait_and_check_success([proc], [out, err]):
+ return 2
+ else:
+ procs.append(proc)
+ log_writers += [out, err]
+
+ if not wait_and_check_success(procs, log_writers):
+ return 2
+
+ # Run the packer.
+ packer_work_dir = build_dir / "packer"
+ output_dir = build_dir / "output"
+ for newdir in output_dir, packer_work_dir:
+ if not newdir.exists():
+ newdir.mkdir()
+
+ if output_files_out is None:
+ output_files_out = []
+ # For non-tests, they won't link with coverage, so don't pack the
+ # firmware. Also generate a lcov file.
+ if coverage and not project.config.is_test:
+ with self.jobserver.get_job():
self._run_lcov(
build_dir,
output_dir / "zephyr.info",
initial=True,
gcov=gcov,
)
- else:
- for output_file, output_name in project.packer.pack_firmware(
- packer_work_dir,
- self.jobserver,
- dirs,
- version_string=version_string,
- ):
- shutil.copy2(output_file, output_dir / output_name)
- self.logger.debug("Output file '%s' created.", output_file)
-
- if delete_intermediates:
- outdir = build_dir / "output"
- for child in build_dir.iterdir():
- if child != outdir:
- logging.debug("Deleting %s", child)
- if not child.is_symlink() and child.is_dir():
- shutil.rmtree(child)
- else:
- child.unlink()
- return 0
-
- def _build_one_dir(self, build_name, dirs, coverage, project):
- """Builds one sub-dir of a configured project (build-ro, etc)."""
-
- with self.jobserver.get_job():
- cmd = ["/usr/bin/ninja", "-C", dirs[build_name].as_posix()]
- if self.goma:
- # Go nuts ninja, goma does the heavy lifting!
- cmd.append("-j1024")
- elif self._sequential:
- cmd.append("-j1")
- # Only tests will actually build with coverage enabled.
- if coverage and not project.config.is_test:
- cmd.append("all.libraries")
- self.logger.info(
- "Building %s:%s: %s",
- project.config.project_name,
- build_name,
- util.repr_command(cmd),
- )
- proc = self.jobserver.popen(
- cmd,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- encoding="utf-8",
- errors="replace",
- # TODO(b/239619222): Filter os.environ for ninja.
- env=os.environ,
- )
- job_id = "{}:{}".format(project.config.project_name, build_name)
- dirs[build_name].mkdir(parents=True, exist_ok=True)
- build_log = open( # pylint:disable=consider-using-with
- dirs[build_name] / "build.log",
- "w",
- )
- out = zmake.multiproc.LogWriter.log_output(
- logger=self.logger,
- log_level=logging.INFO,
- file_descriptor=proc.stdout,
- log_level_override_func=ninja_stdout_log_level_override,
- job_id=job_id,
- tee_output=build_log,
- )
- err = zmake.multiproc.LogWriter.log_output(
- self.logger,
- logging.ERROR,
- proc.stderr,
- job_id=job_id,
- )
-
- if proc.wait():
- raise OSError(get_process_failure_msg(proc))
+ else:
+ for output_file, output_name in project.packer.pack_firmware(
+ packer_work_dir,
+ self.jobserver,
+ dirs,
+ version_string=version_string,
+ ):
+ shutil.copy2(output_file, output_dir / output_name)
+ self.logger.debug("Output file '%s' created.", output_file)
+ output_files_out.append(output_file)
- # Let all output be produced before exiting
- out.wait()
- err.wait()
- return 0
+ return 0
def _run_lcov(
self,
@@ -865,33 +869,34 @@ class Zmake:
pathlib.Path(build_dir) / project.config.project_name
)
all_lcov_files.append(project_build_dir / "output" / "zephyr.info")
- # Merge info files into a single lcov.info
- self.logger.info("Merging coverage data into %s.", output_file)
- cmd = [
- "/usr/bin/lcov",
- "-o",
- output_file,
- "--rc",
- "lcov_branch_coverage=1",
- ]
- for info in all_lcov_files:
- cmd += ["-a", info]
- proc = self.jobserver.popen(
- cmd,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- encoding="utf-8",
- errors="replace",
- )
- zmake.multiproc.LogWriter.log_output(
- self.logger, logging.ERROR, proc.stderr, job_id="lcov"
- )
- zmake.multiproc.LogWriter.log_output(
- self.logger, logging.DEBUG, proc.stdout, job_id="lcov"
- )
- if proc.wait():
- raise OSError(get_process_failure_msg(proc))
- return 0
+ with self.jobserver.get_job():
+ # Merge info files into a single lcov.info
+ self.logger.info("Merging coverage data into %s.", output_file)
+ cmd = [
+ "/usr/bin/lcov",
+ "-o",
+ output_file,
+ "--rc",
+ "lcov_branch_coverage=1",
+ ]
+ for info in all_lcov_files:
+ cmd += ["-a", info]
+ proc = self.jobserver.popen(
+ cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ encoding="utf-8",
+ errors="replace",
+ )
+ zmake.multiproc.LogWriter.log_output(
+ self.logger, logging.ERROR, proc.stderr, job_id="lcov"
+ )
+ zmake.multiproc.LogWriter.log_output(
+ self.logger, logging.DEBUG, proc.stdout, job_id="lcov"
+ )
+ if proc.wait():
+ raise OSError(get_process_failure_msg(proc))
+ return 0
def list_projects(self, fmt, search_dir):
"""List project names known to zmake on stdout.