# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function, unicode_literals

from abc import (
    ABCMeta,
    abstractmethod,
)

import errno
import itertools
import os
import time

from contextlib import contextmanager

from mach.mixin.logging import LoggingMixin

import mozpack.path as mozpath
from ..preprocessor import Preprocessor
from ..pythonutil import iter_modules_in_path
from ..util import (
    FileAvoidWrite,
    simple_diff,
)
from ..frontend.data import ContextDerived
from ..frontend.reader import EmptyConfig
from .configenvironment import ConfigEnvironment
from mozbuild.base import ExecutionSummary


class BuildBackend(LoggingMixin):
    """Abstract base class for build backends.

    A build backend is merely a consumer of the build configuration (the output
    of the frontend processing). It does something with said data. What exactly
    is the discretion of the specific implementation.
    """

    # Python 2 style abstract base class declaration; consume_object() below
    # is the abstract method subclasses must implement.
    __metaclass__ = ABCMeta

    def __init__(self, environment):
        """Initialize backend state against *environment*.

        ``environment`` must be a ConfigEnvironment or EmptyConfig. After
        the common state is set up, the _init() hook is invoked so child
        classes can customize without overriding __init__.
        """
        assert isinstance(environment, (ConfigEnvironment, EmptyConfig))
        self.populate_logger()

        self.environment = environment

        # Files whose modification should cause a new read and backend
        # generation.
        self.backend_input_files = set()

        # Files generated by the backend.
        self._backend_output_files = set()

        # Known configuration environments, keyed by their topobjdir.
        self._environments = {}
        self._environments[environment.topobjdir] = environment

        # The number of backend files created.
        self._created_count = 0

        # The number of backend files updated.
        self._updated_count = 0

        # The number of unchanged backend files.
        self._unchanged_count = 0

        # The number of deleted backend files.
        self._deleted_count = 0

        # The total wall time spent in the backend. This counts the time the
        # backend writes out files, etc.
        self._execution_time = 0.0

        # Mapping of changed file paths to diffs of the changes.
        self.file_diffs = {}

        # When True, no files are written or deleted; counters and diffs are
        # still collected (see _write_file and consume).
        self.dry_run = False

        self._init()

    def summary(self):
        """Return an ExecutionSummary describing this backend's run.

        The summary reports wall time plus the created/updated/unchanged/
        deleted file counters accumulated by _write_file() and consume().
        """
        return ExecutionSummary(
            self.__class__.__name__.replace('Backend', '') +
            ' backend executed in {execution_time:.2f}s\n  '
            '{total:d} total backend files; '
            '{created:d} created; '
            '{updated:d} updated; '
            '{unchanged:d} unchanged; '
            '{deleted:d} deleted',
            execution_time=self._execution_time,
            total=self._created_count + self._updated_count +
            self._unchanged_count,
            created=self._created_count,
            updated=self._updated_count,
            unchanged=self._unchanged_count,
            deleted=self._deleted_count)

    def _init(self):
        """Hook point for child classes to perform actions during __init__.

        This exists so child classes don't need to implement __init__.
        """

    def consume(self, objs):
        """Consume a stream of TreeMetadata instances.

        This is the main method of the interface. This is what takes the
        frontend output and does something with it.

        Child classes are not expected to implement this method. Instead, the
        base class consumes objects and calls methods (possibly) implemented by
        child classes.
        """

        # Previously generated files. The 'backend.<ClassName>' file in the
        # objdir records what the last run produced, so stale outputs can be
        # purged below.
        list_file = mozpath.join(self.environment.topobjdir, 'backend.%s'
                                 % self.__class__.__name__)
        backend_output_list = set()
        if os.path.exists(list_file):
            with open(list_file) as fh:
                backend_output_list.update(mozpath.normsep(p)
                                           for p in fh.read().splitlines())

        # Feed every object to the child class, timing each call so
        # summary() can report total backend time. A non-partial backend
        # must handle every object; a PartialBackend may decline.
        for obj in objs:
            obj_start = time.time()
            if (not self.consume_object(obj) and
                    not isinstance(self, PartialBackend)):
                raise Exception('Unhandled object of type %s' % type(obj))
            self._execution_time += time.time() - obj_start

            if (isinstance(obj, ContextDerived) and
                    not isinstance(self, PartialBackend)):
                self.backend_input_files |= obj.context_all_paths

        # Pull in all loaded Python as dependencies so any Python changes that
        # could influence our output result in a rescan.
        self.backend_input_files |= set(iter_modules_in_path(
            self.environment.topsrcdir, self.environment.topobjdir))

        finished_start = time.time()
        self.consume_finished()
        self._execution_time += time.time() - finished_start

        # Purge backend files created in previous run, but not created anymore
        delete_files = backend_output_list - self._backend_output_files
        for path in delete_files:
            full_path = mozpath.join(self.environment.topobjdir, path)
            # Record the old contents as a deletion diff before removing the
            # file; missing/unreadable files are simply skipped.
            try:
                with open(full_path, 'r') as existing:
                    old_content = existing.read()
                    if old_content:
                        self.file_diffs[full_path] = simple_diff(
                            full_path, old_content.splitlines(), None)
            except IOError:
                pass
            # In dry-run mode we still count the would-be deletion, but
            # leave the file on disk.
            try:
                if not self.dry_run:
                    os.unlink(full_path)
                self._deleted_count += 1
            except OSError:
                pass
        # Remove now empty directories (os.removedirs also prunes any empty
        # parent directories; non-empty ones raise OSError, ignored here).
        for dir in set(mozpath.dirname(d) for d in delete_files):
            try:
                os.removedirs(dir)
            except OSError:
                pass

        # Write out the list of backend files generated, if it changed.
        if backend_output_list != self._backend_output_files:
            with self._write_file(list_file) as fh:
                fh.write('\n'.join(sorted(self._backend_output_files)))
        else:
            # Always update its mtime if we're not in dry-run mode.
            if not self.dry_run:
                with open(list_file, 'a'):
                    os.utime(list_file, None)

        # Write out the list of input files for the backend
        with self._write_file('%s.in' % list_file) as fh:
            fh.write('\n'.join(sorted(
                mozpath.normsep(f) for f in self.backend_input_files)))

    @abstractmethod
    def consume_object(self, obj):
        """Consumes an individual TreeMetadata instance.

        This is the main method used by child classes to react to build
        metadata.
        """

    def consume_finished(self):
        """Called when consume() has completed handling all objects."""

    def build(self, config, output, jobs, verbose, what=None):
        """Called when 'mach build' is executed.

        This should return the status value of a subprocess, where 0 denotes
        success and any other value is an error code. A return value of None
        indicates that the default 'make -f client.mk' should run.
        """
        return None

    def _write_purgecaches(self, config):
        """Write .purgecaches sentinels.

        The purgecaches mechanism exists to allow the platform to
        invalidate the XUL cache (which includes some JS) at application
        startup-time.  The application checks for .purgecaches in the
        application directory, which varies according to
        --enable-application.  There's a further wrinkle on macOS, where
        the real application directory is part of a Cocoa bundle
        produced from the regular application directory by the build
        system.  In this case, we write to both locations, since the
        build system recreates the Cocoa bundle from the contents of the
        regular application directory and might remove a sentinel
        created here.
        """

        app = config.substs['MOZ_BUILD_APP']
        if app == 'mobile/android':
            # In order to take effect, .purgecaches sentinels would need to be
            # written to the Android device file system.
            return

        root = mozpath.join(config.topobjdir, 'dist', 'bin')

        if app == 'browser':
            root = mozpath.join(config.topobjdir, 'dist', 'bin', 'browser')

        purgecaches_dirs = [root]
        if app == 'browser' and 'cocoa' == config.substs['MOZ_WIDGET_TOOLKIT']:
            bundledir = mozpath.join(config.topobjdir, 'dist',
                                     config.substs['MOZ_MACBUNDLE_NAME'],
                                     'Contents', 'Resources',
                                     'browser')
            purgecaches_dirs.append(bundledir)

        for dir in purgecaches_dirs:
            with open(mozpath.join(dir, '.purgecaches'), 'wt') as f:
                f.write('\n')

    def post_build(self, config, output, jobs, verbose, status):
        """Called late during 'mach build' execution, after `build(...)` has finished.

        `status` is the status value returned from `build(...)`.

        In the case where `build` returns `None`, this is called after
        the default `make` command has completed, with the status of
        that command.

        This should return the status value from `build(...)`, or the
        status value of a subprocess, where 0 denotes success and any
        other value is an error code.

        If an exception is raised, |mach build| will fail with a
        non-zero exit code.
        """
        self._write_purgecaches(config)

        return status

    @contextmanager
    def _write_file(self, path=None, fh=None, readmode='rU'):
        """Context manager to write a file.

        This is a glorified wrapper around FileAvoidWrite with integration to
        update the summary data on this instance.

        Exactly one of ``path`` or ``fh`` must be provided. When ``path`` is
        given, a FileAvoidWrite is constructed (honoring self.dry_run).
        On exit, the file is recorded in _backend_output_files, its diff (if
        any) is captured, and the created/updated/unchanged counters are
        bumped based on what FileAvoidWrite.close() reports.

        Example usage:

            with self._write_file('foo.txt') as fh:
                fh.write('hello world')
        """

        if path is not None:
            assert fh is None
            fh = FileAvoidWrite(path, capture_diff=True, dry_run=self.dry_run,
                                readmode=readmode)
        else:
            assert fh is not None

        # Ensure the destination directory exists; tolerate it already
        # existing but propagate any other failure.
        dirname = mozpath.dirname(fh.name)
        try:
            os.makedirs(dirname)
        except OSError as error:
            if error.errno != errno.EEXIST:
                raise

        yield fh

        # Bookkeeping after the caller finished writing: record the output
        # path (objdir-relative) and classify the write for summary().
        self._backend_output_files.add(mozpath.relpath(fh.name, self.environment.topobjdir))
        existed, updated = fh.close()
        if fh.diff:
            self.file_diffs[fh.name] = fh.diff
        if not existed:
            self._created_count += 1
        elif updated:
            self._updated_count += 1
        else:
            self._unchanged_count += 1

    @contextmanager
    def _get_preprocessor(self, obj):
        '''Returns a preprocessor with a few predefined values depending on
        the given BaseConfigSubstitution(-like) object, and all the substs
        in the current environment.'''
        pp = Preprocessor()
        srcdir = mozpath.dirname(obj.input_path)
        # Expose every subst; list-valued substs are flattened to
        # space-separated strings. (iteritems: this file is Python 2.)
        pp.context.update({
            k: ' '.join(v) if isinstance(v, list) else v
            for k, v in obj.config.substs.iteritems()
        })
        pp.context.update(
            top_srcdir=obj.topsrcdir,
            topobjdir=obj.topobjdir,
            srcdir=srcdir,
            srcdir_rel=mozpath.relpath(srcdir, mozpath.dirname(obj.output_path)),
            relativesrcdir=mozpath.relpath(srcdir, obj.topsrcdir) or '.',
            DEPTH=mozpath.relpath(obj.topobjdir, mozpath.dirname(obj.output_path)) or '.',
        )
        pp.do_filter('attemptSubstitution')
        # NOTE(review): a None marker appears to disable directive-line
        # processing so only substitution filters apply -- confirm against
        # the Preprocessor implementation.
        pp.setMarker(None)
        with self._write_file(obj.output_path) as fh:
            pp.out = fh
            yield pp

class PartialBackend(BuildBackend):
    """A PartialBackend is a BuildBackend declaring that its consume_object
    method may not handle all build configuration objects it's passed, and
    that it's fine.

    This marker class is checked by BuildBackend.consume() (an unhandled
    object is not an error for a PartialBackend) and by HybridBackend (all
    backends except the last must be partial).
    """


def HybridBackend(*backends):
    """A HybridBackend is the combination of one or more PartialBackends
    with a non-partial BuildBackend.

    Build configuration objects are passed to each backend, stopping at the
    first of them that declares having handled them.
    """
    # Validate the class list: at least one partial backend, followed by
    # exactly one full (non-partial) backend, all of them BuildBackends.
    assert len(backends) >= 2
    assert all(issubclass(b, PartialBackend) for b in backends[:-1])
    assert not issubclass(backends[-1], PartialBackend)
    assert all(issubclass(b, BuildBackend) for b in backends)

    class TheHybridBackend(BuildBackend):
        def __init__(self, environment):
            # Instantiate the wrapped backends before running our own
            # BuildBackend initialization.
            self._backends = [cls(environment) for cls in backends]
            super(TheHybridBackend, self).__init__(environment)

        def consume_object(self, obj):
            # any() short-circuits, so backends later in the list never see
            # an object an earlier (partial) backend already handled.
            return any(backend.consume_object(obj)
                       for backend in self._backends)

        def consume_finished(self):
            for backend in self._backends:
                backend.consume_finished()

            # Roll the per-backend bookkeeping counters up into this
            # instance so summary() reflects the combined work.
            counters = ('_execution_time', '_created_count', '_updated_count',
                        '_unchanged_count', '_deleted_count')
            for attr in counters:
                combined = sum(getattr(backend, attr)
                               for backend in self._backends)
                setattr(self, attr, combined)

            # Merge diffs and the input/output file sets from each backend.
            for backend in self._backends:
                self.file_diffs.update(backend.file_diffs)
                for attr in ('backend_input_files', '_backend_output_files'):
                    getattr(self, attr).update(getattr(backend, attr))

    # Name the generated class e.g. 'FasterMake+RecursiveMakeBackend':
    # partial backends contribute their name minus the 'Backend' suffix,
    # the final backend contributes its full name.
    partial_names = (b.__name__.replace('Backend', '') for b in backends[:-1])
    full_name = (b.__name__ for b in backends[-1:])
    name = '+'.join(itertools.chain(partial_names, full_name))

    return type(str(name), (TheHybridBackend,), {})