diff --git a/build/docs/mozbuild-files.rst b/build/docs/mozbuild-files.rst index b143c01fa2443..a1cba0106073a 100644 --- a/build/docs/mozbuild-files.rst +++ b/build/docs/mozbuild-files.rst @@ -53,16 +53,15 @@ side-effects. Previously, when the build configuration was defined in unnoticed. ``moz.build`` files fix this problem by eliminating the potential for false promises. -In the sandbox, all ``UPPERCASE`` variables are globals and all -non-``UPPERCASE`` variables are locals. After a ``moz.build`` file has -completed execution, only the globals are used to retrieve state. +After a ``moz.build`` file has completed execution, only the +``UPPERCASE`` variables are used to retrieve state. The set of variables and functions available to the Python sandbox is -defined by the :py:mod:`mozbuild.frontend.sandbox_symbols` module. The +defined by the :py:mod:`mozbuild.frontend.context` module. The data structures in this module are consumed by the :py:class:`mozbuild.frontend.reader.MozbuildSandbox` class to construct the sandbox. There are tests to ensure that the set of symbols exposed -to an empty sandbox are all defined in the ``sandbox_symbols`` module. +to an empty sandbox are all defined in the ``context`` module. This module also contains documentation for each symbol, so nothing can sneak into the sandbox without being explicitly defined and documented. @@ -81,13 +80,14 @@ of all the special ``UPPERCASE`` variables populated during its execution. The code for reading ``moz.build`` files lives in -:py:mod:`mozbuild.frontend.reader`. The evaluated Python sandboxes are -passed into :py:mod:`mozbuild.frontend.emitter`, which converts them to -classes defined in :py:mod:`mozbuild.frontend.data`. Each class in this -module define a domain-specific component of tree metdata. e.g. there -will be separate classes that represent a JavaScript file vs a compiled -C++ file or test manifests. 
This means downstream consumers of this data -can filter on class types to only consume what they are interested in. +:py:mod:`mozbuild.frontend.reader`. The Python sandboxes' evaluation results +(:py:class:`mozbuild.frontend.context.Context`) are passed into +:py:mod:`mozbuild.frontend.emitter`, which converts them to classes defined +in :py:mod:`mozbuild.frontend.data`. Each class in this module defines a +domain-specific component of tree metadata. e.g. there will be separate +classes that represent a JavaScript file vs a compiled C++ file or test +manifests. This means downstream consumers of this data can filter on class +types to only consume what they are interested in. There is no well-defined mapping between ``moz.build`` file instances and the number of :py:mod:`mozbuild.frontend.data` classes derived from @@ -98,7 +98,7 @@ The purpose of the ``emitter`` layer between low-level sandbox execution and metadata representation is to facilitate a unified normalization and verification step. There are multiple downstream consumers of the ``moz.build``-derived data and many will perform the same actions. This -logic can be complicated, so we a component dedicated to it. +logic can be complicated, so we have a component dedicated to it. Other Notes =========== diff --git a/build/docs/mozbuild-symbols.rst b/build/docs/mozbuild-symbols.rst index 0a99d64ce73b8..4e9a8853a09cd 100644 --- a/build/docs/mozbuild-symbols.rst +++ b/build/docs/mozbuild-symbols.rst @@ -4,4 +4,4 @@ mozbuild Sandbox Symbols ======================== -.. mozbuildsymbols:: mozbuild.frontend.sandbox_symbols +.. 
mozbuildsymbols:: mozbuild.frontend.context diff --git a/python/moz.build b/python/moz.build index 9fa86161f461e..1dfae867cf86c 100644 --- a/python/moz.build +++ b/python/moz.build @@ -36,11 +36,11 @@ PYTHON_UNIT_TESTS += [ 'mozbuild/mozbuild/test/controller/test_ccachestats.py', 'mozbuild/mozbuild/test/controller/test_clobber.py', 'mozbuild/mozbuild/test/frontend/__init__.py', + 'mozbuild/mozbuild/test/frontend/test_context.py', 'mozbuild/mozbuild/test/frontend/test_emitter.py', 'mozbuild/mozbuild/test/frontend/test_namespaces.py', 'mozbuild/mozbuild/test/frontend/test_reader.py', 'mozbuild/mozbuild/test/frontend/test_sandbox.py', - 'mozbuild/mozbuild/test/frontend/test_sandbox_symbols.py', 'mozbuild/mozbuild/test/test_base.py', 'mozbuild/mozbuild/test/test_containers.py', 'mozbuild/mozbuild/test/test_expression.py', diff --git a/python/mozbuild/mozbuild/backend/android_eclipse.py b/python/mozbuild/mozbuild/backend/android_eclipse.py index 41ed63ddb4efb..43f76e35e648d 100644 --- a/python/mozbuild/mozbuild/backend/android_eclipse.py +++ b/python/mozbuild/mozbuild/backend/android_eclipse.py @@ -19,8 +19,8 @@ from .common import CommonBackend from ..frontend.data import ( AndroidEclipseProjectData, - SandboxDerived, - SandboxWrapped, + ContextDerived, + ContextWrapped, ) from ..makeutil import Makefile from ..util import ensureParentDir @@ -58,7 +58,7 @@ def detailed(summary): def consume_object(self, obj): """Write out Android Eclipse project files.""" - if not isinstance(obj, SandboxDerived): + if not isinstance(obj, ContextDerived): return CommonBackend.consume_object(self, obj) @@ -71,7 +71,7 @@ def consume_object(self, obj): obj.ack() # ... and handle the one case we care about specially. 
- if isinstance(obj, SandboxWrapped) and isinstance(obj.wrapped, AndroidEclipseProjectData): + if isinstance(obj, ContextWrapped) and isinstance(obj.wrapped, AndroidEclipseProjectData): self._process_android_eclipse_project_data(obj.wrapped, obj.srcdir, obj.objdir) def consume_finished(self): diff --git a/python/mozbuild/mozbuild/backend/base.py b/python/mozbuild/mozbuild/backend/base.py index 054d80474cd8d..e2baa2025ed2a 100644 --- a/python/mozbuild/mozbuild/backend/base.py +++ b/python/mozbuild/mozbuild/backend/base.py @@ -22,8 +22,8 @@ from ..pythonutil import iter_modules_in_path from ..util import FileAvoidWrite from ..frontend.data import ( + ContextDerived, ReaderSummary, - SandboxDerived, ) from .configenvironment import ConfigEnvironment import mozpack.path as mozpath @@ -184,8 +184,8 @@ def consume(self, objs): self.consume_object(obj) backend_time += time.time() - obj_start - if isinstance(obj, SandboxDerived): - self.backend_input_files |= obj.sandbox_all_paths + if isinstance(obj, ContextDerived): + self.backend_input_files |= obj.context_all_paths if isinstance(obj, ReaderSummary): self.summary.mozbuild_count = obj.total_file_count diff --git a/python/mozbuild/mozbuild/backend/recursivemake.py b/python/mozbuild/mozbuild/backend/recursivemake.py index 4797868524be4..b5cb5ddf25239 100644 --- a/python/mozbuild/mozbuild/backend/recursivemake.py +++ b/python/mozbuild/mozbuild/backend/recursivemake.py @@ -30,6 +30,8 @@ from ..frontend.data import ( AndroidEclipseProjectData, ConfigFileSubstitution, + ContextDerived, + ContextWrapped, Defines, DirectoryTraversal, Exports, @@ -48,8 +50,6 @@ PerSourceFlag, Program, Resources, - SandboxDerived, - SandboxWrapped, SharedLibrary, SimpleProgram, StaticLibrary, @@ -324,7 +324,7 @@ def detailed(summary): def consume_object(self, obj): """Write out build files necessary to build with recursive make.""" - if not isinstance(obj, SandboxDerived): + if not isinstance(obj, ContextDerived): return if obj.objdir not in 
self._backend_files: @@ -454,7 +454,7 @@ def consume_object(self, obj): elif isinstance(obj, JavaScriptModules): self._process_javascript_modules(obj, backend_file) - elif isinstance(obj, SandboxWrapped): + elif isinstance(obj, ContextWrapped): # Process a rich build system object from the front-end # as-is. Please follow precedent and handle CamelCaseData # in a function named _process_camel_case_data. At some diff --git a/python/mozbuild/mozbuild/frontend/sandbox_symbols.py b/python/mozbuild/mozbuild/frontend/context.py similarity index 83% rename from python/mozbuild/mozbuild/frontend/sandbox_symbols.py rename to python/mozbuild/mozbuild/frontend/context.py index 7c7a44bae2e52..212820a5189ef 100644 --- a/python/mozbuild/mozbuild/frontend/sandbox_symbols.py +++ b/python/mozbuild/mozbuild/frontend/context.py @@ -6,39 +6,192 @@ # DO NOT UPDATE THIS FILE WITHOUT SIGN-OFF FROM A BUILD MODULE PEER. # ###################################################################### -r"""Defines the global config variables. +r"""This module contains the data structure (context) holding the configuration +from a moz.build. The data emitted by the frontend derives from those contexts. -This module contains data structures defining the global symbols that have -special meaning in the frontend files for the build system. - -If you are looking for the absolute authority on what the global namespace in -the Sandbox consists of, you've come to the right place. +It also defines the set of variables and functions available in moz.build. +If you are looking for the absolute authority on what moz.build files can +contain, you've come to the right place. 
""" from __future__ import unicode_literals +import os + from collections import OrderedDict +from contextlib import contextmanager from mozbuild.util import ( HierarchicalStringList, HierarchicalStringListWithFlagsFactory, + KeyedDefaultDict, List, + memoized_property, + ReadOnlyKeyedDefaultDict, StrictOrderingOnAppendList, StrictOrderingOnAppendListWithFlagsFactory, ) -from .sandbox import SandboxDerivedValue +import mozpack.path as mozpath from types import StringTypes +import itertools + + +class ContextDerivedValue(object): + """Classes deriving from this one receive a special treatment in a + Context. See Context documentation. + """ + + +class Context(KeyedDefaultDict): + """Represents a moz.build configuration context. + + Instances of this class are filled by the execution of sandboxes. + At the core, a Context is a dict, with a defined set of possible keys we'll + call variables. Each variable is associated with a type. + + When reading a value for a given key, we first try to read the existing + value. If a value is not found and it is defined in the allowed variables + set, we return a new instance of the class for that variable. We don't + assign default instances until they are accessed because this makes + debugging the end-result much simpler. Instead of a data structure with + lots of empty/default values, you have a data structure with only the + values that were read or touched. + + Instances of variables classes are created by invoking class_name(), + except when class_name derives from ContextDerivedValue, in which + case class_name(instance_of_the_context) is invoked. + A value is added to those calls when instances are created during + assignment (setitem). + + allowed_variables is a dict of the variables that can be set and read in + this context instance. Keys in this dict are the strings representing keys + in this context which are valid. 
Values are tuples of stored type, + assigned type, default value, a docstring describing the purpose of the + variable, and a tier indicator (see comment above the VARIABLES declaration + in this module). + + config is the ConfigEnvironment for this context. + """ + def __init__(self, allowed_variables={}, config=None): + self._allowed_variables = allowed_variables + self.main_path = None + self.all_paths = set() + self.config = config + self.executed_time = 0 + KeyedDefaultDict.__init__(self, self._factory) + + def add_source(self, path): + """Adds the given path as source of the data from this context.""" + assert os.path.isabs(path) + + if not self.main_path: + self.main_path = path + self.all_paths.add(path) + + @memoized_property + def objdir(self): + return mozpath.join(self.config.topobjdir, self.relobjdir).rstrip('/') + + @memoized_property + def srcdir(self): + return mozpath.join(self.config.topsrcdir, self.relsrcdir).rstrip('/') + + @memoized_property + def relsrcdir(self): + assert self.main_path + return mozpath.relpath(mozpath.dirname(self.main_path), + self.config.topsrcdir) + + @memoized_property + def relobjdir(self): + return self.relsrcdir + + def _factory(self, key): + """Function called when requesting a missing key.""" + + defaults = self._allowed_variables.get(key) + if not defaults: + raise KeyError('global_ns', 'get_unknown', key) + + # If the default is specifically a lambda (or, rather, any function + # --but not a class that can be called), then it is actually a rule to + # generate the default that should be used. + default = defaults[0] + if issubclass(default, ContextDerivedValue): + return default(self) + else: + return default() + + def _validate(self, key, value): + """Validates whether the key is allowed and if the value's type + matches. 
+ """ + stored_type, input_type, docs, tier = \ + self._allowed_variables.get(key, (None, None, None, None)) + + if stored_type is None: + raise KeyError('global_ns', 'set_unknown', key, value) + + # If the incoming value is not the type we store, we try to convert + # it to that type. This relies on proper coercion rules existing. This + # is the responsibility of whoever defined the symbols: a type should + # not be in the allowed set if the constructor function for the stored + # type does not accept an instance of that type. + if not isinstance(value, (stored_type, input_type)): + raise ValueError('global_ns', 'set_type', key, value, input_type) + + return stored_type + + def __setitem__(self, key, value): + stored_type = self._validate(key, value) + + if not isinstance(value, stored_type): + if issubclass(stored_type, ContextDerivedValue): + value = stored_type(self, value) + else: + value = stored_type(value) + + return KeyedDefaultDict.__setitem__(self, key, value) + + def update(self, iterable={}, **kwargs): + """Like dict.update(), but using the context's setitem. + + This function is transactional: if setitem fails for one of the values, + the context is not updated at all.""" + if isinstance(iterable, dict): + iterable = iterable.items() + + update = {} + for key, value in itertools.chain(iterable, kwargs.items()): + stored_type = self._validate(key, value) + # Don't create an instance of stored_type if coercion is needed, + # until all values are validated. + update[key] = (value, stored_type) + for key, (value, stored_type) in update.items(): + if not isinstance(value, stored_type): + update[key] = stored_type(value) + else: + update[key] = value + KeyedDefaultDict.update(self, update) -class FinalTargetValue(SandboxDerivedValue, unicode): - def __new__(cls, sandbox, value=""): + def get_affected_tiers(self): + """Returns the list of tiers affected by the variables set in the + context. 
+ """ + tiers = (VARIABLES[key][3] for key in self if key in VARIABLES) + return set(tier for tier in tiers if tier) + + +class FinalTargetValue(ContextDerivedValue, unicode): + def __new__(cls, context, value=""): if not value: value = 'dist/' - if sandbox['XPI_NAME']: - value += 'xpi-stage/' + sandbox['XPI_NAME'] + if context['XPI_NAME']: + value += 'xpi-stage/' + context['XPI_NAME'] else: value += 'bin' - if sandbox['DIST_SUBDIR']: - value += '/' + sandbox['DIST_SUBDIR'] + if context['DIST_SUBDIR']: + value += '/' + context['DIST_SUBDIR'] return unicode.__new__(cls, value) @@ -947,8 +1100,13 @@ def __new__(cls, sandbox, value=""): } # Special variables. These complement VARIABLES. +# +# Each entry is a tuple of: +# +# (function returning the corresponding value from a given context, type, docs) +# SPECIAL_VARIABLES = { - 'TOPSRCDIR': (str, + 'TOPSRCDIR': (lambda context: context.config.topsrcdir, str, """Constant defining the top source directory. The top source directory is the parent directory containing the source @@ -956,7 +1114,7 @@ def __new__(cls, sandbox, value=""): cloned repository. """), - 'TOPOBJDIR': (str, + 'TOPOBJDIR': (lambda context: context.config.topobjdir, str, """Constant defining the top object directory. The top object directory is the parent directory which will contain @@ -964,7 +1122,7 @@ def __new__(cls, sandbox, value=""): directory." """), - 'RELATIVEDIR': (str, + 'RELATIVEDIR': (lambda context: context.relsrcdir, str, """Constant defining the relative path of this file. The relative path is from ``TOPSRCDIR``. This is defined as relative @@ -972,20 +1130,21 @@ def __new__(cls, sandbox, value=""): files have been included using ``include()``. """), - 'SRCDIR': (str, + 'SRCDIR': (lambda context: context.srcdir, str, """Constant defining the source directory of this file. This is the path inside ``TOPSRCDIR`` where this file is located. It is the same as ``TOPSRCDIR + RELATIVEDIR``. 
"""), - 'OBJDIR': (str, + 'OBJDIR': (lambda context: context.objdir, str, """The path to the object directory for this file. Is is the same as ``TOPOBJDIR + RELATIVEDIR``. """), - 'CONFIG': (dict, + 'CONFIG': (lambda context: ReadOnlyKeyedDefaultDict( + lambda key: context.config.substs_unicode.get(key)), dict, """Dictionary containing the current configuration variables. All the variables defined by the configuration system are available @@ -996,16 +1155,6 @@ def __new__(cls, sandbox, value=""): Access to an unknown variable will return None. """), - - '__builtins__': (dict, - """Exposes Python built-in types. - - The set of exposed Python built-ins is currently: - - - True - - False - - None - """), } # Deprecation hints. diff --git a/python/mozbuild/mozbuild/frontend/data.py b/python/mozbuild/mozbuild/frontend/data.py index ea66428f5e6d5..b6ac777c746f6 100644 --- a/python/mozbuild/mozbuild/frontend/data.py +++ b/python/mozbuild/mozbuild/frontend/data.py @@ -25,7 +25,7 @@ StrictOrderingOnAppendList, ) import mozpack.path as mozpath -from .sandbox_symbols import FinalTargetValue +from .context import FinalTargetValue class TreeMetadata(object): @@ -49,46 +49,46 @@ def __init__(self, total_file_count, total_sandbox_execution_time, self.total_emitter_execution_time = total_emitter_execution_time -class SandboxDerived(TreeMetadata): - """Build object derived from a single MozbuildSandbox instance. +class ContextDerived(TreeMetadata): + """Build object derived from a single Context instance. - It holds fields common to all sandboxes. This class is likely never - instantiated directly but is instead derived from. + It holds fields common to all context derived classes. This class is likely + never instantiated directly but is instead derived from. 
""" __slots__ = ( 'objdir', 'relativedir', - 'sandbox_all_paths', - 'sandbox_path', + 'context_all_paths', + 'context_path', 'srcdir', 'topobjdir', 'topsrcdir', ) - def __init__(self, sandbox): + def __init__(self, context): TreeMetadata.__init__(self) - # Capture the files that were evaluated to build this sandbox. - self.sandbox_main_path = sandbox.main_path - self.sandbox_all_paths = sandbox.all_paths + # Capture the files that were evaluated to fill this context. + self.context_main_path = context.main_path + self.context_all_paths = context.all_paths # Basic directory state. - self.topsrcdir = sandbox.config.topsrcdir - self.topobjdir = sandbox.config.topobjdir + self.topsrcdir = context.config.topsrcdir + self.topobjdir = context.config.topobjdir - self.relativedir = sandbox['RELATIVEDIR'] - self.srcdir = sandbox['SRCDIR'] - self.objdir = sandbox['OBJDIR'] + self.relativedir = context.relsrcdir + self.srcdir = context.srcdir + self.objdir = context.objdir - self.config = sandbox.config + self.config = context.config @property def relobjdir(self): return mozpath.relpath(self.objdir, self.topobjdir) -class DirectoryTraversal(SandboxDerived): +class DirectoryTraversal(ContextDerived): """Describes how directory traversal for building should work. This build object is likely only of interest to the recursive make backend. 
@@ -106,15 +106,15 @@ class DirectoryTraversal(SandboxDerived): 'tier_dirs', ) - def __init__(self, sandbox): - SandboxDerived.__init__(self, sandbox) + def __init__(self, context): + ContextDerived.__init__(self, context) self.dirs = [] self.test_dirs = [] self.tier_dirs = OrderedDict() -class BaseConfigSubstitution(SandboxDerived): +class BaseConfigSubstitution(ContextDerived): """Base class describing autogenerated files as part of config.status.""" __slots__ = ( @@ -123,8 +123,8 @@ class BaseConfigSubstitution(SandboxDerived): 'relpath', ) - def __init__(self, sandbox): - SandboxDerived.__init__(self, sandbox) + def __init__(self, context): + ContextDerived.__init__(self, context) self.input_path = None self.output_path = None @@ -139,7 +139,7 @@ class HeaderFileSubstitution(BaseConfigSubstitution): """Describes a header file that will be generated using substitutions.""" -class VariablePassthru(SandboxDerived): +class VariablePassthru(ContextDerived): """A dict of variables to pass through to backend.mk unaltered. The purpose of this object is to facilitate rapid transitioning of @@ -150,11 +150,11 @@ class VariablePassthru(SandboxDerived): """ __slots__ = ('variables') - def __init__(self, sandbox): - SandboxDerived.__init__(self, sandbox) + def __init__(self, context): + ContextDerived.__init__(self, context) self.variables = {} -class XPIDLFile(SandboxDerived): +class XPIDLFile(ContextDerived): """Describes an XPIDL file to be compiled.""" __slots__ = ( @@ -162,20 +162,20 @@ class XPIDLFile(SandboxDerived): 'source_path', ) - def __init__(self, sandbox, source, module): - SandboxDerived.__init__(self, sandbox) + def __init__(self, context, source, module): + ContextDerived.__init__(self, context) self.source_path = source self.basename = mozpath.basename(source) self.module = module -class Defines(SandboxDerived): - """Sandbox container object for DEFINES, which is an OrderedDict. 
+class Defines(ContextDerived): + """Context derived container object for DEFINES, which is an OrderedDict. """ __slots__ = ('defines') - def __init__(self, sandbox, defines): - SandboxDerived.__init__(self, sandbox) + def __init__(self, context, defines): + ContextDerived.__init__(self, context) self.defines = defines def get_defines(self): @@ -187,88 +187,90 @@ def get_defines(self): else: yield('-D%s=%s' % (define, shell_quote(value))) -class Exports(SandboxDerived): - """Sandbox container object for EXPORTS, which is a HierarchicalStringList. +class Exports(ContextDerived): + """Context derived container object for EXPORTS, which is a + HierarchicalStringList. - We need an object derived from SandboxDerived for use in the backend, so + We need an object derived from ContextDerived for use in the backend, so this object fills that role. It just has a reference to the underlying HierarchicalStringList, which is created when parsing EXPORTS. """ __slots__ = ('exports', 'dist_install') - def __init__(self, sandbox, exports, dist_install=True): - SandboxDerived.__init__(self, sandbox) + def __init__(self, context, exports, dist_install=True): + ContextDerived.__init__(self, context) self.exports = exports self.dist_install = dist_install -class Resources(SandboxDerived): - """Sandbox container object for RESOURCE_FILES, which is a HierarchicalStringList, - with an extra ``.preprocess`` property on each entry. +class Resources(ContextDerived): + """Context derived container object for RESOURCE_FILES, which is a + HierarchicalStringList, with an extra ``.preprocess`` property on each + entry. The local defines plus anything in ACDEFINES are stored in ``defines`` as a dictionary, for any files that need preprocessing. 
""" __slots__ = ('resources', 'defines') - def __init__(self, sandbox, resources, defines=None): - SandboxDerived.__init__(self, sandbox) + def __init__(self, context, resources, defines=None): + ContextDerived.__init__(self, context) self.resources = resources defs = {} - defs.update(sandbox.config.defines) + defs.update(context.config.defines) if defines: defs.update(defines) self.defines = defs -class IPDLFile(SandboxDerived): +class IPDLFile(ContextDerived): """Describes an individual .ipdl source file.""" __slots__ = ( 'basename', ) - def __init__(self, sandbox, path): - SandboxDerived.__init__(self, sandbox) + def __init__(self, context, path): + ContextDerived.__init__(self, context) self.basename = path -class WebIDLFile(SandboxDerived): +class WebIDLFile(ContextDerived): """Describes an individual .webidl source file.""" __slots__ = ( 'basename', ) - def __init__(self, sandbox, path): - SandboxDerived.__init__(self, sandbox) + def __init__(self, context, path): + ContextDerived.__init__(self, context) self.basename = path -class GeneratedEventWebIDLFile(SandboxDerived): +class GeneratedEventWebIDLFile(ContextDerived): """Describes an individual .webidl source file.""" __slots__ = ( 'basename', ) - def __init__(self, sandbox, path): - SandboxDerived.__init__(self, sandbox) + def __init__(self, context, path): + ContextDerived.__init__(self, context) self.basename = path -class TestWebIDLFile(SandboxDerived): +class TestWebIDLFile(ContextDerived): """Describes an individual test-only .webidl source file.""" __slots__ = ( 'basename', ) - def __init__(self, sandbox, path): - SandboxDerived.__init__(self, sandbox) + def __init__(self, context, path): + ContextDerived.__init__(self, context) self.basename = path -class PreprocessedTestWebIDLFile(SandboxDerived): +class PreprocessedTestWebIDLFile(ContextDerived): """Describes an individual test-only .webidl source file that requires preprocessing.""" @@ -276,24 +278,24 @@ class 
PreprocessedTestWebIDLFile(SandboxDerived): 'basename', ) - def __init__(self, sandbox, path): - SandboxDerived.__init__(self, sandbox) + def __init__(self, context, path): + ContextDerived.__init__(self, context) self.basename = path -class PreprocessedWebIDLFile(SandboxDerived): +class PreprocessedWebIDLFile(ContextDerived): """Describes an individual .webidl source file that requires preprocessing.""" __slots__ = ( 'basename', ) - def __init__(self, sandbox, path): - SandboxDerived.__init__(self, sandbox) + def __init__(self, context, path): + ContextDerived.__init__(self, context) self.basename = path -class GeneratedWebIDLFile(SandboxDerived): +class GeneratedWebIDLFile(ContextDerived): """Describes an individual .webidl source file that is generated from build rules.""" @@ -301,21 +303,21 @@ class GeneratedWebIDLFile(SandboxDerived): 'basename', ) - def __init__(self, sandbox, path): - SandboxDerived.__init__(self, sandbox) + def __init__(self, context, path): + ContextDerived.__init__(self, context) self.basename = path -class ExampleWebIDLInterface(SandboxDerived): +class ExampleWebIDLInterface(ContextDerived): """An individual WebIDL interface to generate.""" __slots__ = ( 'name', ) - def __init__(self, sandbox, name): - SandboxDerived.__init__(self, sandbox) + def __init__(self, context, name): + ContextDerived.__init__(self, context) self.name = name @@ -324,15 +326,15 @@ class LinkageWrongKindError(Exception): """Error thrown when trying to link objects of the wrong kind""" -class Linkable(SandboxDerived): - """Generic sandbox container object for programs and libraries""" +class Linkable(ContextDerived): + """Generic context derived container object for programs and libraries""" __slots__ = ( 'linked_libraries', 'linked_system_libs', ) - def __init__(self, sandbox): - SandboxDerived.__init__(self, sandbox) + def __init__(self, context): + ContextDerived.__init__(self, context) self.linked_libraries = [] self.linked_system_libs = [] @@ -362,7 +364,8 @@ 
def link_system_library(self, lib): class BaseProgram(Linkable): - """Sandbox container object for programs, which is a unicode string. + """Context derived container object for programs, which is a unicode + string. This class handles automatically appending a binary suffix to the program name. @@ -372,10 +375,10 @@ class BaseProgram(Linkable): """ __slots__ = ('program') - def __init__(self, sandbox, program, is_unit_test=False): - Linkable.__init__(self, sandbox) + def __init__(self, context, program, is_unit_test=False): + Linkable.__init__(self, context) - bin_suffix = sandbox['CONFIG'].get(self.SUFFIX_VAR, '') + bin_suffix = context.config.substs.get(self.SUFFIX_VAR, '') if not program.endswith(bin_suffix): program += bin_suffix self.program = program @@ -383,31 +386,32 @@ def __init__(self, sandbox, program, is_unit_test=False): class Program(BaseProgram): - """Sandbox container object for PROGRAM""" + """Context derived container object for PROGRAM""" SUFFIX_VAR = 'BIN_SUFFIX' KIND = 'target' class HostProgram(BaseProgram): - """Sandbox container object for HOST_PROGRAM""" + """Context derived container object for HOST_PROGRAM""" SUFFIX_VAR = 'HOST_BIN_SUFFIX' KIND = 'host' class SimpleProgram(BaseProgram): - """Sandbox container object for each program in SIMPLE_PROGRAMS""" + """Context derived container object for each program in SIMPLE_PROGRAMS""" SUFFIX_VAR = 'BIN_SUFFIX' KIND = 'target' class HostSimpleProgram(BaseProgram): - """Sandbox container object for each program in HOST_SIMPLE_PROGRAMS""" + """Context derived container object for each program in + HOST_SIMPLE_PROGRAMS""" SUFFIX_VAR = 'HOST_BIN_SUFFIX' KIND = 'host' class BaseLibrary(Linkable): - """Generic sandbox container object for libraries.""" + """Generic context derived container object for libraries.""" __slots__ = ( 'basename', 'lib_name', @@ -415,15 +419,15 @@ class BaseLibrary(Linkable): 'refs', ) - def __init__(self, sandbox, basename): - Linkable.__init__(self, sandbox) + def 
__init__(self, context, basename): + Linkable.__init__(self, context) self.basename = self.lib_name = basename if self.lib_name: self.lib_name = '%s%s%s' % ( - sandbox.config.lib_prefix, + context.config.lib_prefix, self.lib_name, - sandbox.config.lib_suffix + context.config.lib_suffix ) self.import_name = self.lib_name @@ -431,32 +435,32 @@ def __init__(self, sandbox, basename): class Library(BaseLibrary): - """Sandbox container object for a library""" + """Context derived container object for a library""" KIND = 'target' __slots__ = ( 'is_sdk', ) - def __init__(self, sandbox, basename, real_name=None, is_sdk=False): - BaseLibrary.__init__(self, sandbox, real_name or basename) + def __init__(self, context, basename, real_name=None, is_sdk=False): + BaseLibrary.__init__(self, context, real_name or basename) self.basename = basename self.is_sdk = is_sdk class StaticLibrary(Library): - """Sandbox container object for a static library""" + """Context derived container object for a static library""" __slots__ = ( 'link_into', ) - def __init__(self, sandbox, basename, real_name=None, is_sdk=False, + def __init__(self, context, basename, real_name=None, is_sdk=False, link_into=None): - Library.__init__(self, sandbox, basename, real_name, is_sdk) + Library.__init__(self, context, basename, real_name, is_sdk) self.link_into = link_into class SharedLibrary(Library): - """Sandbox container object for a shared library""" + """Context derived container object for a shared library""" __slots__ = ( 'soname', 'variant', @@ -466,10 +470,10 @@ class SharedLibrary(Library): COMPONENT = 2 MAX_VARIANT = 3 - def __init__(self, sandbox, basename, real_name=None, is_sdk=False, + def __init__(self, context, basename, real_name=None, is_sdk=False, soname=None, variant=None): assert(variant in range(1, self.MAX_VARIANT) or variant is None) - Library.__init__(self, sandbox, basename, real_name, is_sdk) + Library.__init__(self, context, basename, real_name, is_sdk) self.variant = variant 
self.lib_name = real_name or basename assert self.lib_name @@ -478,20 +482,20 @@ def __init__(self, sandbox, basename, real_name=None, is_sdk=False, self.import_name = self.lib_name else: self.import_name = '%s%s%s' % ( - sandbox.config.import_prefix, + context.config.import_prefix, self.lib_name, - sandbox.config.import_suffix, + context.config.import_suffix, ) self.lib_name = '%s%s%s' % ( - sandbox.config.dll_prefix, + context.config.dll_prefix, self.lib_name, - sandbox.config.dll_suffix, + context.config.dll_suffix, ) if soname: self.soname = '%s%s%s' % ( - sandbox.config.dll_prefix, + context.config.dll_prefix, soname, - sandbox.config.dll_suffix, + context.config.dll_suffix, ) else: self.soname = self.lib_name @@ -502,21 +506,21 @@ class ExternalLibrary(object): class ExternalStaticLibrary(StaticLibrary, ExternalLibrary): - """Sandbox container for static libraries built by an external build - system.""" + """Context derived container for static libraries built by an external + build system.""" class ExternalSharedLibrary(SharedLibrary, ExternalLibrary): - """Sandbox container for shared libraries built by an external build - system.""" + """Context derived container for shared libraries built by an external + build system.""" class HostLibrary(BaseLibrary): - """Sandbox container object for a host library""" + """Context derived container object for a host library""" KIND = 'host' -class TestManifest(SandboxDerived): +class TestManifest(ContextDerived): """Represents a manifest file containing information about tests.""" __slots__ = ( @@ -563,9 +567,9 @@ class TestManifest(SandboxDerived): 'dupe_manifest', ) - def __init__(self, sandbox, path, manifest, flavor=None, + def __init__(self, context, path, manifest, flavor=None, install_prefix=None, relpath=None, dupe_manifest=False): - SandboxDerived.__init__(self, sandbox) + ContextDerived.__init__(self, context) self.path = path self.directory = mozpath.dirname(path) @@ -581,32 +585,32 @@ def __init__(self, 
sandbox, path, manifest, flavor=None, self.external_installs = set() -class LocalInclude(SandboxDerived): +class LocalInclude(ContextDerived): """Describes an individual local include path.""" __slots__ = ( 'path', ) - def __init__(self, sandbox, path): - SandboxDerived.__init__(self, sandbox) + def __init__(self, context, path): + ContextDerived.__init__(self, context) self.path = path -class GeneratedInclude(SandboxDerived): +class GeneratedInclude(ContextDerived): """Describes an individual generated include path.""" __slots__ = ( 'path', ) - def __init__(self, sandbox, path): - SandboxDerived.__init__(self, sandbox) + def __init__(self, context, path): + ContextDerived.__init__(self, context) self.path = path -class PerSourceFlag(SandboxDerived): +class PerSourceFlag(ContextDerived): """Describes compiler flags specified for individual source files.""" __slots__ = ( @@ -614,14 +618,14 @@ class PerSourceFlag(SandboxDerived): 'flags', ) - def __init__(self, sandbox, file_name, flags): - SandboxDerived.__init__(self, sandbox) + def __init__(self, context, file_name, flags): + ContextDerived.__init__(self, context) self.file_name = file_name self.flags = flags -class JARManifest(SandboxDerived): +class JARManifest(ContextDerived): """Describes an individual JAR manifest file and how to process it. 
This class isn't very useful for optimizing backends yet because we don't @@ -632,13 +636,13 @@ class JARManifest(SandboxDerived): 'path', ) - def __init__(self, sandbox, path): - SandboxDerived.__init__(self, sandbox) + def __init__(self, context, path): + ContextDerived.__init__(self, context) self.path = path -class JavaScriptModules(SandboxDerived): +class JavaScriptModules(ContextDerived): """Describes a JavaScript module.""" __slots__ = ( @@ -646,15 +650,15 @@ class JavaScriptModules(SandboxDerived): 'flavor', ) - def __init__(self, sandbox, modules, flavor): - super(JavaScriptModules, self).__init__(sandbox) + def __init__(self, context, modules, flavor): + super(JavaScriptModules, self).__init__(context) self.modules = modules self.flavor = flavor -class SandboxWrapped(SandboxDerived): - """Generic sandbox container object for a wrapped rich object. +class ContextWrapped(ContextDerived): + """Generic context derived container object for a wrapped rich object. Use this wrapper class to shuttle a rich build system object completely defined in moz.build files through the tree metadata @@ -665,8 +669,8 @@ class SandboxWrapped(SandboxDerived): 'wrapped', ) - def __init__(self, sandbox, wrapped): - SandboxDerived.__init__(self, sandbox) + def __init__(self, context, wrapped): + ContextDerived.__init__(self, context) self.wrapped = wrapped @@ -701,7 +705,7 @@ def __init__(self, name, sources=[], generated_sources=[], self.javac_flags = list(javac_flags) -class InstallationTarget(SandboxDerived): +class InstallationTarget(ContextDerived): """Describes the rules that affect where files get installed to.""" __slots__ = ( @@ -711,13 +715,13 @@ class InstallationTarget(SandboxDerived): 'enabled' ) - def __init__(self, sandbox): - SandboxDerived.__init__(self, sandbox) + def __init__(self, context): + ContextDerived.__init__(self, context) - self.xpiname = sandbox.get('XPI_NAME', '') - self.subdir = sandbox.get('DIST_SUBDIR', '') - self.target = sandbox['FINAL_TARGET'] 
- self.enabled = not sandbox.get('NO_DIST_INSTALL', False) + self.xpiname = context.get('XPI_NAME', '') + self.subdir = context.get('DIST_SUBDIR', '') + self.target = context['FINAL_TARGET'] + self.enabled = not context.get('NO_DIST_INSTALL', False) def is_custom(self): """Returns whether or not the target is not derived from the default diff --git a/python/mozbuild/mozbuild/frontend/emitter.py b/python/mozbuild/mozbuild/frontend/emitter.py index 496fda981f47e..41e32328e00cf 100644 --- a/python/mozbuild/mozbuild/frontend/emitter.py +++ b/python/mozbuild/mozbuild/frontend/emitter.py @@ -25,6 +25,7 @@ from .data import ( ConfigFileSubstitution, + ContextWrapped, Defines, DirectoryTraversal, Exports, @@ -52,7 +53,6 @@ Program, ReaderSummary, Resources, - SandboxWrapped, SharedLibrary, SimpleProgram, StaticLibrary, @@ -63,13 +63,9 @@ XPIDLFile, ) -from .reader import ( - MozbuildSandbox, - SandboxValidationError, -) +from .reader import SandboxValidationError -from .gyp_reader import GypSandbox -from .sandbox import GlobalNamespace +from .context import Context class TreeMetadataEmitter(LoggingMixin): @@ -121,7 +117,7 @@ def emit(self, output): file_count = 0 sandbox_execution_time = 0.0 emitter_time = 0.0 - sandboxes = {} + contexts = {} def emit_objs(objs): for o in objs: @@ -130,13 +126,13 @@ def emit_objs(objs): raise Exception('Unhandled object of type %s' % type(o)) for out in output: - if isinstance(out, (MozbuildSandbox, GypSandbox)): - # Keep all sandboxes around, we will need them later. - sandboxes[out['OBJDIR']] = out + if isinstance(out, Context): + # Keep all contexts around, we will need them later. + contexts[out.objdir] = out start = time.time() # We need to expand the generator for the timings to work. 
- objs = list(self.emit_from_sandbox(out)) + objs = list(self.emit_from_context(out)) emitter_time += time.time() - start for o in emit_objs(objs): yield o @@ -149,14 +145,14 @@ def emit_objs(objs): raise Exception('Unhandled output type: %s' % type(out)) start = time.time() - objs = list(self._emit_libs_derived(sandboxes)) + objs = list(self._emit_libs_derived(contexts)) emitter_time += time.time() - start for o in emit_objs(objs): yield o yield ReaderSummary(file_count, sandbox_execution_time, emitter_time) - def _emit_libs_derived(self, sandboxes): + def _emit_libs_derived(self, contexts): # First do FINAL_LIBRARY linkage. for lib in (l for libs in self._libs.values() for l in libs): if not isinstance(lib, StaticLibrary) or not lib.link_into: @@ -164,7 +160,7 @@ def _emit_libs_derived(self, sandboxes): if lib.link_into not in self._libs: raise SandboxValidationError( 'FINAL_LIBRARY ("%s") does not match any LIBRARY_NAME' - % lib.link_into, sandboxes[lib.objdir]) + % lib.link_into, contexts[lib.objdir]) candidates = self._libs[lib.link_into] # When there are multiple candidates, but all are in the same @@ -181,11 +177,11 @@ def _emit_libs_derived(self, sandboxes): 'FINAL_LIBRARY ("%s") matches a LIBRARY_NAME defined in ' 'multiple places:\n %s' % (lib.link_into, '\n '.join(l.objdir for l in candidates)), - sandboxes[lib.objdir]) + contexts[lib.objdir]) # Next, USE_LIBS linkage. - for sandbox, obj, variable in self._linkage: - self._link_libraries(sandbox, obj, variable) + for context, obj, variable in self._linkage: + self._link_libraries(context, obj, variable) def recurse_refs(lib): for o in lib.refs: @@ -206,7 +202,7 @@ def recurse_refs(lib): 'library names:\n %s\n\nMaybe you can remove the ' 'static "%s" library?' 
% (lib.basename, '\n '.join(shared_libs), lib.basename), - sandboxes[lib.objdir]) + contexts[lib.objdir]) def recurse_libs(lib): for obj in lib.linked_libraries: @@ -227,7 +223,7 @@ def recurse_libs(lib): if p in sent_passthru: continue sent_passthru.add(p) - passthru = VariablePassthru(sandboxes[p]) + passthru = VariablePassthru(contexts[p]) passthru.variables['FINAL_LIBRARY'] = lib.basename yield passthru yield lib @@ -240,21 +236,21 @@ def recurse_libs(lib): 'target': 'LIBRARY_NAME', } - def _link_libraries(self, sandbox, obj, variable): + def _link_libraries(self, context, obj, variable): """Add linkage declarations to a given object.""" assert isinstance(obj, Linkable) extra = [] # Add stdc++compat library when wanted and needed compat_varname = 'MOZ_LIBSTDCXX_%s_VERSION' % obj.KIND.upper() - if sandbox.config.substs.get(compat_varname) \ + if context.config.substs.get(compat_varname) \ and not isinstance(obj, (StaticLibrary, HostLibrary)): extra.append({ 'target': 'stdc++compat', 'host': 'host_stdc++compat', }[obj.KIND]) - for path in sandbox.get(variable, []) + extra: + for path in context.get(variable, []) + extra: force_static = path.startswith('static:') and obj.KIND == 'target' if force_static: path = path[7:] @@ -284,7 +280,7 @@ def _link_libraries(self, sandbox, obj, variable): raise SandboxValidationError( '%s contains "%s", but there is no "%s" %s in %s.' % (variable, path, name, - self.LIBRARY_NAME_VAR[obj.KIND], dir), sandbox) + self.LIBRARY_NAME_VAR[obj.KIND], dir), context) if len(candidates) > 1: # If there's more than one remaining candidate, it could be @@ -307,17 +303,17 @@ def _link_libraries(self, sandbox, obj, variable): raise SandboxValidationError( '%s contains "static:%s", but there is no static ' '"%s" %s in %s.' 
% (variable, path, name, - self.LIBRARY_NAME_VAR[obj.KIND], dir), sandbox) + self.LIBRARY_NAME_VAR[obj.KIND], dir), context) raise SandboxValidationError( '%s contains "static:%s", but there is no static "%s" ' '%s in the tree' % (variable, name, name, - self.LIBRARY_NAME_VAR[obj.KIND]), sandbox) + self.LIBRARY_NAME_VAR[obj.KIND]), context) if not candidates: raise SandboxValidationError( '%s contains "%s", which does not match any %s in the tree.' % (variable, path, self.LIBRARY_NAME_VAR[obj.KIND]), - sandbox) + context) elif len(candidates) > 1: paths = (mozpath.join(l.relativedir, 'moz.build') @@ -326,7 +322,7 @@ def _link_libraries(self, sandbox, obj, variable): '%s contains "%s", which matches a %s defined in multiple ' 'places:\n %s' % (variable, path, self.LIBRARY_NAME_VAR[obj.KIND], - '\n '.join(paths)), sandbox) + '\n '.join(paths)), context) elif force_static and not isinstance(candidates[0], StaticLibrary): raise SandboxValidationError( @@ -334,7 +330,7 @@ def _link_libraries(self, sandbox, obj, variable): 'in %s. You may want to add FORCE_STATIC_LIB=True in ' '%s/moz.build, or remove "static:".' % (variable, path, name, candidates[0].relobjdir, candidates[0].relobjdir), - sandbox) + context) elif isinstance(obj, StaticLibrary) and isinstance(candidates[0], SharedLibrary): @@ -342,78 +338,70 @@ def _link_libraries(self, sandbox, obj, variable): obj.link_library(candidates[0]) # Link system libraries from OS_LIBS/HOST_OS_LIBS. - for lib in sandbox.get(variable.replace('USE', 'OS'), []): + for lib in context.get(variable.replace('USE', 'OS'), []): obj.link_system_library(lib) @memoize def _get_external_library(self, dir, name, force_static): # Create ExternalStaticLibrary or ExternalSharedLibrary object with a - # mock sandbox more or less truthful about where the external library - # is. 
- sandbox = GlobalNamespace() - sandbox.config = self.config - sandbox.main_path = dir - sandbox.all_paths = set([dir]) - with sandbox.allow_all_writes() as s: - s['RELATIVEDIR'] = dir - s['SRCDIR'] = mozpath.join(self.config.topsrcdir, dir) - s['OBJDIR'] = mozpath.join(self.config.topobjdir, dir) - + # context more or less truthful about where the external library is. + context = Context(config=self.config) + context.add_source(mozpath.join(self.config.topsrcdir, dir, 'dummy')) if force_static: - return ExternalStaticLibrary(sandbox, name) + return ExternalStaticLibrary(context, name) else: - return ExternalSharedLibrary(sandbox, name) + return ExternalSharedLibrary(context, name) - def emit_from_sandbox(self, sandbox): - """Convert a MozbuildSandbox to tree metadata objects. + def emit_from_context(self, context): + """Convert a Context to tree metadata objects. - This is a generator of mozbuild.frontend.data.SandboxDerived instances. + This is a generator of mozbuild.frontend.data.ContextDerived instances. """ # We always emit a directory traversal descriptor. This is needed by # the recursive make backend. - for o in self._emit_directory_traversal_from_sandbox(sandbox): yield o + for o in self._emit_directory_traversal_from_context(context): yield o - for path in sandbox['CONFIGURE_SUBST_FILES']: - yield self._create_substitution(ConfigFileSubstitution, sandbox, + for path in context['CONFIGURE_SUBST_FILES']: + yield self._create_substitution(ConfigFileSubstitution, context, path) - for path in sandbox['CONFIGURE_DEFINE_FILES']: - yield self._create_substitution(HeaderFileSubstitution, sandbox, + for path in context['CONFIGURE_DEFINE_FILES']: + yield self._create_substitution(HeaderFileSubstitution, context, path) # XPIDL source files get processed and turned into .h and .xpt files. # If there are multiple XPIDL files in a directory, they get linked # together into a final .xpt, which has the name defined by # XPIDL_MODULE. 
- xpidl_module = sandbox['XPIDL_MODULE'] + xpidl_module = context['XPIDL_MODULE'] - if sandbox['XPIDL_SOURCES'] and not xpidl_module: + if context['XPIDL_SOURCES'] and not xpidl_module: raise SandboxValidationError('XPIDL_MODULE must be defined if ' - 'XPIDL_SOURCES is defined.', sandbox) + 'XPIDL_SOURCES is defined.', context) - if xpidl_module and not sandbox['XPIDL_SOURCES']: + if xpidl_module and not context['XPIDL_SOURCES']: raise SandboxValidationError('XPIDL_MODULE cannot be defined ' - 'unless there are XPIDL_SOURCES', sandbox) + 'unless there are XPIDL_SOURCES', context) - if sandbox['XPIDL_SOURCES'] and sandbox['NO_DIST_INSTALL']: + if context['XPIDL_SOURCES'] and context['NO_DIST_INSTALL']: self.log(logging.WARN, 'mozbuild_warning', dict( - path=sandbox.main_path), + path=context.main_path), '{path}: NO_DIST_INSTALL has no effect on XPIDL_SOURCES.') - for idl in sandbox['XPIDL_SOURCES']: - yield XPIDLFile(sandbox, mozpath.join(sandbox['SRCDIR'], idl), + for idl in context['XPIDL_SOURCES']: + yield XPIDLFile(context, mozpath.join(context.srcdir, idl), xpidl_module) for symbol in ('SOURCES', 'HOST_SOURCES', 'UNIFIED_SOURCES'): - for src in (sandbox[symbol] or []): - if not os.path.exists(mozpath.join(sandbox['SRCDIR'], src)): + for src in (context[symbol] or []): + if not os.path.exists(mozpath.join(context.srcdir, src)): raise SandboxValidationError('File listed in %s does not ' - 'exist: \'%s\'' % (symbol, src), sandbox) + 'exist: \'%s\'' % (symbol, src), context) # Proxy some variables as-is until we have richer classes to represent # them. We should aim to keep this set small because it violates the # desired abstraction of the build definition away from makefiles. 
- passthru = VariablePassthru(sandbox) + passthru = VariablePassthru(context) varlist = [ 'ANDROID_GENERATED_RESFILES', 'ANDROID_RES_DIRS', @@ -439,19 +427,20 @@ def emit_from_sandbox(self, sandbox): 'LD_VERSION_SCRIPT', ] for v in varlist: - if v in sandbox and sandbox[v]: - passthru.variables[v] = sandbox[v] + if v in context and context[v]: + passthru.variables[v] = context[v] for v in ['CFLAGS', 'CXXFLAGS', 'CMFLAGS', 'CMMFLAGS', 'LDFLAGS']: - if v in sandbox and sandbox[v]: - passthru.variables['MOZBUILD_' + v] = sandbox[v] + if v in context and context[v]: + passthru.variables['MOZBUILD_' + v] = context[v] # NO_VISIBILITY_FLAGS is slightly different - if sandbox['NO_VISIBILITY_FLAGS']: + if context['NO_VISIBILITY_FLAGS']: passthru.variables['VISIBILITY_FLAGS'] = '' - if sandbox['DELAYLOAD_DLLS']: - passthru.variables['DELAYLOAD_LDFLAGS'] = [('-DELAYLOAD:%s' % dll) for dll in sandbox['DELAYLOAD_DLLS']] + if context['DELAYLOAD_DLLS']: + passthru.variables['DELAYLOAD_LDFLAGS'] = [('-DELAYLOAD:%s' % dll) + for dll in context['DELAYLOAD_DLLS']] passthru.variables['USE_DELAYIMP'] = True varmap = dict( @@ -484,24 +473,24 @@ def emit_from_sandbox(self, sandbox): varmap.update(dict(('GENERATED_%s' % k, v) for k, v in varmap.items() if k in ('SOURCES', 'UNIFIED_SOURCES'))) for variable, mapping in varmap.items(): - for f in sandbox[variable]: + for f in context[variable]: ext = mozpath.splitext(f)[1] if ext not in mapping: raise SandboxValidationError( - '%s has an unknown file type.' % f, sandbox) + '%s has an unknown file type.' 
% f, context) l = passthru.variables.setdefault(mapping[ext], []) l.append(f) if variable.startswith('GENERATED_'): l = passthru.variables.setdefault('GARBAGE', []) l.append(f) - no_pgo = sandbox.get('NO_PGO') - sources = sandbox.get('SOURCES', []) + no_pgo = context.get('NO_PGO') + sources = context.get('SOURCES', []) no_pgo_sources = [f for f in sources if sources[f].no_pgo] if no_pgo: if no_pgo_sources: raise SandboxValidationError('NO_PGO and SOURCES[...].no_pgo ' - 'cannot be set at the same time', sandbox) + 'cannot be set at the same time', context) passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo if no_pgo_sources: passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo_sources @@ -509,60 +498,60 @@ def emit_from_sandbox(self, sandbox): sources_with_flags = [f for f in sources if sources[f].flags] for f in sources_with_flags: ext = mozpath.splitext(f)[1] - yield PerSourceFlag(sandbox, f, sources[f].flags) + yield PerSourceFlag(context, f, sources[f].flags) - exports = sandbox.get('EXPORTS') + exports = context.get('EXPORTS') if exports: - yield Exports(sandbox, exports, - dist_install=not sandbox.get('NO_DIST_INSTALL', False)) + yield Exports(context, exports, + dist_install=not context.get('NO_DIST_INSTALL', False)) - defines = sandbox.get('DEFINES') + defines = context.get('DEFINES') if defines: - yield Defines(sandbox, defines) + yield Defines(context, defines) - resources = sandbox.get('RESOURCE_FILES') + resources = context.get('RESOURCE_FILES') if resources: - yield Resources(sandbox, resources, defines) + yield Resources(context, resources, defines) for kind, cls in [('PROGRAM', Program), ('HOST_PROGRAM', HostProgram)]: - program = sandbox.get(kind) + program = context.get(kind) if program: if program in self._binaries: raise SandboxValidationError( 'Cannot use "%s" as %s name, ' 'because it is already used in %s' % (program, kind, - self._binaries[program].relativedir), sandbox) - self._binaries[program] = cls(sandbox, program) - 
self._linkage.append((sandbox, self._binaries[program], + self._binaries[program].relativedir), context) + self._binaries[program] = cls(context, program) + self._linkage.append((context, self._binaries[program], kind.replace('PROGRAM', 'USE_LIBS'))) for kind, cls in [ ('SIMPLE_PROGRAMS', SimpleProgram), ('CPP_UNIT_TESTS', SimpleProgram), ('HOST_SIMPLE_PROGRAMS', HostSimpleProgram)]: - for program in sandbox[kind]: + for program in context[kind]: if program in self._binaries: raise SandboxValidationError( 'Cannot use "%s" in %s, ' 'because it is already used in %s' % (program, kind, - self._binaries[program].relativedir), sandbox) - self._binaries[program] = cls(sandbox, program, + self._binaries[program].relativedir), context) + self._binaries[program] = cls(context, program, is_unit_test=kind == 'CPP_UNIT_TESTS') - self._linkage.append((sandbox, self._binaries[program], + self._linkage.append((context, self._binaries[program], 'HOST_USE_LIBS' if kind == 'HOST_SIMPLE_PROGRAMS' else 'USE_LIBS')) - extra_js_modules = sandbox.get('EXTRA_JS_MODULES') + extra_js_modules = context.get('EXTRA_JS_MODULES') if extra_js_modules: - yield JavaScriptModules(sandbox, extra_js_modules, 'extra') + yield JavaScriptModules(context, extra_js_modules, 'extra') - extra_pp_js_modules = sandbox.get('EXTRA_PP_JS_MODULES') + extra_pp_js_modules = context.get('EXTRA_PP_JS_MODULES') if extra_pp_js_modules: - yield JavaScriptModules(sandbox, extra_pp_js_modules, 'extra_pp') + yield JavaScriptModules(context, extra_pp_js_modules, 'extra_pp') - test_js_modules = sandbox.get('TESTING_JS_MODULES') + test_js_modules = context.get('TESTING_JS_MODULES') if test_js_modules: - yield JavaScriptModules(sandbox, test_js_modules, 'testing') + yield JavaScriptModules(context, test_js_modules, 'testing') simple_lists = [ ('GENERATED_EVENTS_WEBIDL_FILES', GeneratedEventWebIDLFile), @@ -576,62 +565,61 @@ def emit_from_sandbox(self, sandbox): ('WEBIDL_FILES', WebIDLFile), ('WEBIDL_EXAMPLE_INTERFACES', 
ExampleWebIDLInterface), ] - for sandbox_var, klass in simple_lists: - for name in sandbox.get(sandbox_var, []): - yield klass(sandbox, name) + for context_var, klass in simple_lists: + for name in context.get(context_var, []): + yield klass(context, name) - if sandbox.get('FINAL_TARGET') or sandbox.get('XPI_NAME') or \ - sandbox.get('DIST_SUBDIR'): - yield InstallationTarget(sandbox) + if context.get('FINAL_TARGET') or context.get('XPI_NAME') or \ + context.get('DIST_SUBDIR'): + yield InstallationTarget(context) - host_libname = sandbox.get('HOST_LIBRARY_NAME') - libname = sandbox.get('LIBRARY_NAME') + host_libname = context.get('HOST_LIBRARY_NAME') + libname = context.get('LIBRARY_NAME') if host_libname: if host_libname == libname: raise SandboxValidationError('LIBRARY_NAME and ' - 'HOST_LIBRARY_NAME must have a different value', sandbox) - lib = HostLibrary(sandbox, host_libname) + 'HOST_LIBRARY_NAME must have a different value', context) + lib = HostLibrary(context, host_libname) self._libs[host_libname].append(lib) - self._linkage.append((sandbox, lib, 'HOST_USE_LIBS')) + self._linkage.append((context, lib, 'HOST_USE_LIBS')) - final_lib = sandbox.get('FINAL_LIBRARY') + final_lib = context.get('FINAL_LIBRARY') if not libname and final_lib: # If no LIBRARY_NAME is given, create one. 
- libname = sandbox['RELATIVEDIR'].replace('/', '_') + libname = context.relsrcdir.replace('/', '_') - static_lib = sandbox.get('FORCE_STATIC_LIB') - shared_lib = sandbox.get('FORCE_SHARED_LIB') + static_lib = context.get('FORCE_STATIC_LIB') + shared_lib = context.get('FORCE_SHARED_LIB') - static_name = sandbox.get('STATIC_LIBRARY_NAME') - shared_name = sandbox.get('SHARED_LIBRARY_NAME') + static_name = context.get('STATIC_LIBRARY_NAME') + shared_name = context.get('SHARED_LIBRARY_NAME') - is_framework = sandbox.get('IS_FRAMEWORK') - is_component = sandbox.get('IS_COMPONENT') + is_framework = context.get('IS_FRAMEWORK') + is_component = context.get('IS_COMPONENT') - soname = sandbox.get('SONAME') + soname = context.get('SONAME') shared_args = {} static_args = {} if final_lib: - if isinstance(sandbox, MozbuildSandbox): - if static_lib: - raise SandboxValidationError( - 'FINAL_LIBRARY implies FORCE_STATIC_LIB. ' - 'Please remove the latter.', sandbox) + if static_lib: + raise SandboxValidationError( + 'FINAL_LIBRARY implies FORCE_STATIC_LIB. ' + 'Please remove the latter.', context) if shared_lib: raise SandboxValidationError( 'FINAL_LIBRARY conflicts with FORCE_SHARED_LIB. ' - 'Please remove one.', sandbox) + 'Please remove one.', context) if is_framework: raise SandboxValidationError( 'FINAL_LIBRARY conflicts with IS_FRAMEWORK. ' - 'Please remove one.', sandbox) + 'Please remove one.', context) if is_component: raise SandboxValidationError( 'FINAL_LIBRARY conflicts with IS_COMPONENT. ' - 'Please remove one.', sandbox) + 'Please remove one.', context) static_args['link_into'] = final_lib static_lib = True @@ -640,15 +628,15 @@ def emit_from_sandbox(self, sandbox): if shared_lib: raise SandboxValidationError( 'IS_COMPONENT implies FORCE_SHARED_LIB. ' - 'Please remove the latter.', sandbox) + 'Please remove the latter.', context) if is_framework: raise SandboxValidationError( 'IS_COMPONENT conflicts with IS_FRAMEWORK. 
' - 'Please remove one.', sandbox) + 'Please remove one.', context) if static_lib: raise SandboxValidationError( 'IS_COMPONENT conflicts with FORCE_STATIC_LIB. ' - 'Please remove one.', sandbox) + 'Please remove one.', context) shared_lib = True shared_args['variant'] = SharedLibrary.COMPONENT @@ -656,30 +644,32 @@ def emit_from_sandbox(self, sandbox): if shared_lib: raise SandboxValidationError( 'IS_FRAMEWORK implies FORCE_SHARED_LIB. ' - 'Please remove the latter.', sandbox) + 'Please remove the latter.', context) if soname: raise SandboxValidationError( 'IS_FRAMEWORK conflicts with SONAME. ' - 'Please remove one.', sandbox) + 'Please remove one.', context) shared_lib = True shared_args['variant'] = SharedLibrary.FRAMEWORK if static_name: if not static_lib: raise SandboxValidationError( - 'STATIC_LIBRARY_NAME requires FORCE_STATIC_LIB', sandbox) + 'STATIC_LIBRARY_NAME requires FORCE_STATIC_LIB', + context) static_args['real_name'] = static_name if shared_name: if not shared_lib: raise SandboxValidationError( - 'SHARED_LIBRARY_NAME requires FORCE_SHARED_LIB', sandbox) + 'SHARED_LIBRARY_NAME requires FORCE_SHARED_LIB', + context) shared_args['real_name'] = shared_name if soname: if not shared_lib: raise SandboxValidationError( - 'SONAME requires FORCE_SHARED_LIB', sandbox) + 'SONAME requires FORCE_SHARED_LIB', context) shared_args['soname'] = soname if not static_lib and not shared_lib: @@ -687,7 +677,7 @@ def emit_from_sandbox(self, sandbox): # If both a shared and a static library are created, only the # shared library is meant to be a SDK library. - if sandbox.get('SDK_LIBRARY'): + if context.get('SDK_LIBRARY'): if shared_lib: shared_args['is_sdk'] = True elif static_lib: @@ -699,36 +689,36 @@ def emit_from_sandbox(self, sandbox): 'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, ' 'but neither STATIC_LIBRARY_NAME or ' 'SHARED_LIBRARY_NAME is set. 
At least one is required.', - sandbox) + context) if static_name and not shared_name and static_name == libname: raise SandboxValidationError( 'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, ' 'but STATIC_LIBRARY_NAME is the same as LIBRARY_NAME, ' 'and SHARED_LIBRARY_NAME is unset. Please either ' 'change STATIC_LIBRARY_NAME or LIBRARY_NAME, or set ' - 'SHARED_LIBRARY_NAME.', sandbox) + 'SHARED_LIBRARY_NAME.', context) if shared_name and not static_name and shared_name == libname: raise SandboxValidationError( 'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, ' 'but SHARED_LIBRARY_NAME is the same as LIBRARY_NAME, ' 'and STATIC_LIBRARY_NAME is unset. Please either ' 'change SHARED_LIBRARY_NAME or LIBRARY_NAME, or set ' - 'STATIC_LIBRARY_NAME.', sandbox) + 'STATIC_LIBRARY_NAME.', context) if shared_name and static_name and shared_name == static_name: raise SandboxValidationError( 'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, ' 'but SHARED_LIBRARY_NAME is the same as ' 'STATIC_LIBRARY_NAME. Please change one of them.', - sandbox) + context) if shared_lib: - lib = SharedLibrary(sandbox, libname, **shared_args) + lib = SharedLibrary(context, libname, **shared_args) self._libs[libname].append(lib) - self._linkage.append((sandbox, lib, 'USE_LIBS')) + self._linkage.append((context, lib, 'USE_LIBS')) if static_lib: - lib = StaticLibrary(sandbox, libname, **static_args) + lib = StaticLibrary(context, libname, **static_args) self._libs[libname].append(lib) - self._linkage.append((sandbox, lib, 'USE_LIBS')) + self._linkage.append((context, lib, 'USE_LIBS')) # While there are multiple test manifests, the behavior is very similar # across them. 
We enforce this by having common handling of all @@ -761,61 +751,61 @@ def emit_from_sandbox(self, sandbox): ) for prefix, info in test_manifests.items(): - for path in sandbox.get('%s_MANIFESTS' % prefix, []): - for obj in self._process_test_manifest(sandbox, info, path): + for path in context.get('%s_MANIFESTS' % prefix, []): + for obj in self._process_test_manifest(context, info, path): yield obj for flavor in ('crashtest', 'reftest'): - for path in sandbox.get('%s_MANIFESTS' % flavor.upper(), []): - for obj in self._process_reftest_manifest(sandbox, flavor, path): + for path in context.get('%s_MANIFESTS' % flavor.upper(), []): + for obj in self._process_reftest_manifest(context, flavor, path): yield obj - jar_manifests = sandbox.get('JAR_MANIFESTS', []) + jar_manifests = context.get('JAR_MANIFESTS', []) if len(jar_manifests) > 1: raise SandboxValidationError('While JAR_MANIFESTS is a list, ' - 'it is currently limited to one value.', sandbox) + 'it is currently limited to one value.', context) for path in jar_manifests: - yield JARManifest(sandbox, mozpath.join(sandbox['SRCDIR'], path)) + yield JARManifest(context, mozpath.join(context.srcdir, path)) # Temporary test to look for jar.mn files that creep in without using # the new declaration. Before, we didn't require jar.mn files to # declared anywhere (they were discovered). This will detect people # relying on the old behavior. - if os.path.exists(os.path.join(sandbox['SRCDIR'], 'jar.mn')): + if os.path.exists(os.path.join(context.srcdir, 'jar.mn')): if 'jar.mn' not in jar_manifests: raise SandboxValidationError('A jar.mn exists but it ' 'is not referenced in the moz.build file. 
' - 'Please define JAR_MANIFESTS.', sandbox) + 'Please define JAR_MANIFESTS.', context) - for name, jar in sandbox.get('JAVA_JAR_TARGETS', {}).items(): - yield SandboxWrapped(sandbox, jar) + for name, jar in context.get('JAVA_JAR_TARGETS', {}).items(): + yield ContextWrapped(context, jar) - for name, data in sandbox.get('ANDROID_ECLIPSE_PROJECT_TARGETS', {}).items(): - yield SandboxWrapped(sandbox, data) + for name, data in context.get('ANDROID_ECLIPSE_PROJECT_TARGETS', {}).items(): + yield ContextWrapped(context, data) if passthru.variables: yield passthru - def _create_substitution(self, cls, sandbox, path): + def _create_substitution(self, cls, context, path): if os.path.isabs(path): path = path[1:] - sub = cls(sandbox) - sub.input_path = mozpath.join(sandbox['SRCDIR'], '%s.in' % path) - sub.output_path = mozpath.join(sandbox['OBJDIR'], path) + sub = cls(context) + sub.input_path = mozpath.join(context.srcdir, '%s.in' % path) + sub.output_path = mozpath.join(context.objdir, path) sub.relpath = path return sub - def _process_test_manifest(self, sandbox, info, manifest_path): + def _process_test_manifest(self, context, info, manifest_path): flavor, install_root, install_subdir, filter_inactive = info manifest_path = mozpath.normpath(manifest_path) - path = mozpath.normpath(mozpath.join(sandbox['SRCDIR'], manifest_path)) + path = mozpath.normpath(mozpath.join(context.srcdir, manifest_path)) manifest_dir = mozpath.dirname(path) manifest_reldir = mozpath.dirname(mozpath.relpath(path, - sandbox.config.topsrcdir)) + context.config.topsrcdir)) install_prefix = mozpath.join(install_root, install_subdir) try: @@ -823,9 +813,9 @@ def _process_test_manifest(self, sandbox, info, manifest_path): defaults = m.manifest_defaults[os.path.normpath(path)] if not m.tests and not 'support-files' in defaults: raise SandboxValidationError('Empty test manifest: %s' - % path, sandbox) + % path, context) - obj = TestManifest(sandbox, path, m, flavor=flavor, + obj = TestManifest(context, 
path, m, flavor=flavor, install_prefix=install_prefix, relpath=mozpath.join(manifest_reldir, mozpath.basename(path)), dupe_manifest='dupe-manifest' in defaults) @@ -842,7 +832,7 @@ def _process_test_manifest(self, sandbox, info, manifest_path): if missing: raise SandboxValidationError('Test manifest (%s) lists ' 'test that does not exist: %s' % ( - path, ', '.join(missing)), sandbox) + path, ', '.join(missing)), context) out_dir = mozpath.join(install_prefix, manifest_reldir) if 'install-to-subdir' in defaults: @@ -941,7 +931,7 @@ def process_support_files(test): except KeyError: raise SandboxValidationError('Error processing test ' 'manifest %s: entry in generated-files not present ' - 'elsewhere in manifest: %s' % (path, f), sandbox) + 'elsewhere in manifest: %s' % (path, f), context) obj.external_installs.add(mozpath.join(out_dir, f)) @@ -950,14 +940,14 @@ def process_support_files(test): raise SandboxValidationError('Error processing test ' 'manifest file %s: %s' % (path, '\n'.join(traceback.format_exception(*sys.exc_info()))), - sandbox) + context) - def _process_reftest_manifest(self, sandbox, flavor, manifest_path): + def _process_reftest_manifest(self, context, flavor, manifest_path): manifest_path = mozpath.normpath(manifest_path) manifest_full_path = mozpath.normpath(mozpath.join( - sandbox['SRCDIR'], manifest_path)) + context.srcdir, manifest_path)) manifest_reldir = mozpath.dirname(mozpath.relpath(manifest_full_path, - sandbox.config.topsrcdir)) + context.config.topsrcdir)) manifest = reftest.ReftestManifest() manifest.load(manifest_full_path) @@ -965,7 +955,7 @@ def _process_reftest_manifest(self, sandbox, flavor, manifest_path): # reftest manifests don't come from manifest parser. But they are # similar enough that we can use the same emitted objects. Note # that we don't perform any installs for reftests. 
- obj = TestManifest(sandbox, manifest_full_path, manifest, + obj = TestManifest(context, manifest_full_path, manifest, flavor=flavor, install_prefix='%s/' % flavor, relpath=mozpath.join(manifest_reldir, mozpath.basename(manifest_path))) @@ -984,19 +974,19 @@ def _process_reftest_manifest(self, sandbox, flavor, manifest_path): yield obj - def _emit_directory_traversal_from_sandbox(self, sandbox): - o = DirectoryTraversal(sandbox) - o.dirs = sandbox.get('DIRS', []) - o.test_dirs = sandbox.get('TEST_DIRS', []) - o.affected_tiers = sandbox.get_affected_tiers() + def _emit_directory_traversal_from_context(self, context): + o = DirectoryTraversal(context) + o.dirs = context.get('DIRS', []) + o.test_dirs = context.get('TEST_DIRS', []) + o.affected_tiers = context.get_affected_tiers() # Some paths have a subconfigure, yet also have a moz.build. Those # shouldn't end up in self._external_paths. self._external_paths -= { o.relobjdir } - if 'TIERS' in sandbox: - for tier in sandbox['TIERS']: - o.tier_dirs[tier] = sandbox['TIERS'][tier]['regular'] + \ - sandbox['TIERS'][tier]['external'] + if 'TIERS' in context: + for tier in context['TIERS']: + o.tier_dirs[tier] = context['TIERS'][tier]['regular'] + \ + context['TIERS'][tier]['external'] yield o diff --git a/python/mozbuild/mozbuild/frontend/gyp_reader.py b/python/mozbuild/mozbuild/frontend/gyp_reader.py index 367083e47a76b..1af7c57d8a917 100644 --- a/python/mozbuild/mozbuild/frontend/gyp_reader.py +++ b/python/mozbuild/mozbuild/frontend/gyp_reader.py @@ -9,11 +9,15 @@ import os import mozpack.path as mozpath from mozpack.files import FileFinder -from .sandbox import ( - alphabetical_sorted, - GlobalNamespace, +from .sandbox import alphabetical_sorted +from .context import ( + Context, + VARIABLES, +) +from mozbuild.util import ( + List, + memoize, ) -from .sandbox_symbols import VARIABLES from .reader import SandboxValidationError # Define this module as gyp.generator.mozbuild so that gyp can use it @@ -48,22 +52,28 @@ 
generator_default_variables[unused] = b'' -class GypSandbox(GlobalNamespace): - """Class mimicking MozbuildSandbox for processing of the data - extracted from Gyp by a mozbuild backend. +class GypContext(Context): + """Specialized Context for use with data extracted from Gyp. - Inherits from GlobalNamespace because it doesn't need the extra - functionality from Sandbox. + config is the ConfigEnvironment for this context. + relobjdir is the object directory that will be used for this context, + relative to the topobjdir defined in the ConfigEnvironment. """ - def __init__(self, main_path, dependencies_paths=[]): - self.main_path = main_path - self.all_paths = set([main_path]) | set(dependencies_paths) - self.execution_time = 0 - GlobalNamespace.__init__(self, allowed_variables=VARIABLES) - - def get_affected_tiers(self): - tiers = (VARIABLES[key][3] for key in self if key in VARIABLES) - return set(tier for tier in tiers if tier) + def __init__(self, config, relobjdir): + self._relobjdir = relobjdir + Context.__init__(self, allowed_variables=self.VARIABLES(), config=config) + + @classmethod + @memoize + def VARIABLES(cls): + """Returns the allowed variables for a GypContext.""" + # Using a class method instead of a class variable to hide the content + # from sphinx. + return dict(VARIABLES, + IS_GYP_DIR=(bool, bool, '', None), + EXTRA_ASSEMBLER_FLAGS=(List, list, '', None), + EXTRA_COMPILE_FLAGS=(List, list, '', None), + ) def encode(value): @@ -73,7 +83,7 @@ def encode(value): def read_from_gyp(config, path, output, vars, non_unified_sources = set()): - """Read a gyp configuration and emits GypSandboxes for the backend to + """Read a gyp configuration and emits GypContexts for the backend to process. config is a ConfigEnvironment, path is the path to a root gyp configuration @@ -115,32 +125,29 @@ def read_from_gyp(config, path, output, vars, non_unified_sources = set()): # gives us paths normalized with forward slash separator. 
for target in gyp.common.AllTargets(flat_list, targets, path.replace(b'/', os.sep)): build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(target) + + # Each target is given its own objdir. The base of that objdir + # is derived from the relative path from the root gyp file path + # to the current build_file, placed under the given output + # directory. Since several targets can be in a given build_file, + # separate them in subdirectories using the build_file basename + # and the target_name. + reldir = mozpath.relpath(mozpath.dirname(build_file), + mozpath.dirname(path)) + subdir = '%s_%s' % ( + mozpath.splitext(mozpath.basename(build_file))[0], + target_name, + ) + # Emit a context for each target. + context = GypContext(config, mozpath.relpath( + mozpath.join(output, reldir, subdir), config.topobjdir)) + context.add_source(mozpath.abspath(build_file)) # The list of included files returned by gyp are relative to build_file - included_files = [mozpath.abspath(mozpath.join(mozpath.dirname(build_file), f)) - for f in data[build_file]['included_files']] - # Emit a sandbox for each target. - sandbox = GypSandbox(mozpath.abspath(build_file), included_files) - sandbox.config = config - - with sandbox.allow_all_writes() as d: - topsrcdir = config.topsrcdir - relsrcdir = d['RELATIVEDIR'] = mozpath.relpath(mozpath.dirname(build_file), config.topsrcdir) - d['SRCDIR'] = mozpath.join(topsrcdir, relsrcdir) - - # Each target is given its own objdir. The base of that objdir - # is derived from the relative path from the root gyp file path - # to the current build_file, placed under the given output - # directory. Since several targets can be in a given build_file, - # separate them in subdirectories using the build_file basename - # and the target_name. 
- reldir = mozpath.relpath(mozpath.dirname(build_file), - mozpath.dirname(path)) - subdir = '%s_%s' % ( - mozpath.splitext(mozpath.basename(build_file))[0], - target_name, - ) - d['OBJDIR'] = mozpath.join(output, reldir, subdir) - d['IS_GYP_DIR'] = True + for f in data[build_file]['included_files']: + context.add_source(mozpath.abspath(mozpath.join( + mozpath.dirname(build_file), f))) + + context['IS_GYP_DIR'] = True spec = targets[target] @@ -154,16 +161,15 @@ def read_from_gyp(config, path, output, vars, non_unified_sources = set()): if spec['type'] == 'none': continue elif spec['type'] == 'static_library': - sandbox['FORCE_STATIC_LIB'] = True # Remove leading 'lib' from the target_name if any, and use as # library name. name = spec['target_name'] if name.startswith('lib'): name = name[3:] - # The sandbox expects an unicode string. - sandbox['LIBRARY_NAME'] = name.decode('utf-8') + # The context expects an unicode string. + context['LIBRARY_NAME'] = name.decode('utf-8') # gyp files contain headers and asm sources in sources lists. 
- sources = set(mozpath.normpath(mozpath.join(sandbox['SRCDIR'], f)) + sources = set(mozpath.normpath(mozpath.join(context.srcdir, f)) for f in spec.get('sources', []) if mozpath.splitext(f)[-1] != '.h') asm_sources = set(f for f in sources if f.endswith('.S')) @@ -171,31 +177,30 @@ def read_from_gyp(config, path, output, vars, non_unified_sources = set()): unified_sources = sources - non_unified_sources - asm_sources sources -= unified_sources all_sources |= sources - # The sandbox expects alphabetical order when adding sources - sandbox['SOURCES'] = alphabetical_sorted(sources) - sandbox['UNIFIED_SOURCES'] = alphabetical_sorted(unified_sources) + # The context expects alphabetical order when adding sources + context['SOURCES'] = alphabetical_sorted(sources) + context['UNIFIED_SOURCES'] = alphabetical_sorted(unified_sources) for define in target_conf.get('defines', []): if '=' in define: name, value = define.split('=', 1) - sandbox['DEFINES'][name] = value + context['DEFINES'][name] = value else: - sandbox['DEFINES'][define] = True + context['DEFINES'][define] = True for include in target_conf.get('include_dirs', []): - sandbox['LOCAL_INCLUDES'] += [include] + context['LOCAL_INCLUDES'] += [include] - with sandbox.allow_all_writes() as d: - d['EXTRA_ASSEMBLER_FLAGS'] = target_conf.get('asflags_mozilla', []) - d['EXTRA_COMPILE_FLAGS'] = target_conf.get('cflags_mozilla', []) + context['EXTRA_ASSEMBLER_FLAGS'] = target_conf.get('asflags_mozilla', []) + context['EXTRA_COMPILE_FLAGS'] = target_conf.get('cflags_mozilla', []) else: # Ignore other types than static_library because we don't have # anything using them, and we're not testing them. They can be # added when that becomes necessary. 
raise NotImplementedError('Unsupported gyp target type: %s' % spec['type']) - sandbox.execution_time = time.time() - time_start - yield sandbox + context.execution_time = time.time() - time_start + yield context time_start = time.time() # remainder = non_unified_sources - all_sources # if remainder: diff --git a/python/mozbuild/mozbuild/frontend/mach_commands.py b/python/mozbuild/mozbuild/frontend/mach_commands.py index 7cc967dd9ec38..1ba8ec2fef8ae 100644 --- a/python/mozbuild/mozbuild/frontend/mach_commands.py +++ b/python/mozbuild/mozbuild/frontend/mach_commands.py @@ -34,7 +34,7 @@ def reference(self, symbol, name_only=False): variable_reference, ) - import mozbuild.frontend.sandbox_symbols as m + import mozbuild.frontend.context as m if name_only: for s in sorted(m.VARIABLES.keys()): diff --git a/python/mozbuild/mozbuild/frontend/reader.py b/python/mozbuild/mozbuild/frontend/reader.py index 0a344b4490596..7f5c7d1ba4abc 100644 --- a/python/mozbuild/mozbuild/frontend/reader.py +++ b/python/mozbuild/mozbuild/frontend/reader.py @@ -9,13 +9,8 @@ In terms of code architecture, the main interface is BuildReader. BuildReader starts with a root mozbuild file. It creates a new execution environment for -this file, which is represented by the Sandbox class. The Sandbox class is what -defines what is allowed to execute in an individual mozbuild file. The Sandbox -consists of a local and global namespace, which are modeled by the -LocalNamespace and GlobalNamespace classes, respectively. The global namespace -contains all of the takeaway information from the execution. The local -namespace is for throwaway local variables and its contents are discarded after -execution. +this file, which is represented by the Sandbox class. The Sandbox class is used +to fill a Context, representing the output of an individual mozbuild file. The The BuildReader contains basic logic for traversing a tree of mozbuild files. 
It does this by examining specific variables populated during execution. @@ -55,10 +50,12 @@ Sandbox, ) -from .sandbox_symbols import ( +from .context import ( + Context, FUNCTIONS, VARIABLES, DEPRECATION_HINTS, + SPECIAL_VARIABLES, ) if sys.version_info.major == 2: @@ -112,42 +109,39 @@ class MozbuildSandbox(Sandbox): We expose a few useful functions and expose the set of variables defining Mozilla's build system. + + context is a Context instance. + + metadata is a dict of metadata that can be used during the sandbox + evaluation. """ - def __init__(self, config, reldir, metadata={}): - """Create an empty mozbuild Sandbox. + def __init__(self, context, metadata={}): + assert isinstance(context, Context) - config is a ConfigStatus instance (the output of configure). reldir is - the path of the directory containing the main mozbuild file that is - being executed, relative to the topsrcdir. It is used to compute - encountered relative paths. - """ - Sandbox.__init__(self, allowed_variables=VARIABLES) + Sandbox.__init__(self, context) self._log = logging.getLogger(__name__) self.metadata = dict(metadata) - - self.config = config - topobjdir = config.topobjdir - topsrcdir = config.topsrcdir - - with self._globals.allow_all_writes() as d: - d['TOPSRCDIR'] = topsrcdir - d['TOPOBJDIR'] = topobjdir - d['RELATIVEDIR'] = reldir - d['SRCDIR'] = mozpath.join(topsrcdir, reldir).rstrip('/') - d['OBJDIR'] = mozpath.join(topobjdir, reldir).rstrip('/') - - d['CONFIG'] = ReadOnlyDefaultDict(lambda: None, - self.config.substs_unicode) - - # Register functions. - for name, func in FUNCTIONS.items(): - d[name] = getattr(self, func[0]) - - # Initialize the exports that we need in the global. 
- extra_vars = self.metadata.get('exports', dict()) - self._globals.update(extra_vars) + exports = self.metadata.get('exports', {}) + self.exports = set(exports.keys()) + context.update(exports) + + def __getitem__(self, key): + if key in SPECIAL_VARIABLES: + return SPECIAL_VARIABLES[key][0](self._context) + if key in FUNCTIONS: + return getattr(self, FUNCTIONS[key][0]) + return Sandbox.__getitem__(self, key) + + def __setitem__(self, key, value): + if key in SPECIAL_VARIABLES or key in FUNCTIONS: + raise KeyError() + if key in self.exports: + self._context[key] = value + self.exports.remove(key) + return + Sandbox.__setitem__(self, key, value) def normalize_path(self, path, filesystem_absolute=False, srcdir=None): """Normalizes paths. @@ -164,26 +158,28 @@ def normalize_path(self, path, filesystem_absolute=False, srcdir=None): if os.path.isabs(path): if filesystem_absolute: return path - roots = [self.config.topsrcdir] - if self.config.external_source_dir: - roots.append(self.config.external_source_dir) + roots = [self._context.config.topsrcdir] + if self._context.config.external_source_dir: + roots.append(self._context.config.external_source_dir) for root in roots: - # mozpath.join would ignore the self.config.topsrcdir argument - # if we passed in the absolute path, so omit the leading / + # mozpath.join would ignore the self._context.config.topsrcdir + # argument if we passed in the absolute path, so omit the + # leading / p = mozpath.normpath(mozpath.join(root, path[1:])) if os.path.exists(p): return p # mozpath.join would ignore the self.condig.topsrcdir argument if # we passed in the absolute path, so omit the leading / return mozpath.normpath( - mozpath.join(self.config.topsrcdir, path[1:])) + mozpath.join(self._context.config.topsrcdir, path[1:])) elif srcdir: return mozpath.normpath(mozpath.join(srcdir, path)) elif len(self._execution_stack): return mozpath.normpath(mozpath.join( mozpath.dirname(self._execution_stack[-1]), path)) else: - return 
mozpath.normpath(mozpath.join(self.config.topsrcdir, path)) + return mozpath.normpath( + mozpath.join(self._context.config.topsrcdir, path)) def exec_file(self, path, filesystem_absolute=False): """Override exec_file to normalize paths and restrict file loading. @@ -196,7 +192,7 @@ def exec_file(self, path, filesystem_absolute=False): # protection, so it is omitted. normalized_path = self.normalize_path(path, filesystem_absolute=filesystem_absolute) - if not is_read_allowed(normalized_path, self.config): + if not is_read_allowed(normalized_path, self._context.config): raise SandboxLoadError(list(self._execution_stack), sys.exc_info()[2], illegal_path=path) @@ -273,10 +269,10 @@ def _export(self, varname): raise Exception('Variable has already been exported: %s' % varname) try: - # Doing a regular self._globals[varname] causes a set as a side + # Doing a regular self._context[varname] causes a set as a side # effect. By calling the dict method instead, we don't have any # side effects. - exports[varname] = dict.__getitem__(self._globals, varname) + exports[varname] = dict.__getitem__(self._context, varname) except KeyError: self.last_name_error = KeyError('global_ns', 'get_unknown', varname) raise self.last_name_error @@ -305,9 +301,9 @@ def _error(self, message): class SandboxValidationError(Exception): """Represents an error encountered when validating sandbox results.""" - def __init__(self, message, sandbox): + def __init__(self, message, context): Exception.__init__(self, message) - self.sandbox = sandbox + self.context = context def __str__(self): s = StringIO() @@ -318,7 +314,7 @@ def __str__(self): s.write('The error occurred while processing the following file or ') s.write('one of the files it includes:\n') s.write('\n') - s.write(' %s/moz.build\n' % self.sandbox['SRCDIR']) + s.write(' %s/moz.build\n' % self.context.srcdir) s.write('\n') s.write('The error occurred when validating the result of ') @@ -631,9 +627,9 @@ class BuildReader(object): The reader 
can optionally call a callable after each sandbox is evaluated but before its evaluated content is processed. This gives callers the - opportunity to modify sandboxes before side-effects occur from their - content. This callback receives the ``Sandbox`` that was evaluated. The - return value is ignored. + opportunity to modify contexts before side-effects occur from their + content. This callback receives the ``Context`` containing the result of + each sandbox evaluation. Its return value is ignored. """ def __init__(self, config, sandbox_post_eval_cb=None): @@ -650,8 +646,8 @@ def read_topsrcdir(self): This starts with the tree's top-most moz.build file and descends into all linked moz.build files until all relevant files have been evaluated. - This is a generator of Sandbox instances. As each moz.build file is - read, a new Sandbox is created and emitted. + This is a generator of Context instances. As each moz.build file is + read, a new Context is created and emitted. """ path = mozpath.join(self.config.topsrcdir, 'moz.build') return self.read_mozbuild(path, self.config, read_tiers=True, @@ -664,7 +660,7 @@ def walk_topsrcdir(self): filesystem walk to discover every moz.build file rather than relying on data from executed moz.build files to drive traversal. - This is a generator of Sandbox instances. + This is a generator of Context instances. """ # In the future, we may traverse moz.build files by looking # for DIRS references in the AST, even if a directory is added behind @@ -706,10 +702,9 @@ def read_mozbuild(self, path, config, read_tiers=False, directories and files per variable values. Arbitrary metadata in the form of a dict can be passed into this - function. This metadata will be attached to the emitted output. This - feature is intended to facilitate the build reader injecting state and - annotations into moz.build files that is independent of the sandbox's - execution context. + function. 
This feature is intended to facilitate the build reader + injecting state and annotations into moz.build files that is + independent of the sandbox's execution context. Traversal is performed depth first (for no particular reason). """ @@ -778,35 +773,36 @@ def _read_mozbuild(self, path, config, read_tiers, filesystem_absolute, config.topobjdir = topobjdir config.external_source_dir = None - sandbox = MozbuildSandbox(config, reldir, metadata=metadata) + context = Context(VARIABLES, config) + sandbox = MozbuildSandbox(context, metadata=metadata) sandbox.exec_file(path, filesystem_absolute=filesystem_absolute) - sandbox.execution_time = time.time() - time_start + context.execution_time = time.time() - time_start if self._sandbox_post_eval_cb: - self._sandbox_post_eval_cb(sandbox) + self._sandbox_post_eval_cb(context) # We first collect directories populated in variables. dir_vars = ['DIRS'] - if sandbox.config.substs.get('ENABLE_TESTS', False) == '1': + if context.config.substs.get('ENABLE_TESTS', False) == '1': dir_vars.append('TEST_DIRS') - dirs = [(v, sandbox[v]) for v in dir_vars if v in sandbox] + dirs = [(v, context[v]) for v in dir_vars if v in context] curdir = mozpath.dirname(path) - gyp_sandboxes = [] - for target_dir in sandbox['GYP_DIRS']: - gyp_dir = sandbox['GYP_DIRS'][target_dir] + gyp_contexts = [] + for target_dir in context['GYP_DIRS']: + gyp_dir = context['GYP_DIRS'][target_dir] for v in ('input', 'variables'): if not getattr(gyp_dir, v): raise SandboxValidationError('Missing value for ' - 'GYP_DIRS["%s"].%s' % (target_dir, v), sandbox) + 'GYP_DIRS["%s"].%s' % (target_dir, v), context) - # The make backend assumes sandboxes for sub-directories are - # emitted after their parent, so accumulate the gyp sandboxes. 
- # We could emit the parent sandbox before processing gyp - # configuration, but we need to add the gyp objdirs to that sandbox + # The make backend assumes contexts for sub-directories are + # emitted after their parent, so accumulate the gyp contexts. + # We could emit the parent context before processing gyp + # configuration, but we need to add the gyp objdirs to that context # first. from .gyp_reader import read_from_gyp non_unified_sources = set() @@ -814,27 +810,27 @@ def _read_mozbuild(self, path, config, read_tiers, filesystem_absolute, source = mozpath.normpath(mozpath.join(curdir, s)) if not os.path.exists(source): raise SandboxValidationError('Cannot find %s.' % source, - sandbox) + context) non_unified_sources.add(source) - for gyp_sandbox in read_from_gyp(sandbox.config, + for gyp_context in read_from_gyp(context.config, mozpath.join(curdir, gyp_dir.input), - mozpath.join(sandbox['OBJDIR'], + mozpath.join(context.objdir, target_dir), gyp_dir.variables, non_unified_sources = non_unified_sources): - gyp_sandbox.update(gyp_dir.sandbox_vars) - gyp_sandboxes.append(gyp_sandbox) + gyp_context.update(gyp_dir.sandbox_vars) + gyp_contexts.append(gyp_context) - for gyp_sandbox in gyp_sandboxes: + for gyp_context in gyp_contexts: if self._sandbox_post_eval_cb: - self._sandbox_post_eval_cb(gyp_sandbox) + self._sandbox_post_eval_cb(gyp_context) - sandbox['DIRS'].append(mozpath.relpath(gyp_sandbox['OBJDIR'], sandbox['OBJDIR'])) + context['DIRS'].append(mozpath.relpath(gyp_context.objdir, context.objdir)) - yield sandbox + yield context - for gyp_sandbox in gyp_sandboxes: - yield gyp_sandbox + for gyp_context in gyp_contexts: + yield gyp_context # Traverse into referenced files. 
@@ -847,7 +843,7 @@ def _read_mozbuild(self, path, config, read_tiers, filesystem_absolute, if d in recurse_info: raise SandboxValidationError( 'Directory (%s) registered multiple times in %s' % ( - d, var), sandbox) + d, var), context) recurse_info[d] = {} if 'exports' in sandbox.metadata: @@ -855,19 +851,19 @@ def _read_mozbuild(self, path, config, read_tiers, filesystem_absolute, recurse_info[d]['exports'] = dict(sandbox.metadata['exports']) # We also have tiers whose members are directories. - if 'TIERS' in sandbox: + if 'TIERS' in context: if not read_tiers: raise SandboxValidationError( - 'TIERS defined but it should not be', sandbox) + 'TIERS defined but it should not be', context) - for tier, values in sandbox['TIERS'].items(): + for tier, values in context['TIERS'].items(): # We don't descend into external directories because external by # definition is external to the build system. for d in values['regular']: if d in recurse_info: raise SandboxValidationError( 'Tier directory (%s) registered multiple ' - 'times in %s' % (d, tier), sandbox) + 'times in %s' % (d, tier), context) recurse_info[d] = {'check_external': True} for relpath, child_metadata in recurse_info.items(): @@ -880,15 +876,15 @@ def _read_mozbuild(self, path, config, read_tiers, filesystem_absolute, # because it isn't necessary. If there are symlinks in the srcdir, # that's not our problem. We're not a hosted application: we don't # need to worry about security too much. 
- if not is_read_allowed(child_path, sandbox.config): + if not is_read_allowed(child_path, context.config): raise SandboxValidationError( 'Attempting to process file outside of allowed paths: %s' % - child_path, sandbox) + child_path, context) if not descend: continue - for res in self.read_mozbuild(child_path, sandbox.config, + for res in self.read_mozbuild(child_path, context.config, read_tiers=False, filesystem_absolute=True, metadata=child_metadata): yield res diff --git a/python/mozbuild/mozbuild/frontend/sandbox.py b/python/mozbuild/mozbuild/frontend/sandbox.py index aa8b935894d35..5d2f21e6f089c 100644 --- a/python/mozbuild/mozbuild/frontend/sandbox.py +++ b/python/mozbuild/mozbuild/frontend/sandbox.py @@ -8,11 +8,8 @@ highly-controlled environment. The main class is `Sandbox`. This provides an execution environment for Python -code. - -The behavior inside sandboxes is mostly regulated by the `GlobalNamespace` and -`LocalNamespace` classes. These represent the global and local namespaces in -the sandbox, respectively. +code and is used to fill a Context instance for the takeaway information from +the execution. Code in this module takes a different approach to exception handling compared to what you'd see elsewhere in Python. Arguments to built-in exceptions like @@ -29,12 +26,7 @@ from contextlib import contextmanager from mozbuild.util import ReadOnlyDict - - -class SandboxDerivedValue(object): - """Classes deriving from this one receive a special treatment in a - sandbox GlobalNamespace. See GlobalNamespace documentation. - """ +from context import Context def alphabetical_sorted(iterable, cmp=None, key=lambda x: x.lower(), @@ -45,213 +37,6 @@ def alphabetical_sorted(iterable, cmp=None, key=lambda x: x.lower(), return sorted(iterable, cmp, key, reverse) -class GlobalNamespace(dict): - """Represents the globals namespace in a sandbox. - - This is a highly specialized dictionary employing light magic. 
- - At the crux we have the concept of a restricted keys set. Only very - specific keys may be retrieved or mutated. The rules are as follows: - - - The '__builtins__' key is hardcoded and is read-only. - - The set of variables that can be assigned or accessed during - execution is passed into the constructor. - - When variables are assigned to, we verify assignment is allowed. Assignment - is allowed if the variable is known (set defined at constructor time) and - if the value being assigned is the expected type (also defined at - constructor time). - - When variables are read, we first try to read the existing value. If a - value is not found and it is defined in the allowed variables set, we - return a new instance of the class for that variable. We don't assign - default instances until they are accessed because this makes debugging - the end-result much simpler. Instead of a data structure with lots of - empty/default values, you have a data structure with only the values - that were read or touched. - - Instances of variables classes are created by invoking class_name(), - except when class_name derives from SandboxDerivedValue, in which - case class_name(instance_of_the_global_namespace) is invoked. - A value is added to those calls when instances are created during - assignment (setitem). - - Instantiators of this class are given a backdoor to perform setting of - arbitrary values. e.g. - - ns = GlobalNamespace() - with ns.allow_all_writes(): - ns['foo'] = True - - ns['bar'] = True # KeyError raised. - """ - - # The default set of builtins. - BUILTINS = ReadOnlyDict({ - # Only real Python built-ins should go here. - 'None': None, - 'False': False, - 'True': True, - 'sorted': alphabetical_sorted, - 'int': int, - }) - - def __init__(self, allowed_variables=None, builtins=None): - """Create a new global namespace having specific variables. - - allowed_variables is a dict of the variables that can be queried and - mutated. 
Keys in this dict are the strings representing keys in this - namespace which are valid. Values are tuples of stored type, assigned - type, default value, and a docstring describing the purpose of the variable. - - builtins is the value to use for the special __builtins__ key. If not - defined, the BUILTINS constant attached to this class is used. The - __builtins__ object is read-only. - """ - builtins = builtins or self.BUILTINS - - assert isinstance(builtins, ReadOnlyDict) - - dict.__init__(self, {'__builtins__': builtins}) - - self._allowed_variables = allowed_variables or {} - - # We need to record this because it gets swallowed as part of - # evaluation. - self.last_name_error = None - - self._allow_all_writes = False - - self._allow_one_mutation = set() - - def __getitem__(self, name): - try: - return dict.__getitem__(self, name) - except KeyError: - pass - - # The variable isn't present yet. Fall back to VARIABLES. - default = self._allowed_variables.get(name, None) - if default is None: - self.last_name_error = KeyError('global_ns', 'get_unknown', name) - raise self.last_name_error - - # If the default is specifically a lambda (or, rather, any function--but - # not a class that can be called), then it is actually a rule to - # generate the default that should be used. - default = default[0] - if issubclass(default, SandboxDerivedValue): - value = default(self) - else: - value = default() - - dict.__setitem__(self, name, value) - return dict.__getitem__(self, name) - - def __setitem__(self, name, value): - if self._allow_all_writes: - dict.__setitem__(self, name, value) - self._allow_one_mutation.add(name) - return - - # Forbid assigning over a previously set value. 
Interestingly, when - # doing FOO += ['bar'], python actually does something like: - # foo = namespace.__getitem__('FOO') - # foo.__iadd__(['bar']) - # namespace.__setitem__('FOO', foo) - # This means __setitem__ is called with the value that is already - # in the dict, when doing +=, which is permitted. - if name in self._allow_one_mutation: - self._allow_one_mutation.remove(name) - elif name in self and dict.__getitem__(self, name) is not value: - raise KeyError('global_ns', 'reassign', name) - - # We don't need to check for name.isupper() here because LocalNamespace - # only sends variables our way if isupper() is True. - stored_type, input_type, docs, tier = \ - self._allowed_variables.get(name, (None, None, None, None)) - - # Variable is unknown. - if stored_type is None: - self.last_name_error = KeyError('global_ns', 'set_unknown', name, - value) - raise self.last_name_error - - # If the incoming value is not the type we store, we try to convert - # it to that type. This relies on proper coercion rules existing. This - # is the responsibility of whoever defined the symbols: a type should - # not be in the allowed set if the constructor function for the stored - # type does not accept an instance of that type. - if not isinstance(value, stored_type): - if not isinstance(value, input_type): - self.last_name_error = ValueError('global_ns', 'set_type', name, - value, input_type) - raise self.last_name_error - - if issubclass(stored_type, SandboxDerivedValue): - value = stored_type(self, value) - else: - value = stored_type(value) - - dict.__setitem__(self, name, value) - - @contextmanager - def allow_all_writes(self): - """Allow any variable to be written to this instance. - - This is used as a context manager. When activated, all writes - (__setitem__ calls) are allowed. When the context manager is exited, - the instance goes back to its default behavior of only allowing - whitelisted mutations. 
- """ - self._allow_all_writes = True - yield self - self._allow_all_writes = False - - # dict.update doesn't call our __setitem__, so we have to override it. - def update(self, other): - for name, value in other.items(): - self.__setitem__(name, value) - - -class LocalNamespace(dict): - """Represents the locals namespace in a Sandbox. - - This behaves like a dict except with some additional behavior tailored - to our sandbox execution model. - - Under normal rules of exec(), doing things like += could have interesting - consequences. Keep in mind that a += is really a read, followed by the - creation of a new variable, followed by a write. If the read came from the - global namespace, then the write would go to the local namespace, resulting - in fragmentation. This is not desired. - - LocalNamespace proxies reads and writes for global-looking variables - (read: UPPERCASE) to the global namespace. This means that attempting to - read or write an unknown variable results in exceptions raised from the - GlobalNamespace. - """ - def __init__(self, global_ns): - """Create a local namespace associated with a GlobalNamespace.""" - dict.__init__({}) - - self._globals = global_ns - self.last_name_error = None - - def __getitem__(self, name): - if name.isupper(): - return self._globals[name] - - return dict.__getitem__(self, name) - - def __setitem__(self, name, value): - if name.isupper(): - self._globals[name] = value - return - - dict.__setitem__(self, name, value) - - class SandboxError(Exception): def __init__(self, file_stack): self.file_stack = file_stack @@ -287,11 +72,12 @@ def __init__(self, file_stack, trace, illegal_path=None, read_error=None): self.read_error = read_error -class Sandbox(object): +class Sandbox(dict): """Represents a sandbox for executing Python code. - This class both provides a sandbox for execution of a single mozbuild - frontend file as well as an interface to the results of that execution. 
+ This class provides a sandbox for execution of a single mozbuild frontend + file. The results of that execution is stored in the Context instance given + as the ``context`` argument. Sandbox is effectively a glorified wrapper around compile() + exec(). You point it at some Python code and it executes it. The main difference from @@ -301,29 +87,41 @@ class Sandbox(object): prevents executed code from doing things like import modules, open files, etc. - Sandboxes are bound to a mozconfig instance. These objects are produced by - the output of configure. + Sandbox instances act as global namespace for the sandboxed execution + itself. They shall not be used to access the results of the execution. + Those results are available in the given Context instance after execution. - Sandbox instances can be accessed like dictionaries to facilitate result - retrieval. e.g. foo = sandbox['FOO']. Direct assignment is not allowed. + The Sandbox itself is responsible for enforcing rules such as forbidding + reassignment of variables. - Each sandbox has associated with it a GlobalNamespace and LocalNamespace. - Only data stored in the GlobalNamespace is retrievable via the dict - interface. This is because the local namespace should be irrelevant: it - should only contain throwaway variables. + Implementation note: Sandbox derives from dict because exec() insists that + what it is given for namespaces is a dict. """ - def __init__(self, allowed_variables=None, builtins=None): - """Initialize a Sandbox ready for execution. + # The default set of builtins. + BUILTINS = ReadOnlyDict({ + # Only real Python built-ins should go here. + 'None': None, + 'False': False, + 'True': True, + 'sorted': alphabetical_sorted, + 'int': int, + }) - The arguments are proxied to GlobalNamespace.__init__. + def __init__(self, context, builtins=None): + """Initialize a Sandbox ready for execution. 
""" - self._globals = GlobalNamespace(allowed_variables=allowed_variables, - builtins=builtins) - self._allowed_variables = allowed_variables - self._locals = LocalNamespace(self._globals) + self._builtins = builtins or self.BUILTINS + dict.__setitem__(self, '__builtins__', self._builtins) + + assert isinstance(self._builtins, ReadOnlyDict) + assert isinstance(context, Context) + + self._context = context self._execution_stack = [] - self.main_path = None - self.all_paths = set() + + # We need to record this because it gets swallowed as part of + # evaluation. + self._last_name_error = None def exec_file(self, path): """Execute code at a path in the sandbox. @@ -343,7 +141,7 @@ def exec_file(self, path): self.exec_source(source, path) - def exec_source(self, source, path): + def exec_source(self, source, path=''): """Execute Python code within a string. The passed string should contain Python code to be executed. The string @@ -355,10 +153,8 @@ def exec_source(self, source, path): """ self._execution_stack.append(path) - if self.main_path is None: - self.main_path = path - - self.all_paths.add(path) + if path: + self._context.add_source(path) # We don't have to worry about bytecode generation here because we are # too low-level for that. However, we could add bytecode generation via @@ -368,23 +164,24 @@ def exec_source(self, source, path): # compile() inherits the __future__ from the module by default. We # do want Unicode literals. code = compile(source, path, 'exec') - exec(code, self._globals, self._locals) + # We use ourself as the global namespace for the execution. There + # is no need for a separate local namespace as moz.build execution + # is flat, namespace-wise. + exec(code, self) except SandboxError as e: raise e except NameError as e: - # A NameError is raised when a local or global could not be found. + # A NameError is raised when a variable could not be found. # The original KeyError has been dropped by the interpreter. 
- # However, we should have it cached in our namespace instances! + # However, we should have it cached in our instance! # Unless a script is doing something wonky like catching NameError # itself (that would be silly), if there is an exception on the # global namespace, that's our error. actual = e - if self._globals.last_name_error is not None: - actual = self._globals.last_name_error - elif self._locals.last_name_error is not None: - actual = self._locals.last_name_error + if self._last_name_error is not None: + actual = self._last_name_error raise SandboxExecutionError(list(self._execution_stack), type(actual), actual, sys.exc_info()[2]) @@ -398,26 +195,43 @@ def exec_source(self, source, path): finally: self._execution_stack.pop() - # Dict interface proxies reads to global namespace. - def __len__(self): - return len(self._globals) + def __getitem__(self, key): + if key.isupper(): + try: + return self._context[key] + except Exception as e: + self._last_name_error = e + raise + + return dict.__getitem__(self, key) + + def __setitem__(self, key, value): + if key in self._builtins or key == '__builtins__': + raise KeyError('Cannot reassign builtins') + + if key.isupper(): + # Forbid assigning over a previously set value. Interestingly, when + # doing FOO += ['bar'], python actually does something like: + # foo = namespace.__getitem__('FOO') + # foo.__iadd__(['bar']) + # namespace.__setitem__('FOO', foo) + # This means __setitem__ is called with the value that is already + # in the dict, when doing +=, which is permitted. 
+ if key in self._context and self._context[key] is not value: + raise KeyError('global_ns', 'reassign', key) + + self._context[key] = value + else: + dict.__setitem__(self, key, value) - def __getitem__(self, name): - return self._globals[name] + def get(self, key, default=None): + raise NotImplementedError('Not supported') - def __iter__(self): - return iter(self._globals) + def __len__(self): + raise NotImplementedError('Not supported') - def iterkeys(self): - return self.__iter__() + def __iter__(self): + raise NotImplementedError('Not supported') def __contains__(self, key): - return key in self._globals - - def get(self, key, default=None): - return self._globals.get(key, default) - - def get_affected_tiers(self): - tiers = (self._allowed_variables[key][3] for key in self - if key in self._allowed_variables) - return set(tier for tier in tiers if tier) + raise NotImplementedError('Not supported') diff --git a/python/mozbuild/mozbuild/sphinx.py b/python/mozbuild/mozbuild/sphinx.py index eaf3a9ea7cb02..e6c3ba04151d2 100644 --- a/python/mozbuild/mozbuild/sphinx.py +++ b/python/mozbuild/mozbuild/sphinx.py @@ -75,7 +75,7 @@ def variable_reference(v, st_type, in_type, doc, tier): return lines -def special_reference(v, typ, doc): +def special_reference(v, func, typ, doc): lines = [ v, '-' * len(v), diff --git a/python/mozbuild/mozbuild/test/frontend/test_context.py b/python/mozbuild/mozbuild/test/frontend/test_context.py new file mode 100644 index 0000000000000..14a048bbd400e --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/test_context.py @@ -0,0 +1,133 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +import unittest + +from mozunit import main + +from mozbuild.frontend.context import ( + Context, + FUNCTIONS, + SPECIAL_VARIABLES, + VARIABLES, +) + + +class TestContext(unittest.TestCase): + def test_defaults(self): + test = Context({ + 'foo': (int, int, '', None), + 'bar': (bool, bool, '', None), + 'baz': (dict, dict, '', None), + }) + + self.assertEqual(test.keys(), []) + + self.assertEqual(test['foo'], 0) + + self.assertEqual(set(test.keys()), { 'foo' }) + + self.assertEqual(test['bar'], False) + + self.assertEqual(set(test.keys()), { 'foo', 'bar' }) + + self.assertEqual(test['baz'], {}) + + self.assertEqual(set(test.keys()), { 'foo', 'bar', 'baz' }) + + with self.assertRaises(KeyError): + test['qux'] + + self.assertEqual(set(test.keys()), { 'foo', 'bar', 'baz' }) + + def test_type_check(self): + test = Context({ + 'foo': (int, int, '', None), + 'baz': (dict, list, '', None), + }) + + test['foo'] = 5 + + self.assertEqual(test['foo'], 5) + + with self.assertRaises(ValueError): + test['foo'] = {} + + self.assertEqual(test['foo'], 5) + + with self.assertRaises(KeyError): + test['bar'] = True + + test['baz'] = [('a', 1), ('b', 2)] + + self.assertEqual(test['baz'], { 'a': 1, 'b': 2 }) + + def test_update(self): + test = Context({ + 'foo': (int, int, '', None), + 'bar': (bool, bool, '', None), + 'baz': (dict, list, '', None), + }) + + self.assertEqual(test.keys(), []) + + with self.assertRaises(ValueError): + test.update(bar=True, foo={}) + + self.assertEqual(test.keys(), []) + + test.update(bar=True, foo=1) + + self.assertEqual(set(test.keys()), { 'foo', 'bar' }) + self.assertEqual(test['foo'], 1) + self.assertEqual(test['bar'], True) + + test.update([('bar', False), ('foo', 2)]) + self.assertEqual(test['foo'], 2) + self.assertEqual(test['bar'], False) + + test.update([('foo', 0), ('baz', { 'a': 1, 'b': 2 })]) + self.assertEqual(test['foo'], 0) + self.assertEqual(test['baz'], { 'a': 1, 'b': 2 }) + + test.update([('foo', 42), ('baz', [('c', 3), ('d', 4)])]) + 
self.assertEqual(test['foo'], 42) + self.assertEqual(test['baz'], { 'c': 3, 'd': 4 }) + + +class TestSymbols(unittest.TestCase): + def _verify_doc(self, doc): + # Documentation should be of the format: + # """SUMMARY LINE + # + # EXTRA PARAGRAPHS + # """ + + self.assertNotIn('\r', doc) + + lines = doc.split('\n') + + # No trailing whitespace. + for line in lines[0:-1]: + self.assertEqual(line, line.rstrip()) + + self.assertGreater(len(lines), 0) + self.assertGreater(len(lines[0].strip()), 0) + + # Last line should be empty. + self.assertEqual(lines[-1].strip(), '') + + def test_documentation_formatting(self): + for typ, inp, doc, tier in VARIABLES.values(): + self._verify_doc(doc) + + for attr, args, doc in FUNCTIONS.values(): + self._verify_doc(doc) + + for func, typ, doc in SPECIAL_VARIABLES.values(): + self._verify_doc(doc) + + +if __name__ == '__main__': + main() diff --git a/python/mozbuild/mozbuild/test/frontend/test_emitter.py b/python/mozbuild/mozbuild/test/frontend/test_emitter.py index 9a4bb30084abf..59572abe5585f 100644 --- a/python/mozbuild/mozbuild/test/frontend/test_emitter.py +++ b/python/mozbuild/mozbuild/test/frontend/test_emitter.py @@ -89,8 +89,8 @@ def test_dirs_traversal_simple(self): self.assertIsInstance(o, DirectoryTraversal) self.assertEqual(o.test_dirs, []) self.assertEqual(len(o.tier_dirs), 0) - self.assertTrue(os.path.isabs(o.sandbox_main_path)) - self.assertEqual(len(o.sandbox_all_paths), 1) + self.assertTrue(os.path.isabs(o.context_main_path)) + self.assertEqual(len(o.context_all_paths), 1) reldirs = [o.relativedir for o in objs] self.assertEqual(reldirs, ['', 'foo', 'foo/biz', 'bar']) diff --git a/python/mozbuild/mozbuild/test/frontend/test_namespaces.py b/python/mozbuild/mozbuild/test/frontend/test_namespaces.py index 7784e7f65755e..ef5b48367a3e2 100644 --- a/python/mozbuild/mozbuild/test/frontend/test_namespaces.py +++ b/python/mozbuild/mozbuild/test/frontend/test_namespaces.py @@ -8,24 +8,16 @@ from mozunit import main -from 
mozbuild.frontend.sandbox import ( - GlobalNamespace, - LocalNamespace, +from mozbuild.frontend.context import ( + Context, + VARIABLES, ) -from mozbuild.frontend.sandbox_symbols import VARIABLES - - -class TestGlobalNamespace(unittest.TestCase): - def test_builtins(self): - ns = GlobalNamespace() - - self.assertIn('__builtins__', ns) - self.assertEqual(ns['__builtins__']['True'], True) +class TestContext(unittest.TestCase): def test_key_rejection(self): # Lowercase keys should be rejected during normal operation. - ns = GlobalNamespace(allowed_variables=VARIABLES) + ns = Context(allowed_variables=VARIABLES) with self.assertRaises(KeyError) as ke: ns['foo'] = True @@ -49,13 +41,13 @@ def test_key_rejection(self): def test_allowed_set(self): self.assertIn('DIRS', VARIABLES) - ns = GlobalNamespace(allowed_variables=VARIABLES) + ns = Context(allowed_variables=VARIABLES) ns['DIRS'] = ['foo'] self.assertEqual(ns['DIRS'], ['foo']) def test_value_checking(self): - ns = GlobalNamespace(allowed_variables=VARIABLES) + ns = Context(allowed_variables=VARIABLES) # Setting to a non-allowed type should not work. 
with self.assertRaises(ValueError) as ve: @@ -68,88 +60,14 @@ def test_value_checking(self): self.assertTrue(e[3]) self.assertEqual(e[4], list) - def test_allow_all_writes(self): - ns = GlobalNamespace(allowed_variables=VARIABLES) - - with ns.allow_all_writes() as d: - d['foo'] = True - self.assertTrue(d['foo']) - - with self.assertRaises(KeyError) as ke: - ns['bar'] = False - - self.assertEqual(ke.exception.args[1], 'set_unknown') - - ns['DIRS'] = [] - with self.assertRaises(KeyError) as ke: - ns['DIRS'] = [] - - e = ke.exception.args - self.assertEqual(e[0], 'global_ns') - self.assertEqual(e[1], 'reassign') - self.assertEqual(e[2], 'DIRS') - - with ns.allow_all_writes() as d: - d['DIST_SUBDIR'] = 'foo' - - self.assertEqual(ns['DIST_SUBDIR'], 'foo') - ns['DIST_SUBDIR'] = 'bar' - self.assertEqual(ns['DIST_SUBDIR'], 'bar') - with self.assertRaises(KeyError) as ke: - ns['DIST_SUBDIR'] = 'baz' - - e = ke.exception.args - self.assertEqual(e[0], 'global_ns') - self.assertEqual(e[1], 'reassign') - self.assertEqual(e[2], 'DIST_SUBDIR') - - self.assertTrue(d['foo']) - def test_key_checking(self): # Checking for existence of a key should not populate the key if it # doesn't exist. - g = GlobalNamespace(allowed_variables=VARIABLES) + g = Context(allowed_variables=VARIABLES) self.assertFalse('DIRS' in g) self.assertFalse('DIRS' in g) -class TestLocalNamespace(unittest.TestCase): - def test_locals(self): - g = GlobalNamespace(allowed_variables=VARIABLES) - l = LocalNamespace(g) - - l['foo'] = ['foo'] - self.assertEqual(l['foo'], ['foo']) - - l['foo'] += ['bar'] - self.assertEqual(l['foo'], ['foo', 'bar']) - - def test_global_proxy_reads(self): - g = GlobalNamespace(allowed_variables=VARIABLES) - g['DIRS'] = ['foo'] - - l = LocalNamespace(g) - - self.assertEqual(l['DIRS'], g['DIRS']) - - # Reads to missing UPPERCASE vars should result in KeyError. 
- with self.assertRaises(KeyError) as ke: - v = l['FOO'] - - e = ke.exception - self.assertEqual(e.args[0], 'global_ns') - self.assertEqual(e.args[1], 'get_unknown') - - def test_global_proxy_writes(self): - g = GlobalNamespace(allowed_variables=VARIABLES) - l = LocalNamespace(g) - - l['DIRS'] = ['foo'] - - self.assertEqual(l['DIRS'], ['foo']) - self.assertEqual(g['DIRS'], ['foo']) - - if __name__ == '__main__': main() diff --git a/python/mozbuild/mozbuild/test/frontend/test_reader.py b/python/mozbuild/mozbuild/test/frontend/test_reader.py index 8474aa1a211b3..824f9b000c579 100644 --- a/python/mozbuild/mozbuild/test/frontend/test_reader.py +++ b/python/mozbuild/mozbuild/test/frontend/test_reader.py @@ -47,9 +47,9 @@ def file_path(self, name, *args): def test_dirs_traversal_simple(self): reader = self.reader('traversal-simple') - sandboxes = list(reader.read_topsrcdir()) + contexts = list(reader.read_topsrcdir()) - self.assertEqual(len(sandboxes), 4) + self.assertEqual(len(contexts), 4) def test_dirs_traversal_no_descend(self): reader = self.reader('traversal-simple') @@ -57,16 +57,16 @@ def test_dirs_traversal_no_descend(self): path = mozpath.join(reader.config.topsrcdir, 'moz.build') self.assertTrue(os.path.exists(path)) - sandboxes = list(reader.read_mozbuild(path, reader.config, + contexts = list(reader.read_mozbuild(path, reader.config, filesystem_absolute=True, descend=False)) - self.assertEqual(len(sandboxes), 1) + self.assertEqual(len(contexts), 1) def test_dirs_traversal_all_variables(self): reader = self.reader('traversal-all-vars', enable_tests=True) - sandboxes = list(reader.read_topsrcdir()) - self.assertEqual(len(sandboxes), 3) + contexts = list(reader.read_topsrcdir()) + self.assertEqual(len(contexts), 3) def test_tier_subdir(self): # add_tier_dir() should fail when not in the top directory. @@ -79,15 +79,15 @@ def test_relative_dirs(self): # Ensure relative directories are traversed. 
reader = self.reader('traversal-relative-dirs') - sandboxes = list(reader.read_topsrcdir()) - self.assertEqual(len(sandboxes), 3) + contexts = list(reader.read_topsrcdir()) + self.assertEqual(len(contexts), 3) def test_repeated_dirs_ignored(self): # Ensure repeated directories are ignored. reader = self.reader('traversal-repeated-dirs') - sandboxes = list(reader.read_topsrcdir()) - self.assertEqual(len(sandboxes), 3) + contexts = list(reader.read_topsrcdir()) + self.assertEqual(len(contexts), 3) def test_outside_topsrcdir(self): # References to directories outside the topsrcdir should fail. @@ -236,17 +236,17 @@ def test_error_error_func(self): def test_inheriting_variables(self): reader = self.reader('inheriting-variables') - sandboxes = list(reader.read_topsrcdir()) + contexts = list(reader.read_topsrcdir()) - self.assertEqual(len(sandboxes), 4) - self.assertEqual([sandbox['RELATIVEDIR'] for sandbox in sandboxes], + self.assertEqual(len(contexts), 4) + self.assertEqual([context.relsrcdir for context in contexts], ['', 'foo', 'foo/baz', 'bar']) - self.assertEqual([sandbox['XPIDL_MODULE'] for sandbox in sandboxes], + self.assertEqual([context['XPIDL_MODULE'] for context in contexts], ['foobar', 'foobar', 'baz', 'foobar']) def test_process_eval_callback(self): - def strip_dirs(sandbox): - sandbox['DIRS'][:] = [] + def strip_dirs(context): + context['DIRS'][:] = [] count[0] += 1 reader = self.reader('traversal-simple', @@ -254,9 +254,9 @@ def strip_dirs(sandbox): count = [0] - sandboxes = list(reader.read_topsrcdir()) + contexts = list(reader.read_topsrcdir()) - self.assertEqual(len(sandboxes), 1) + self.assertEqual(len(contexts), 1) self.assertEqual(len(count), 1) diff --git a/python/mozbuild/mozbuild/test/frontend/test_sandbox.py b/python/mozbuild/mozbuild/test/frontend/test_sandbox.py index 7a3a09a6ab7a2..98c0a97a13f58 100644 --- a/python/mozbuild/mozbuild/test/frontend/test_sandbox.py +++ b/python/mozbuild/mozbuild/test/frontend/test_sandbox.py @@ -16,11 +16,13 
@@ ) from mozbuild.frontend.sandbox import ( + Sandbox, SandboxExecutionError, SandboxLoadError, ) -from mozbuild.frontend.sandbox_symbols import ( +from mozbuild.frontend.context import ( + Context, FUNCTIONS, SPECIAL_VARIABLES, VARIABLES, @@ -35,110 +37,34 @@ class TestSandbox(unittest.TestCase): - def sandbox(self, relpath='moz.build', data_path=None): - config = None - - if data_path is not None: - config = MockConfig(mozpath.join(test_data_path, data_path)) - else: - config = MockConfig() - - return MozbuildSandbox(config, mozpath.dirname(relpath)) - - def test_default_state(self): - sandbox = self.sandbox() - config = sandbox.config - - self.assertEqual(sandbox['TOPSRCDIR'], config.topsrcdir) - self.assertEqual(sandbox['TOPOBJDIR'], - mozpath.abspath(config.topobjdir)) - self.assertEqual(sandbox['RELATIVEDIR'], '') - self.assertEqual(sandbox['SRCDIR'], config.topsrcdir) - self.assertEqual(sandbox['OBJDIR'], - mozpath.abspath(config.topobjdir).replace(os.sep, '/')) - - def test_symbol_presence(self): - # Ensure no discrepancies between the master symbol table and what's in - # the sandbox. 
- sandbox = self.sandbox() - - all_symbols = set() - all_symbols |= set(FUNCTIONS.keys()) - all_symbols |= set(SPECIAL_VARIABLES.keys()) - - for symbol in sandbox: - self.assertIn(symbol, all_symbols) - all_symbols.remove(symbol) - - self.assertEqual(len(all_symbols), 0) - - def test_path_calculation(self): - sandbox = self.sandbox('foo/bar/moz.build') - config = sandbox.config - - self.assertEqual(sandbox['RELATIVEDIR'], 'foo/bar') - self.assertEqual(sandbox['SRCDIR'], '/'.join([config.topsrcdir, - 'foo/bar'])) - self.assertEqual(sandbox['OBJDIR'], - mozpath.abspath('/'.join([config.topobjdir, 'foo/bar'])).replace(os.sep, '/')) - - def test_config_access(self): - sandbox = self.sandbox() - config = sandbox.config - - self.assertIn('CONFIG', sandbox) - self.assertEqual(sandbox['CONFIG']['MOZ_TRUE'], '1') - self.assertEqual(sandbox['CONFIG']['MOZ_FOO'], config.substs['MOZ_FOO']) - - # Access to an undefined substitution should return None. - self.assertNotIn('MISSING', sandbox['CONFIG']) - self.assertIsNone(sandbox['CONFIG']['MISSING']) - - # Should shouldn't be allowed to assign to the config. 
- with self.assertRaises(Exception): - sandbox['CONFIG']['FOO'] = '' - - def test_dict_interface(self): - sandbox = self.sandbox() - config = sandbox.config - - self.assertFalse('foo' in sandbox) - self.assertFalse('FOO' in sandbox) - - self.assertTrue(sandbox.get('foo', True)) - self.assertEqual(sandbox.get('TOPSRCDIR'), config.topsrcdir) - self.assertGreater(len(sandbox), 6) - - for key in sandbox: - continue - - for key in sandbox.iterkeys(): - continue + def sandbox(self): + return Sandbox(Context(VARIABLES)) def test_exec_source_success(self): sandbox = self.sandbox() + context = sandbox._context - sandbox.exec_source('foo = True', 'foo.py') + sandbox.exec_source('foo = True', mozpath.abspath('foo.py')) - self.assertNotIn('foo', sandbox) - self.assertEqual(sandbox.main_path, 'foo.py') - self.assertEqual(sandbox.all_paths, set(['foo.py'])) + self.assertNotIn('foo', context) + self.assertEqual(context.main_path, mozpath.abspath('foo.py')) + self.assertEqual(context.all_paths, set([mozpath.abspath('foo.py')])) def test_exec_compile_error(self): sandbox = self.sandbox() with self.assertRaises(SandboxExecutionError) as se: - sandbox.exec_source('2f23;k;asfj', 'foo.py') + sandbox.exec_source('2f23;k;asfj', mozpath.abspath('foo.py')) - self.assertEqual(se.exception.file_stack, ['foo.py']) + self.assertEqual(se.exception.file_stack, [mozpath.abspath('foo.py')]) self.assertIsInstance(se.exception.exc_value, SyntaxError) - self.assertEqual(sandbox.main_path, 'foo.py') + self.assertEqual(sandbox._context.main_path, mozpath.abspath('foo.py')) def test_exec_import_denied(self): sandbox = self.sandbox() with self.assertRaises(SandboxExecutionError) as se: - sandbox.exec_source('import sys', 'import.py') + sandbox.exec_source('import sys') self.assertIsInstance(se.exception, SandboxExecutionError) self.assertEqual(se.exception.exc_type, ImportError) @@ -146,8 +72,8 @@ def test_exec_import_denied(self): def test_exec_source_multiple(self): sandbox = self.sandbox() - 
sandbox.exec_source('DIRS = ["foo"]', 'foo.py') - sandbox.exec_source('DIRS += ["bar"]', 'foo.py') + sandbox.exec_source('DIRS = ["foo"]') + sandbox.exec_source('DIRS += ["bar"]') self.assertEqual(sandbox['DIRS'], ['foo', 'bar']) @@ -155,7 +81,7 @@ def test_exec_source_illegal_key_set(self): sandbox = self.sandbox() with self.assertRaises(SandboxExecutionError) as se: - sandbox.exec_source('ILLEGAL = True', 'foo.py') + sandbox.exec_source('ILLEGAL = True') e = se.exception self.assertIsInstance(e.exc_value, KeyError) @@ -167,9 +93,9 @@ def test_exec_source_illegal_key_set(self): def test_exec_source_reassign(self): sandbox = self.sandbox() - sandbox.exec_source('DIRS = ["foo"]', 'foo.py') + sandbox.exec_source('DIRS = ["foo"]') with self.assertRaises(SandboxExecutionError) as se: - sandbox.exec_source('DIRS = ["bar"]', 'foo.py') + sandbox.exec_source('DIRS = ["bar"]') self.assertEqual(sandbox['DIRS'], ['foo']) e = se.exception @@ -180,18 +106,102 @@ def test_exec_source_reassign(self): self.assertEqual(e.args[1], 'reassign') self.assertEqual(e.args[2], 'DIRS') + def test_exec_source_reassign_builtin(self): + sandbox = self.sandbox() + + with self.assertRaises(SandboxExecutionError) as se: + sandbox.exec_source('True = 1') + + e = se.exception + self.assertIsInstance(e.exc_value, KeyError) + + e = se.exception.exc_value + self.assertEqual(e.args[0], 'Cannot reassign builtins') + + +class TestMozbuildSandbox(unittest.TestCase): + def sandbox(self, data_path=None): + config = None + + if data_path is not None: + config = MockConfig(mozpath.join(test_data_path, data_path)) + else: + config = MockConfig() + + return MozbuildSandbox(Context(VARIABLES, config)) + + def test_default_state(self): + sandbox = self.sandbox() + sandbox._context.add_source(sandbox.normalize_path('moz.build')) + config = sandbox._context.config + + self.assertEqual(sandbox['TOPSRCDIR'], config.topsrcdir) + self.assertEqual(sandbox['TOPOBJDIR'], config.topobjdir) + 
self.assertEqual(sandbox['RELATIVEDIR'], '') + self.assertEqual(sandbox['SRCDIR'], config.topsrcdir) + self.assertEqual(sandbox['OBJDIR'], config.topobjdir) + + def test_symbol_presence(self): + # Ensure no discrepancies between the master symbol table and what's in + # the sandbox. + sandbox = self.sandbox() + sandbox._context.add_source(sandbox.normalize_path('moz.build')) + + all_symbols = set() + all_symbols |= set(FUNCTIONS.keys()) + all_symbols |= set(SPECIAL_VARIABLES.keys()) + + for symbol in all_symbols: + self.assertIsNotNone(sandbox[symbol]) + + def test_path_calculation(self): + sandbox = self.sandbox() + sandbox._context.add_source(sandbox.normalize_path('foo/bar/moz.build')) + config = sandbox._context.config + + self.assertEqual(sandbox['TOPSRCDIR'], config.topsrcdir) + self.assertEqual(sandbox['TOPOBJDIR'], config.topobjdir) + self.assertEqual(sandbox['RELATIVEDIR'], 'foo/bar') + self.assertEqual(sandbox['SRCDIR'], + mozpath.join(config.topsrcdir, 'foo/bar')) + self.assertEqual(sandbox['OBJDIR'], + mozpath.join(config.topobjdir, 'foo/bar')) + + def test_config_access(self): + sandbox = self.sandbox() + config = sandbox._context.config + + self.assertEqual(sandbox['CONFIG']['MOZ_TRUE'], '1') + self.assertEqual(sandbox['CONFIG']['MOZ_FOO'], config.substs['MOZ_FOO']) + + # Access to an undefined substitution should return None. + self.assertNotIn('MISSING', sandbox['CONFIG']) + self.assertIsNone(sandbox['CONFIG']['MISSING']) + + # Should shouldn't be allowed to assign to the config. 
+ with self.assertRaises(Exception): + sandbox['CONFIG']['FOO'] = '' + + def test_special_variables(self): + sandbox = self.sandbox() + + for k in SPECIAL_VARIABLES: + with self.assertRaises(KeyError): + sandbox[k] = 0 + def test_exec_source_reassign_exported(self): config = MockConfig() exports = {'DIST_SUBDIR': 'browser'} - sandbox = MozbuildSandbox(config, '', metadata={'exports': exports}) + sandbox = MozbuildSandbox(Context(VARIABLES, config), + metadata={'exports': exports}) self.assertEqual(sandbox['DIST_SUBDIR'], 'browser') - sandbox.exec_source('DIST_SUBDIR = "foo"', 'foo.py') + sandbox.exec_source('DIST_SUBDIR = "foo"') with self.assertRaises(SandboxExecutionError) as se: - sandbox.exec_source('DIST_SUBDIR = "bar"', 'foo.py') + sandbox.exec_source('DIST_SUBDIR = "bar"') self.assertEqual(sandbox['DIST_SUBDIR'], 'foo') e = se.exception @@ -205,7 +215,7 @@ def test_exec_source_reassign_exported(self): def test_add_tier_dir_regular_str(self): sandbox = self.sandbox() - sandbox.exec_source('add_tier_dir("t1", "foo")', 'foo.py') + sandbox.exec_source('add_tier_dir("t1", "foo")') self.assertEqual(sandbox['TIERS']['t1'], {'regular': ['foo'], 'external': []}) @@ -213,7 +223,7 @@ def test_add_tier_dir_regular_str(self): def test_add_tier_dir_regular_list(self): sandbox = self.sandbox() - sandbox.exec_source('add_tier_dir("t1", ["foo", "bar"])', 'foo.py') + sandbox.exec_source('add_tier_dir("t1", ["foo", "bar"])') self.assertEqual(sandbox['TIERS']['t1'], {'regular': ['foo', 'bar'], 'external': []}) @@ -221,7 +231,7 @@ def test_add_tier_dir_regular_list(self): def test_add_tier_dir_external(self): sandbox = self.sandbox() - sandbox.exec_source('add_tier_dir("t1", "foo", external=True)', 'foo.py') + sandbox.exec_source('add_tier_dir("t1", "foo", external=True)') self.assertEqual(sandbox['TIERS']['t1'], {'regular': [], 'external': ['foo']}) @@ -237,17 +247,17 @@ def test_tier_order(self): add_tier_dir('t1', 'bat') ''' - sandbox.exec_source(source, 'foo.py') + 
sandbox.exec_source(source) self.assertEqual([k for k in sandbox['TIERS'].keys()], ['t1', 't2', 't3']) def test_tier_multiple_registration(self): sandbox = self.sandbox() - sandbox.exec_source('add_tier_dir("t1", "foo")', 'foo.py') + sandbox.exec_source('add_tier_dir("t1", "foo")') with self.assertRaises(SandboxExecutionError): - sandbox.exec_source('add_tier_dir("t1", "foo")', 'foo.py') + sandbox.exec_source('add_tier_dir("t1", "foo")') def test_include_basic(self): sandbox = self.sandbox(data_path='include-basic') @@ -255,9 +265,9 @@ def test_include_basic(self): sandbox.exec_file('moz.build') self.assertEqual(sandbox['DIRS'], ['foo', 'bar']) - self.assertEqual(sandbox.main_path, - mozpath.join(sandbox['TOPSRCDIR'], 'moz.build')) - self.assertEqual(len(sandbox.all_paths), 2) + self.assertEqual(sandbox._context.main_path, + sandbox.normalize_path('moz.build')) + self.assertEqual(len(sandbox._context.all_paths), 2) def test_include_outside_topsrcdir(self): sandbox = self.sandbox(data_path='include-outside-topsrcdir') @@ -282,7 +292,7 @@ def test_include_error_stack(self): self.assertEqual(args[1], 'set_unknown') self.assertEqual(args[2], 'ILLEGAL') - expected_stack = [mozpath.join(sandbox.config.topsrcdir, p) for p in [ + expected_stack = [mozpath.join(sandbox._context.config.topsrcdir, p) for p in [ 'moz.build', 'included-1.build', 'included-2.build']] self.assertEqual(e.file_stack, expected_stack) @@ -318,7 +328,7 @@ def test_error(self): sandbox = self.sandbox() with self.assertRaises(SandboxCalledError) as sce: - sandbox.exec_source('error("This is an error.")', 'test.py') + sandbox.exec_source('error("This is an error.")') e = sce.exception self.assertEqual(e.message, 'This is an error.') @@ -326,8 +336,7 @@ def test_error(self): def test_substitute_config_files(self): sandbox = self.sandbox() - sandbox.exec_source('CONFIGURE_SUBST_FILES += ["bar", "foo"]', - 'test.py') + sandbox.exec_source('CONFIGURE_SUBST_FILES += ["bar", "foo"]') 
self.assertEqual(sandbox['CONFIGURE_SUBST_FILES'], ['bar', 'foo']) def test_invalid_utf8_substs(self): @@ -336,7 +345,7 @@ def test_invalid_utf8_substs(self): # This is really mbcs. It's a bunch of invalid UTF-8. config = MockConfig(extra_substs={'BAD_UTF8': b'\x83\x81\x83\x82\x3A'}) - sandbox = MozbuildSandbox(config, '/foo/moz.build') + sandbox = MozbuildSandbox(Context(VARIABLES, config)) self.assertEqual(sandbox['CONFIG']['BAD_UTF8'], u'\ufffd\ufffd\ufffd\ufffd:') @@ -345,7 +354,7 @@ def test_invalid_exports_set_base(self): sandbox = self.sandbox() with self.assertRaises(SandboxExecutionError) as se: - sandbox.exec_source('EXPORTS = "foo.h"', 'foo.py') + sandbox.exec_source('EXPORTS = "foo.h"') self.assertEqual(se.exception.exc_type, ValueError) diff --git a/python/mozbuild/mozbuild/test/frontend/test_sandbox_symbols.py b/python/mozbuild/mozbuild/test/frontend/test_sandbox_symbols.py deleted file mode 100644 index b324f0d2560a0..0000000000000 --- a/python/mozbuild/mozbuild/test/frontend/test_sandbox_symbols.py +++ /dev/null @@ -1,50 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - -import unittest - -from mozunit import main - -from mozbuild.frontend.sandbox_symbols import ( - FUNCTIONS, - SPECIAL_VARIABLES, - VARIABLES, -) - - -class TestSymbols(unittest.TestCase): - def _verify_doc(self, doc): - # Documentation should be of the format: - # """SUMMARY LINE - # - # EXTRA PARAGRAPHS - # """ - - self.assertNotIn('\r', doc) - - lines = doc.split('\n') - - # No trailing whitespace. - for line in lines[0:-1]: - self.assertEqual(line, line.rstrip()) - - self.assertGreater(len(lines), 0) - self.assertGreater(len(lines[0].strip()), 0) - - # Last line should be empty. 
- self.assertEqual(lines[-1].strip(), '') - - def test_documentation_formatting(self): - for typ, inp, doc, tier in VARIABLES.values(): - self._verify_doc(doc) - - for attr, args, doc in FUNCTIONS.values(): - self._verify_doc(doc) - - for typ, doc in SPECIAL_VARIABLES.values(): - self._verify_doc(doc) - - -if __name__ == '__main__': - main() diff --git a/tools/docs/mach_commands.py b/tools/docs/mach_commands.py index 7948f2490de62..060608450402d 100644 --- a/tools/docs/mach_commands.py +++ b/tools/docs/mach_commands.py @@ -40,19 +40,19 @@ def build_docs(self, format=None, outdir=None): # We don't care about GYP projects, so don't process them. This makes # scanning faster and may even prevent an exception. - def remove_gyp_dirs(sandbox): - sandbox['GYP_DIRS'][:] = [] + def remove_gyp_dirs(context): + context['GYP_DIRS'][:] = [] reader = BuildReader(self.config_environment, sandbox_post_eval_cb=remove_gyp_dirs) - for sandbox in reader.walk_topsrcdir(): - for dest_dir, source_dir in sandbox['SPHINX_TREES'].items(): - manager.add_tree(os.path.join(sandbox['RELATIVEDIR'], + for context in reader.walk_topsrcdir(): + for dest_dir, source_dir in context['SPHINX_TREES'].items(): + manager.add_tree(os.path.join(context.relsrcdir, source_dir), dest_dir) - for entry in sandbox['SPHINX_PYTHON_PACKAGE_DIRS']: - manager.add_python_package_dir(os.path.join(sandbox['RELATIVEDIR'], + for entry in context['SPHINX_PYTHON_PACKAGE_DIRS']: + manager.add_python_package_dir(os.path.join(context.relsrcdir, entry)) return manager.generate_docs(format)