def _write_global_derived(self):
from Codegen import GlobalGenRoots
things = [('declare', f) for f in self.GLOBAL_DECLARE_FILES]
things.extend(('define', f) for f in self.GLOBAL_DEFINE_FILES)
result = (set(), set(), set())
for what, filename in things:
stem = mozpath.splitext(filename)[0]
root = getattr(GlobalGenRoots, stem)(self._config)
if what == 'declare':
code = root.declare()
output_root = self._exported_header_dir
elif what == 'define':
code = root.define()
output_root = self._codegen_dir
else:
raise Exception('Unknown global gen type: %s' % what)
output_path = mozpath.join(output_root, filename)
self._maybe_write_file(output_path, code, result)
return result
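# A minimal sketch of the _maybe_write_file contract assumed above (the
# real helper lives elsewhere in this class): write `content` to `path`
# only when it differs from what is on disk, and record the path in one
# of the three result sets, assumed here to be (created, updated,
# unchanged).
import os

def _maybe_write_file_sketch(path, content, result):
    created, updated, unchanged = result
    if not os.path.exists(path):
        with open(path, 'w') as fh:
            fh.write(content)
        created.add(path)
        return
    with open(path) as fh:
        old = fh.read()
    if old == content:
        unchanged.add(path)
    else:
        with open(path, 'w') as fh:
            fh.write(content)
        updated.add(path)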
def _handle_manifest_entry(self, entry, jars):
jarpath = None
if isinstance(entry, ManifestEntryWithRelPath) and \
urlparse(entry.relpath).scheme == 'jar':
jarpath, entry = self._unjarize(entry, entry.relpath)
elif isinstance(entry, ManifestResource) and \
urlparse(entry.target).scheme == 'jar':
jarpath, entry = self._unjarize(entry, entry.target)
if jarpath:
# Don't defer unpacking the jar file. If we already saw
# it, take (and remove) it from the registry. If we
# haven't, try to find it now.
if self.files.contains(jarpath):
jar = self.files[jarpath]
self.files.remove(jarpath)
else:
jar = [f for p, f in FileFinder.find(self, jarpath)]
assert len(jar) == 1
jar = jar[0]
        if jarpath not in jars:
base = mozpath.splitext(jarpath)[0]
for j in self._open_jar(jarpath, jar):
self.files.add(mozpath.join(base,
j.filename),
DeflatedFile(j))
jars.add(jarpath)
self.kind = 'jar'
return entry
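# Hedged illustration of the scheme check above: a manifest entry whose
# target lives inside a jar is written as a 'jar:' URL, with '!'
# separating the jar's path from the path within it.
from urlparse import urlparse  # Python 2, matching the code above

url = 'jar:chrome/toolkit.jar!/content/browser.xul'
assert urlparse(url).scheme == 'jar'
jarpath, inner = urlparse(url).path.split('!', 1)
assert jarpath == 'chrome/toolkit.jar' and inner == '/content/browser.xul'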
def _binding_info(self, p):
"""Compute binding metadata for an input path.
    Returns a tuple of:
      (stem, binding_stem, is_event, header_dir, output_files)
    output_files is itself a 4-tuple. The first two items are the binding
    header and C++ paths, respectively. The second pair is the event header
    and C++ paths, or None for each if this isn't an event binding.
"""
basename = mozpath.basename(p)
stem = mozpath.splitext(basename)[0]
binding_stem = '%sBinding' % stem
if stem in self._exported_stems:
header_dir = self._exported_header_dir
else:
header_dir = self._codegen_dir
is_event = stem in self._generated_events_stems
files = (
mozpath.join(header_dir, '%s.h' % binding_stem),
mozpath.join(self._codegen_dir, '%s.cpp' % binding_stem),
mozpath.join(header_dir, '%s.h' % stem) if is_event else None,
mozpath.join(self._codegen_dir, '%s.cpp' % stem) if is_event else None,
)
return stem, binding_stem, is_event, header_dir, files
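# Worked example (hypothetical input) of the tuple returned above for
# p = 'dom/webidl/Foo.webidl', when 'Foo' is an exported stem and also a
# generated event:
#
#   ('Foo', 'FooBinding', True, <exported_header_dir>,
#    (<exported_header_dir>/FooBinding.h, <codegen_dir>/FooBinding.cpp,
#     <exported_header_dir>/Foo.h, <codegen_dir>/Foo.cpp))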
def _unjarize(self, entry, relpath):
'''
    Transform a manifest entry pointing to chrome data in a jar into one
    pointing to the corresponding unpacked path. Return the jar path and
the new entry.
'''
base = entry.base
jar, relpath = urlparse(relpath).path.split('!', 1)
entry = entry.rebase(mozpath.join(base, 'jar:%s!' % jar)) \
.move(mozpath.join(base, mozpath.splitext(jar)[0])) \
.rebase(base)
return mozpath.join(base, jar), entry
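# Hedged walk-through of the rebase/move/rebase chain above, assuming
# base == 'browser' and relpath == 'jar:browser.jar!/content/foo': the
# first rebase re-expresses the entry relative to the 'jar:browser.jar!'
# URL, the move retargets it at 'browser/browser' (the unpacked jar
# directory, named after the jar's stem), and the final rebase makes it
# relative to 'browser' again, so the entry now points at
# 'browser/content/foo' instead of into the jar.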
def _preprocess(self, backend_file, input_file, destdir=None):
    # .css files use '%' as the preprocessor marker, which must be escaped
    # as '%%' in the Tupfile.
marker = '%%' if input_file.endswith('.css') else '#'
cmd = self._py_action('preprocessor')
cmd.extend([shell_quote(d) for d in backend_file.defines])
cmd.extend(['$(ACDEFINES)', '%f', '-o', '%o', '--marker=%s' % marker])
base_input = mozpath.basename(input_file)
if base_input.endswith('.in'):
base_input = mozpath.splitext(base_input)[0]
output = mozpath.join(destdir, base_input) if destdir else base_input
backend_file.rule(
inputs=[input_file],
display='Preprocess %o',
cmd=cmd,
outputs=[output],
)
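# Small illustration of the marker choice above: CSS inputs use '%' as
# the preprocessor directive marker, and a literal '%' must be doubled
# in a Tupfile, so it is emitted as '%%'; every other input uses '#'.
for name in ('content.css', 'prefs.js.in'):
    marker = '%%' if name.endswith('.css') else '#'
    print('%s -> %s' % (name, marker))  # content.css -> %%, prefs.js.in -> #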
Developer ID: luke-chang, Project: gecko-1, Lines: 20, Source file: tup.py
Example 10: register_idl
def register_idl(self, source, module, allow_existing=False):
"""Registers an IDL file with this instance.
The IDL file will be built, installed, etc.
"""
basename = mozpath.basename(source)
root = mozpath.splitext(basename)[0]
entry = {
'source': source,
'module': module,
'basename': basename,
'root': root,
}
if not allow_existing and entry['basename'] in self.idls:
        raise Exception('IDL already registered: %s' % entry['basename'])
self.idls[entry['basename']] = entry
self.modules.setdefault(entry['module'], set()).add(entry['root'])
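# Hypothetical usage sketch (names are illustrative, not from the
# source): registering two IDLs under the same module groups their
# roots together in self.modules.
#
#   mgr.register_idl('xpcom/ds/nsIArray.idl', module='xpcom')
#   mgr.register_idl('xpcom/ds/nsIMutableArray.idl', module='xpcom')
#   # mgr.modules['xpcom'] now contains {'nsIArray', 'nsIMutableArray'}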
def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
non_unified_sources, action_overrides):
flat_list, targets, data = gyp_result
no_chromium = gyp_dir_attrs.no_chromium
no_unified = gyp_dir_attrs.no_unified
    # Process all targets from the given gyp files and their dependencies.
# The path given to AllTargets needs to use os.sep, while the frontend code
# gives us paths normalized with forward slash separator.
for target in gyp.common.AllTargets(flat_list, targets, path.replace(b'/', os.sep)):
build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(target)
# Each target is given its own objdir. The base of that objdir
# is derived from the relative path from the root gyp file path
# to the current build_file, placed under the given output
# directory. Since several targets can be in a given build_file,
# separate them in subdirectories using the build_file basename
# and the target_name.
reldir = mozpath.relpath(mozpath.dirname(build_file),
mozpath.dirname(path))
subdir = '%s_%s' % (
mozpath.splitext(mozpath.basename(build_file))[0],
target_name,
)
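        # Worked example (hypothetical paths): with the root gyp file at
        # 'media/webrtc/trunk/peerconnection.gyp' and build_file
        # 'media/webrtc/trunk/modules/modules.gyp' defining target
        # 'audio_coding', reldir == 'modules' and subdir ==
        # 'modules_audio_coding', giving that target its own objdir.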
# Emit a context for each target.
context = GypContext(config, mozpath.relpath(
mozpath.join(output, reldir, subdir), config.topobjdir))
context.add_source(mozpath.abspath(build_file))
        # The included files returned by gyp are relative to build_file
for f in data[build_file]['included_files']:
context.add_source(mozpath.abspath(mozpath.join(
mozpath.dirname(build_file), f)))
spec = targets[target]
# Derive which gyp configuration to use based on MOZ_DEBUG.
c = 'Debug' if config.substs['MOZ_DEBUG'] else 'Release'
if c not in spec['configurations']:
raise RuntimeError('Missing %s gyp configuration for target %s '
'in %s' % (c, target_name, build_file))
target_conf = spec['configurations'][c]
if 'actions' in spec:
handle_actions(spec['actions'], context, action_overrides)
if 'copies' in spec:
handle_copies(spec['copies'], context)
use_libs = []
libs = []
def add_deps(s):
for t in s.get('dependencies', []) + s.get('dependencies_original', []):
ty = targets[t]['type']
if ty in ('static_library', 'shared_library'):
use_libs.append(targets[t]['target_name'])
# Manually expand out transitive dependencies--
# gyp won't do this for static libs or none targets.
if ty in ('static_library', 'none'):
add_deps(targets[t])
libs.extend(spec.get('libraries', []))
#XXX: this sucks, but webrtc breaks with this right now because
# it builds a library called 'gtest' and we just get lucky
# that it isn't in USE_LIBS by that name anywhere.
if no_chromium:
add_deps(spec)
os_libs = []
for l in libs:
if l.startswith('-'):
os_libs.append(l)
elif l.endswith('.lib'):
os_libs.append(l[:-4])
elif l:
# For library names passed in from moz.build.
use_libs.append(os.path.basename(l))
if spec['type'] == 'none':
if not ('actions' in spec or 'copies' in spec):
continue
elif spec['type'] in ('static_library', 'shared_library', 'executable'):
# Remove leading 'lib' from the target_name if any, and use as
# library name.
name = spec['target_name']
if spec['type'] in ('static_library', 'shared_library'):
if name.startswith('lib'):
name = name[3:]
                # The context expects a unicode string.
context['LIBRARY_NAME'] = name.decode('utf-8')
else:
context['PROGRAM'] = name.decode('utf-8')
if spec['type'] == 'shared_library':
context['FORCE_SHARED_LIB'] = True
elif spec['type'] == 'static_library' and spec.get('variables', {}).get('no_expand_libs', '0') == '1':
            # PSM links an NSS static library, but our folded libnss
# doesn't actually export everything that all of the
# objects within would need, so that one library
# should be built as a real static library.
context['NO_EXPAND_LIBS'] = True
if use_libs:
context['USE_LIBS'] = sorted(use_libs, key=lambda s: s.lower())
if os_libs:
#......... part of the code omitted here .........
def _process_sources(self, context, passthru):
for symbol in ('SOURCES', 'HOST_SOURCES', 'UNIFIED_SOURCES'):
for src in (context[symbol] or []):
if not os.path.exists(mozpath.join(context.srcdir, src)):
raise SandboxValidationError('File listed in %s does not '
'exist: \'%s\'' % (symbol, src), context)
no_pgo = context.get('NO_PGO')
sources = context.get('SOURCES', [])
no_pgo_sources = [f for f in sources if sources[f].no_pgo]
if no_pgo:
if no_pgo_sources:
raise SandboxValidationError('NO_PGO and SOURCES[...].no_pgo '
'cannot be set at the same time', context)
passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo
if no_pgo_sources:
passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo_sources
# A map from "canonical suffixes" for a particular source file
# language to the range of suffixes associated with that language.
#
# We deliberately don't list the canonical suffix in the suffix list
# in the definition; we'll add it in programmatically after defining
# things.
suffix_map = {
'.s': set(['.asm']),
'.c': set(),
'.m': set(),
'.mm': set(),
'.cpp': set(['.cc', '.cxx']),
'.S': set(),
}
# The inverse of the above, mapping suffixes to their canonical suffix.
canonicalized_suffix_map = {}
for suffix, alternatives in suffix_map.iteritems():
alternatives.add(suffix)
for a in alternatives:
canonicalized_suffix_map[a] = suffix
def canonical_suffix_for_file(f):
return canonicalized_suffix_map[mozpath.splitext(f)[1]]
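    # For instance, the mapping built above sends '.cc' and '.cxx' to
    # '.cpp' and '.asm' to '.s', while each canonical suffix maps to
    # itself:
    #   canonical_suffix_for_file('a.cxx') == '.cpp'
    #   canonical_suffix_for_file('b.asm') == '.s'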
# A map from moz.build variables to the canonical suffixes of file
# kinds that can be listed therein.
all_suffixes = list(suffix_map.keys())
varmap = dict(
SOURCES=(Sources, all_suffixes),
HOST_SOURCES=(HostSources, ['.c', '.mm', '.cpp']),
UNIFIED_SOURCES=(UnifiedSources, ['.c', '.mm', '.cpp']),
GENERATED_SOURCES=(GeneratedSources, all_suffixes),
)
for variable, (klass, suffixes) in varmap.items():
allowed_suffixes = set().union(*[suffix_map[s] for s in suffixes])
# First ensure that we haven't been given filetypes that we don't
# recognize.
for f in context[variable]:
ext = mozpath.splitext(f)[1]
if ext not in allowed_suffixes:
raise SandboxValidationError(
'%s has an unknown file type.' % f, context)
if variable.startswith('GENERATED_'):
l = passthru.variables.setdefault('GARBAGE', [])
l.append(f)
# Now sort the files to let groupby work.
sorted_files = sorted(context[variable], key=canonical_suffix_for_file)
for canonical_suffix, files in itertools.groupby(sorted_files, canonical_suffix_for_file):
arglist = [context, list(files), canonical_suffix]
if variable.startswith('UNIFIED_') and 'FILES_PER_UNIFIED_FILE' in context:
arglist.append(context['FILES_PER_UNIFIED_FILE'])
yield klass(*arglist)
sources_with_flags = [f for f in sources if sources[f].flags]
for f in sources_with_flags:
ext = mozpath.splitext(f)[1]
yield PerSourceFlag(context, f, sources[f].flags)
def process_gn_config(gn_config, srcdir, config, output, non_unified_sources,
sandbox_vars, mozilla_flags):
# Translates a json gn config into attributes that can be used to write out
# moz.build files for this configuration.
# Much of this code is based on similar functionality in `gyp_reader.py`.
mozbuild_attrs = {'mozbuild_args': gn_config.get('mozbuild_args', None),
'dirs': {}}
targets = gn_config["targets"]
project_relsrcdir = mozpath.relpath(srcdir, config.topsrcdir)
    def target_info(fullname):
        path, name = fullname.split(':')
        # Stripping '//' gives us a path relative to the project root;
        # adding a suffix avoids name collisions with libraries already
        # in the tree (like "webrtc").
        return path.lstrip('//'), name + '_gn'
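    # Example: target_info('//webrtc/api:audio') == ('webrtc/api', 'audio_gn').
    # Note that str.lstrip('//') strips *characters* (any leading '/'),
    # which is exactly what the '//'-rooted GN paths need here.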
# Process all targets from the given gn project and its dependencies.
for target_fullname, spec in targets.iteritems():
target_path, target_name = target_info(target_fullname)
context_attrs = {}
# Remove leading 'lib' from the target_name if any, and use as
# library name.
name = target_name
if spec['type'] in ('static_library', 'shared_library', 'source_set'):
if name.startswith('lib'):
name = name[3:]
context_attrs['LIBRARY_NAME'] = name.decode('utf-8')
else:
raise Exception('The following GN target type is not currently '
'consumed by moz.build: "%s". It may need to be '
'added, or you may need to re-run the '
'`GnConfigGen` step.' % spec['type'])
if spec['type'] == 'shared_library':
context_attrs['FORCE_SHARED_LIB'] = True
sources = []
unified_sources = []
extensions = set()
use_defines_in_asflags = False
for f in spec.get('sources', []):
f = f.lstrip("//")
ext = mozpath.splitext(f)[-1]
extensions.add(ext)
src = '%s/%s' % (project_relsrcdir, f)
if ext == '.h':
continue
elif ext == '.def':
context_attrs['SYMBOLS_FILE'] = src
elif ext != '.S' and src not in non_unified_sources:
unified_sources.append('/%s' % src)
else:
sources.append('/%s' % src)
# The Mozilla build system doesn't use DEFINES for building
# ASFILES.
if ext == '.s':
use_defines_in_asflags = True
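        # Net effect of the dispatch in the loop above: headers are
        # skipped, a .def file becomes the SYMBOLS_FILE, .S assembly and
        # anything listed in non_unified_sources compiles standalone, and
        # everything else is eligible for unified compilation.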
context_attrs['SOURCES'] = sources
context_attrs['UNIFIED_SOURCES'] = unified_sources
context_attrs['DEFINES'] = {}
for define in spec.get('defines', []):
if '=' in define:
name, value = define.split('=', 1)
context_attrs['DEFINES'][name] = value
else:
context_attrs['DEFINES'][define] = True
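        # Example: a GN define list like ['WEBRTC_POSIX', 'NDEBUG=1']
        # becomes DEFINES == {'WEBRTC_POSIX': True, 'NDEBUG': '1'}.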
context_attrs['LOCAL_INCLUDES'] = []
for include in spec.get('include_dirs', []):
# GN will have resolved all these paths relative to the root of
# the project indicated by "//".
if include.startswith('//'):
include = include[2:]
# moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
if include.startswith('/'):
resolved = mozpath.abspath(mozpath.join(config.topsrcdir, include[1:]))
else:
resolved = mozpath.abspath(mozpath.join(srcdir, include))
if not os.path.exists(resolved):
# GN files may refer to include dirs that are outside of the
# tree or we simply didn't vendor. Print a warning in this case.
if not resolved.endswith('gn-output/gen'):
print("Included path: '%s' does not exist, dropping include from GN "
"configuration." % resolved, file=sys.stderr)
continue
if not include.startswith('/'):
include = '/%s/%s' % (project_relsrcdir, include)
context_attrs['LOCAL_INCLUDES'] += [include]
context_attrs['ASFLAGS'] = spec.get('asflags_mozilla', [])
#......... part of the code omitted here .........
#......... part of the code omitted here .........
OS_LIBS="OS_LIBS",
SDK_LIBRARY="SDK_LIBRARY",
)
for mak, moz in varmap.items():
if sandbox[moz]:
passthru.variables[mak] = sandbox[moz]
# NO_VISIBILITY_FLAGS is slightly different
if sandbox["NO_VISIBILITY_FLAGS"]:
passthru.variables["VISIBILITY_FLAGS"] = ""
varmap = dict(
SOURCES={
".s": "ASFILES",
".asm": "ASFILES",
".c": "CSRCS",
".m": "CMSRCS",
".mm": "CMMSRCS",
".cc": "CPPSRCS",
".cpp": "CPPSRCS",
".S": "SSRCS",
},
HOST_SOURCES={".c": "HOST_CSRCS", ".mm": "HOST_CMMSRCS", ".cc": "HOST_CPPSRCS", ".cpp": "HOST_CPPSRCS"},
UNIFIED_SOURCES={
".c": "UNIFIED_CSRCS",
".mm": "UNIFIED_CMMSRCS",
".cc": "UNIFIED_CPPSRCS",
".cpp": "UNIFIED_CPPSRCS",
},
)
varmap.update(dict(("GENERATED_%s" % k, v) for k, v in varmap.items() if k in ("SOURCES", "UNIFIED_SOURCES")))
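    # For example, 'foo.cc' listed in SOURCES is appended to CPPSRCS;
    # 'foo.cc' in GENERATED_SOURCES reuses the same suffix mapping (so it
    # also lands in CPPSRCS) and is additionally recorded in GARBAGE below.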
for variable, mapping in varmap.items():
for f in sandbox[variable]:
ext = mozpath.splitext(f)[1]
if ext not in mapping:
raise SandboxValidationError("%s has an unknown file type in %s" % (f, sandbox["RELATIVEDIR"]))
l = passthru.variables.setdefault(mapping[ext], [])
l.append(f)
if variable.startswith("GENERATED_"):
l = passthru.variables.setdefault("GARBAGE", [])
l.append(f)
no_pgo = sandbox.get("NO_PGO")
sources = sandbox.get("SOURCES", [])
no_pgo_sources = [f for f in sources if sources[f].no_pgo]
if no_pgo:
if no_pgo_sources:
raise SandboxValidationError("NO_PGO and SOURCES[...].no_pgo cannot be set at the same time")
passthru.variables["NO_PROFILE_GUIDED_OPTIMIZE"] = no_pgo
if no_pgo_sources:
passthru.variables["NO_PROFILE_GUIDED_OPTIMIZE"] = no_pgo_sources
exports = sandbox.get("EXPORTS")
if exports:
yield Exports(sandbox, exports, dist_install=not sandbox.get("NO_DIST_INSTALL", False))
defines = sandbox.get("DEFINES")
if defines:
yield Defines(sandbox, defines)
program = sandbox.get("PROGRAM")
if program:
yield Program(sandbox, program, sandbox["CONFIG"]["BIN_SUFFIX"])
program = sandbox.get("HOST_PROGRAM")
    if program:
#......... part of the code omitted here .........
def read_from_gyp(config, path, output, vars, non_unified_sources=set()):
"""Read a gyp configuration and emits GypContexts for the backend to
process.
config is a ConfigEnvironment, path is the path to a root gyp configuration
file, output is the base path under which the objdir for the various gyp
dependencies will be, and vars a dict of variables to pass to the gyp
processor.
"""
time_start = time.time()
# gyp expects plain str instead of unicode. The frontend code gives us
# unicode strings, so convert them.
path = encode(path)
str_vars = dict((name, encode(value)) for name, value in vars.items())
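    # 'encode' (defined elsewhere in this module) is assumed here to turn
    # unicode strings into UTF-8 byte strings, e.g.
    # encode(u'dom/media') == b'dom/media', since gyp under Python 2
    # chokes on unicode input.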
params = {b"parallel": False, b"generator_flags": {}, b"build_files": [path]}
# Files that gyp_chromium always includes
includes = [encode(mozpath.join(script_dir, "common.gypi"))]
finder = FileFinder(chrome_src, find_executables=False)
includes.extend(encode(mozpath.join(chrome_src, name)) for name, _ in finder.find("*/supplement.gypi"))
# Read the given gyp file and its dependencies.
generator, flat_list, targets, data = gyp.Load(
[path],
format=b"mozbuild",
default_variables=str_vars,
includes=includes,
depth=encode(mozpath.dirname(path)),
params=params,
)
    # Process all targets from the given gyp files and their dependencies.
# The path given to AllTargets needs to use os.sep, while the frontend code
# gives us paths normalized with forward slash separator.
for target in gyp.common.AllTargets(flat_list, targets, path.replace(b"/", os.sep)):
build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(target)
# Each target is given its own objdir. The base of that objdir
# is derived from the relative path from the root gyp file path
# to the current build_file, placed under the given output
# directory. Since several targets can be in a given build_file,
# separate them in subdirectories using the build_file basename
# and the target_name.
reldir = mozpath.relpath(mozpath.dirname(build_file), mozpath.dirname(path))
subdir = "%s_%s" % (mozpath.splitext(mozpath.basename(build_file))[0], target_name)
# Emit a context for each target.
context = GypContext(config, mozpath.relpath(mozpath.join(output, reldir, subdir), config.topobjdir))
context.add_source(mozpath.abspath(build_file))
        # The included files returned by gyp are relative to build_file
for f in data[build_file]["included_files"]:
context.add_source(mozpath.abspath(mozpath.join(mozpath.dirname(build_file), f)))
spec = targets[target]
# Derive which gyp configuration to use based on MOZ_DEBUG.
c = "Debug" if config.substs["MOZ_DEBUG"] else "Release"
if c not in spec["configurations"]:
raise RuntimeError("Missing %s gyp configuration for target %s " "in %s" % (c, target_name, build_file))
target_conf = spec["configurations"][c]
if spec["type"] == "none":
continue
elif spec["type"] == "static_library":
# Remove leading 'lib' from the target_name if any, and use as
# library name.
name = spec["target_name"]
if name.startswith("lib"):
name = name[3:]
            # The context expects a unicode string.
context["LIBRARY_NAME"] = name.decode("utf-8")
# gyp files contain headers and asm sources in sources lists.
sources = []
unified_sources = []
extensions = set()
for f in spec.get("sources", []):
ext = mozpath.splitext(f)[-1]
extensions.add(ext)
s = SourcePath(context, f)
if ext == ".h":
continue
if ext != ".S" and s not in non_unified_sources:
unified_sources.append(s)
else:
sources.append(s)
# The context expects alphabetical order when adding sources
context["SOURCES"] = alphabetical_sorted(sources)
context["UNIFIED_SOURCES"] = alphabetical_sorted(unified_sources)
for define in target_conf.get("defines", []):
if "=" in define:
name, value = define.split("=", 1)
context["DEFINES"][name] = value
else:
context["DEFINES"][define] = True
for include in target_conf.get("include_dirs", []):
#......... part of the code omitted here .........
def read_from_gyp(config, path, output, vars, non_unified_sources=set()):
    """Read a gyp configuration and emit GypContexts for the backend to
    process.
    config is a ConfigEnvironment, path is the path to a root gyp
    configuration file, output is the base path under which the objdirs for
    the various gyp dependencies will live, and vars is a dict of variables
    to pass to the gyp processor.
    """
# gyp expects plain str instead of unicode. The frontend code gives us
# unicode strings, so convert them.
path = encode(path)
str_vars = dict((name, encode(value)) for name, value in vars.items())
params = {
b'parallel': False,
b'generator_flags': {},
b'build_files': [path],
}
# Files that gyp_chromium always includes
includes = [encode(mozpath.join(script_dir, 'common.gypi'))]
finder = FileFinder(chrome_src, find_executables=False)
includes.extend(encode(mozpath.join(chrome_src, name))
for name, _ in finder.find('*/supplement.gypi'))
# Read the given gyp file and its dependencies.
generator, flat_list, targets, data = \
gyp.Load([path], format=b'mozbuild',
default_variables=str_vars,
includes=includes,
depth=encode(mozpath.dirname(path)),
params=params)
    # Process all targets from the given gyp files and their dependencies.
# The path given to AllTargets needs to use os.sep, while the frontend code
# gives us paths normalized with forward slash separator.
for target in gyp.common.AllTargets(flat_list, targets, path.replace(b'/', os.sep)):
build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(target)
# Each target is given its own objdir. The base of that objdir
# is derived from the relative path from the root gyp file path
# to the current build_file, placed under the given output
# directory. Since several targets can be in a given build_file,
# separate them in subdirectories using the build_file basename
# and the target_name.
reldir = mozpath.relpath(mozpath.dirname(build_file),
mozpath.dirname(path))
subdir = '%s_%s' % (
mozpath.splitext(mozpath.basename(build_file))[0],
target_name,
)
# Emit a context for each target.
context = GypContext(config, mozpath.relpath(
mozpath.join(output, reldir, subdir), config.topobjdir))
context.add_source(mozpath.abspath(build_file))
        # The included files returned by gyp are relative to build_file
for f in data[build_file]['included_files']:
context.add_source(mozpath.abspath(mozpath.join(
mozpath.dirname(build_file), f)))
spec = targets[target]
# Derive which gyp configuration to use based on MOZ_DEBUG.
c = 'Debug' if config.substs['MOZ_DEBUG'] else 'Release'
if c not in spec['configurations']:
raise RuntimeError('Missing %s gyp configuration for target %s '
'in %s' % (c, target_name, build_file))
target_conf = spec['configurations'][c]
if spec['type'] == 'none':
continue
elif spec['type'] == 'static_library':
# Remove leading 'lib' from the target_name if any, and use as
# library name.
name = spec['target_name']
if name.startswith('lib'):
name = name[3:]
            # The context expects a unicode string.
context['LIBRARY_NAME'] = name.decode('utf-8')
# gyp files contain headers and asm sources in sources lists.
sources = []
unified_sources = []
extensions = set()
for f in spec.get('sources', []):
ext = mozpath.splitext(f)[-1]
extensions.add(ext)
s = SourcePath(context, f)
if ext == '.h':
continue
if ext != '.S' and s not in non_unified_sources:
unified_sources.append(s)
else:
sources.append(s)
# The context expects alphabetical order when adding sources
context['SOURCES'] = alphabetical_sorted(sources)
context['UNIFIED_SOURCES'] = alphabetical_sorted(unified_sources)
#......... part of the code omitted here .........