def generate_binding_files(config, outputprefix, srcprefix, webidlfile,
                           generatedEventsWebIDLFiles):
    """
    |config| is the configuration object.
    |outputprefix| is a prefix to use for the header guards and filename.
    """

    depsname = ".deps/" + outputprefix + ".pp"
    root = CGBindingRoot(config, outputprefix, webidlfile)
    replaceFileIfChanged(outputprefix + ".h", root.declare())
    replaceFileIfChanged(outputprefix + ".cpp", root.define())

    if webidlfile in generatedEventsWebIDLFiles:
        eventName = webidlfile[:-len(".webidl")]
        generatedEvent = CGEventRoot(config, eventName)
        replaceFileIfChanged(eventName + ".h", generatedEvent.declare())
        replaceFileIfChanged(eventName + ".cpp", generatedEvent.define())

    mk = Makefile()
    # NOTE: it's VERY important that we output dependencies for the FooBinding
    # file here, not for the header or generated cpp file. These dependencies
    # are used later to properly determine changedDeps and prevent rebuilding
    # too much. See the comment explaining $(binding_dependency_trackers) in
    # Makefile.in.
    rule = mk.create_rule([outputprefix])
    rule.add_dependencies(os.path.join(srcprefix, x) for x in root.deps())
    rule.add_dependencies(iter_modules_in_path(topsrcdir))
    with open(depsname, 'w') as f:
        mk.dump(f)
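All of the examples in this listing follow the same small pattern: build a Makefile object, attach one or more rules with dependencies, and dump the result as a make-includable dependency file. The sketch below distills that pattern; the import path assumes the Makefile class used above is mozbuild.makeutil.Makefile (the imports are not shown in these snippets), and the target/dependency names are made up for illustration.

# Minimal sketch of the shared pattern (hypothetical names; assumes
# mozbuild.makeutil provides the Makefile class used in these examples).
import sys
from mozbuild.makeutil import Makefile

mk = Makefile()
rule = mk.create_rule(['FooBinding'])                # target(s) of the rule
rule.add_dependencies(['Foo.webidl', 'Codegen.py'])  # prerequisites
# dump() writes make syntax to the file object; removal_guard=True (the
# default) also emits bare rules for the dependencies so make does not fail
# if a prerequisite file is later removed.
mk.dump(sys.stdout, removal_guard=True)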
def InvokeClWithDependencyGeneration(cmdline):
    target = ""
    # Figure out what the target is
    for arg in cmdline:
        if arg.startswith("-Fo"):
            target = arg[3:]
            break

    if not target:
        print >>sys.stderr, "No target set"
        sys.exit(1)

    # Assume the source file is the last argument
    source = cmdline[-1]
    assert not source.startswith('-')

    # The deps target lives here
    depstarget = os.path.basename(target) + ".pp"

    cmdline += ['-showIncludes']
    cl = subprocess.Popen(cmdline, stdout=subprocess.PIPE)

    mk = Makefile()
    rule = mk.create_rule([target])
    rule.add_dependencies([normcase(source)])
    for line in cl.stdout:
        # cl -showIncludes prefixes every header with "Note: including file:"
        # and an indentation corresponding to the depth (which we don't need)
        if line.startswith(CL_INCLUDES_PREFIX):
            dep = line[len(CL_INCLUDES_PREFIX):].strip()
            # We can't handle paths with spaces properly in mddepend.pl, but
            # we can assume that anything in a path with spaces is a system
            # header and throw it away.
            dep = normcase(dep)
            if ' ' not in dep:
                rule.add_dependencies([dep])
        else:
            sys.stdout.write(line)  # Make sure we preserve the relevant output
                                    # from cl

    ret = cl.wait()
    if ret != 0 or target == "":
        sys.exit(ret)

    depsdir = os.path.normpath(os.path.join(os.curdir, ".deps"))
    depstarget = os.path.join(depsdir, depstarget)
    if not os.path.isdir(depsdir):
        try:
            os.makedirs(depsdir)
        except OSError:
            pass  # This suppresses the error we get when the dir exists, at the
                  # cost of masking failure to create the directory. We'll just
                  # die on the next line though, so it's not that much of a loss.

    with open(depstarget, "w") as f:
        mk.dump(f)
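For reference, a call such as the following would exercise this wrapper; the file names are hypothetical, and it is only meaningful on Windows with MSVC's cl.exe available, since the function actually spawns the compiler.

# Hypothetical invocation: compile widget.cpp to widget.obj and write the
# headers reported via -showIncludes to .deps/widget.obj.pp.
InvokeClWithDependencyGeneration(['cl.exe', '-c', '-Fowidget.obj', 'widget.cpp'])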
Example 5: main
def main(argv):
    parser = argparse.ArgumentParser('Generate a file from a Python script',
                                     add_help=False)
    parser.add_argument('python_script', metavar='python-script', type=str,
                        help='The Python script to run')
    parser.add_argument('method_name', metavar='method-name', type=str,
                        help='The method of the script to invoke')
    parser.add_argument('output_file', metavar='output-file', type=str,
                        help='The file to generate')
    parser.add_argument('dep_file', metavar='dep-file', type=str,
                        help='File to write any additional make dependencies to')
    parser.add_argument('additional_arguments', metavar='arg', nargs='*',
                        help="Additional arguments to the script's main() method")

    args = parser.parse_args(argv)

    script = args.python_script
    # Permit the script to import modules from the same directory in which it
    # resides. The justification for doing this is that if we were invoking
    # the script as:
    #
    #    python script arg1...
    #
    # then importing modules from the script's directory would come for free.
    # Since we're invoking the script in a roundabout way, we provide this
    # bit of convenience.
    sys.path.append(os.path.dirname(script))

    with open(script, 'r') as fh:
        module = imp.load_module('script', fh, script,
                                 ('.py', 'r', imp.PY_SOURCE))
    method = args.method_name
    if not hasattr(module, method):
        print('Error: script "{0}" is missing a {1} method'.format(script, method),
              file=sys.stderr)
        return 1

    ret = 1
    try:
        with FileAvoidWrite(args.output_file) as output:
            ret = module.__dict__[method](output, *args.additional_arguments)
            # We treat sets as a statement of success. Everything else
            # is an error (so scripts can conveniently |return 1| or
            # similar).
            if isinstance(ret, set) and ret:
                mk = Makefile()
                mk.create_rule([args.output_file]).add_dependencies(ret)
                with FileAvoidWrite(args.dep_file) as dep_file:
                    mk.dump(dep_file)
                # The script succeeded, so reset |ret| to indicate that.
                ret = None
    except IOError as e:
        print('Error opening file "{0}"'.format(e.filename), file=sys.stderr)
        traceback.print_exc()
        return 1

    return ret
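As a concrete illustration of how this wrapper is driven, the build system would call it roughly as below; the script name, method, output file, and dep file are hypothetical, chosen only to show how the positional arguments line up with the argparse definition above.

# Hypothetical invocation: run gen_header.py's generate(output, 'extra-arg')
# and record any returned dependencies in gen.h.pp.
exit_code = main(['gen_header.py', 'generate', 'gen.h', 'gen.h.pp', 'extra-arg'])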
def dump(self, fh, removal_guard=True):
    rules = {}
    for t, (depfile, deps) in self._targets.items():
        if self._group == Grouping.BY_DEPFILE:
            if depfile not in rules:
                rules[depfile] = self.create_rule([depfile])
            rules[depfile].add_dependencies(
                d if d not in self._targets else self._targets[d][0]
                for d in deps)
        elif self._group == Grouping.ALL_TARGETS:
            if 'all' not in rules:
                rules['all'] = self.create_rule()
            rules['all'].add_targets([t]).add_dependencies(deps)
        elif self._group == Grouping.NO:
            self.create_rule([t]).add_dependencies(deps)
    Makefile.dump(self, fh, removal_guard)
def consume_finished(self):
    mk = Makefile()
    # Add the default rule at the very beginning.
    mk.create_rule(['default'])
    mk.add_statement('TOPSRCDIR = %s' % self.environment.topsrcdir)
    mk.add_statement('TOPOBJDIR = %s' % self.environment.topobjdir)

    # Add a few necessary variables inherited from configure
    for var in (
        'PYTHON',
        'ACDEFINES',
        'MOZ_BUILD_APP',
        'MOZ_WIDGET_TOOLKIT',
    ):
        mk.add_statement('%s = %s' % (var, self.environment.substs[var]))

    # Add information for chrome manifest generation
    manifest_targets = []

    for target, entries in self._manifest_entries.iteritems():
        manifest_targets.append(target)
        target = '$(TOPOBJDIR)/%s' % target
        mk.create_rule([target]).add_dependencies(
            ['content = %s' % ' '.join('"%s"' % e for e in entries)])

    mk.add_statement('MANIFEST_TARGETS = %s' % ' '.join(manifest_targets))

    # Add information for install manifests.
    mk.add_statement('INSTALL_MANIFESTS = %s'
                     % ' '.join(self._install_manifests.keys()))

    # Add dependencies we inferred:
    for target, deps in self._dependencies.iteritems():
        mk.create_rule([target]).add_dependencies(
            '$(TOPOBJDIR)/%s' % d for d in deps)

    mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk')

    for base, install_manifest in self._install_manifests.iteritems():
        with self._write_file(
                mozpath.join(self.environment.topobjdir, 'faster',
                             'install_%s' % base.replace('/', '_'))) as fh:
            install_manifest.write(fileobj=fh)

    with self._write_file(
            mozpath.join(self.environment.topobjdir, 'faster',
                         'Makefile')) as fh:
        mk.dump(fh, removal_guard=False)
def consume_finished(self):
    mk = Makefile()
    # Add the default rule at the very beginning.
    mk.create_rule(["default"])
    mk.add_statement("TOPSRCDIR = %s" % self.environment.topsrcdir)
    mk.add_statement("TOPOBJDIR = %s" % self.environment.topobjdir)

    # Add a few necessary variables inherited from configure
    for var in ("PYTHON", "ACDEFINES", "MOZ_CHROME_FILE_FORMAT"):
        mk.add_statement("%s = %s" % (var, self.environment.substs[var]))

    # Add all necessary information for jar manifest processing
    jar_mn_targets = []

    for path, (objdir, install_target, defines) in self._jar_manifests.iteritems():
        rel_manifest = mozpath.relpath(path, self.environment.topsrcdir)
        target = rel_manifest.replace("/", "-")
        assert target not in jar_mn_targets
        jar_mn_targets.append(target)
        target = "jar-%s" % target
        mk.create_rule([target]).add_dependencies([path])
        if objdir != mozpath.join(self.environment.topobjdir, mozpath.dirname(rel_manifest)):
            mk.create_rule([target]).add_dependencies(["objdir = %s" % objdir])
        if install_target != "dist/bin":
            mk.create_rule([target]).add_dependencies(["install_target = %s" % install_target])
        if defines:
            mk.create_rule([target]).add_dependencies(["defines = %s" % " ".join(defines)])

    mk.add_statement("JAR_MN_TARGETS = %s" % " ".join(jar_mn_targets))

    # Add information for chrome manifest generation
    manifest_targets = []

    for target, entries in self._manifest_entries.iteritems():
        manifest_targets.append(target)
        target = "$(TOPOBJDIR)/%s" % target
        mk.create_rule([target]).add_dependencies(
            ["content = %s" % " ".join('"%s"' % e for e in entries)])

    mk.add_statement("MANIFEST_TARGETS = %s" % " ".join(manifest_targets))

    # Add information for preprocessed files.
    preprocess_targets = []

    for target, (srcdir, f, defines) in self._preprocess_files.iteritems():
        # This matches what PP_TARGETS do in config/rules.
        if target.endswith(".in"):
            target = target[:-3]
            # PP_TARGETS assumes this is true, but doesn't enforce it.
            assert target not in self._preprocess_files
        preprocess_targets.append(target)
        target = "$(TOPOBJDIR)/%s" % target
        mk.create_rule([target]).add_dependencies([mozpath.join(srcdir, f)])
        if defines:
            mk.create_rule([target]).add_dependencies(["defines = %s" % " ".join(defines)])

    mk.add_statement("PP_TARGETS = %s" % " ".join(preprocess_targets))

    # Add information for install manifests.
    mk.add_statement("INSTALL_MANIFESTS = %s" % " ".join(self._install_manifests.keys()))

    mk.add_statement("include $(TOPSRCDIR)/config/faster/rules.mk")

    for base, install_manifest in self._install_manifests.iteritems():
        with self._write_file(
            mozpath.join(self.environment.topobjdir, "faster", "install_%s" % base.replace("/", "_"))
        ) as fh:
            install_manifest.write(fileobj=fh)

    with self._write_file(mozpath.join(self.environment.topobjdir, "faster", "Makefile")) as fh:
        mk.dump(fh, removal_guard=False)
def consume_finished(self):
    mk = Makefile()
    # Add the default rule at the very beginning.
    mk.create_rule(['default'])
    mk.add_statement('TOPSRCDIR = %s' % self.environment.topsrcdir)
    mk.add_statement('TOPOBJDIR = %s' % self.environment.topobjdir)
    mk.add_statement('BACKEND = %s' % self._backend_output_list_file)
    if not self._has_xpidl:
        mk.add_statement('NO_XPIDL = 1')

    # Add a few necessary variables inherited from configure
    for var in (
        'PYTHON',
        'ACDEFINES',
        'MOZ_BUILD_APP',
        'MOZ_WIDGET_TOOLKIT',
    ):
        mk.add_statement('%s = %s' % (var, self.environment.substs[var]))

    install_manifests_bases = self._install_manifests.keys()

    # Add information for chrome manifest generation
    manifest_targets = []

    for target, entries in self._manifest_entries.iteritems():
        manifest_targets.append(target)
        install_target = mozpath.basedir(target, install_manifests_bases)
        self._install_manifests[install_target].add_content(
            ''.join('%s\n' % e for e in sorted(entries)),
            mozpath.relpath(target, install_target))

    # Add information for install manifests.
    mk.add_statement('INSTALL_MANIFESTS = %s'
                     % ' '.join(self._install_manifests.keys()))

    # Add dependencies we inferred:
    for target, deps in self._dependencies.iteritems():
        mk.create_rule([target]).add_dependencies(
            '$(TOPOBJDIR)/%s' % d for d in deps)

    # Add backend dependencies:
    mk.create_rule([self._backend_output_list_file]).add_dependencies(
        self.backend_input_files)

    mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk')

    for base, install_manifest in self._install_manifests.iteritems():
        with self._write_file(
                mozpath.join(self.environment.topobjdir, 'faster',
                             'install_%s' % base.replace('/', '_'))) as fh:
            install_manifest.write(fileobj=fh)

    with self._write_file(
            mozpath.join(self.environment.topobjdir, 'faster',
                         'Makefile')) as fh:
        mk.dump(fh, removal_guard=False)
    if options.verbose:
        print_command(sys.stderr, args)
    try:
        proc = subprocess.Popen(args, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
    except Exception, e:
        print >>sys.stderr, 'error: Launching', args, ':', e
        raise e
    (stdout, stderr) = proc.communicate()
    if proc.returncode and not options.verbose:
        print_command(sys.stderr, args)
    sys.stderr.write(stdout)
    sys.stderr.flush()
    if proc.returncode:
        exit(proc.returncode)

    if not options.depend:
        return
    ensureParentDir(options.depend)
    mk = Makefile()
    deps = [dep for dep in deps if os.path.isfile(dep) and dep != options.target
            and os.path.abspath(dep) != os.path.abspath(options.depend)]
    no_dynamic_lib = [dep for dep in deps if not isDynamicLib(dep)]
    mk.create_rule([options.target]).add_dependencies(no_dynamic_lib)
    if len(deps) != len(no_dynamic_lib):
        mk.create_rule(['%s_order_only' % options.target]).add_dependencies(
            dep for dep in deps if isDynamicLib(dep))

    with open(options.depend, 'w') as depfile:
        mk.dump(depfile, removal_guard=True)

if __name__ == '__main__':
    main()
def config_status(config):
    # Sanitize config data to feed config.status.
    # Ideally, all the backend and frontend code would handle the booleans, but
    # there are so many things involved that it's easier to keep config.status
    # untouched for now.
    def sanitized_bools(v):
        if v is True:
            return '1'
        if v is False:
            return ''
        return v

    sanitized_config = {}
    sanitized_config['substs'] = {
        k: sanitized_bools(v) for k, v in config.iteritems()
        if k not in ('DEFINES', 'non_global_defines', 'TOPSRCDIR', 'TOPOBJDIR',
                     'ALL_CONFIGURE_PATHS')
    }
    sanitized_config['defines'] = {
        k: sanitized_bools(v) for k, v in config['DEFINES'].iteritems()
    }
    sanitized_config['non_global_defines'] = config['non_global_defines']
    sanitized_config['topsrcdir'] = config['TOPSRCDIR']
    sanitized_config['topobjdir'] = config['TOPOBJDIR']
    sanitized_config['mozconfig'] = config.get('MOZCONFIG')

    # Create config.status. Eventually, we'll want to just do the work it does
    # here, when we're able to skip configure tests/use cached results/not rely
    # on autoconf.
    print("Creating config.status", file=sys.stderr)
    encoding = 'mbcs' if sys.platform == 'win32' else 'utf-8'
    with codecs.open('config.status', 'w', encoding) as fh:
        fh.write(textwrap.dedent('''\
            #!%(python)s
            # coding=%(encoding)s
            from __future__ import unicode_literals
            from mozbuild.util import encode
            encoding = '%(encoding)s'
        ''') % {'python': config['PYTHON'], 'encoding': encoding})
        # A lot of the build backend code is currently expecting byte
        # strings and breaks in subtle ways with unicode strings. (bug 1296508)
        for k, v in sanitized_config.iteritems():
            fh.write('%s = encode(%s, encoding)\n' % (k, indented_repr(v)))
        fh.write("__all__ = ['topobjdir', 'topsrcdir', 'defines', "
                 "'non_global_defines', 'substs', 'mozconfig']")

        if config.get('MOZ_BUILD_APP') != 'js' or config.get('JS_STANDALONE'):
            fh.write(textwrap.dedent('''
                if __name__ == '__main__':
                    from mozbuild.util import patch_main
                    patch_main()
                    from mozbuild.config_status import config_status
                    args = dict([(name, globals()[name]) for name in __all__])
                    config_status(**args)
            '''))

    partial_config = PartialConfigEnvironment(config['TOPOBJDIR'])
    partial_config.write_vars(sanitized_config)

    # Write out a depfile so Make knows to re-run configure when relevant Python
    # changes.
    mk = Makefile()
    rule = mk.create_rule()
    rule.add_targets(["%s/config.status" % config['TOPOBJDIR']])
    rule.add_dependencies(itertools.chain(config['ALL_CONFIGURE_PATHS'],
                                          iter_modules_in_path(config['TOPOBJDIR'],
                                                               config['TOPSRCDIR'])))
    with open('configure.d', 'w') as fh:
        mk.dump(fh)

    # Other things than us are going to run this file, so we need to give it
    # executable permissions.
    os.chmod('config.status', 0o755)

    if config.get('MOZ_BUILD_APP') != 'js' or config.get('JS_STANDALONE'):
        os.environ[b'WRITE_MOZINFO'] = b'1'
        from mozbuild.config_status import config_status

        # Some values in sanitized_config also have more complex types, such as
        # EnumString, which would currently break the build if passed to
        # config_status, as well as making it inconsistent with re-running
        # config.status. Fortunately, EnumString derives from unicode, so it's
        # covered by converting unicode strings.
        # A lot of the build backend code is currently expecting byte strings
        # and breaks in subtle ways with unicode strings.
        return config_status(args=[], **encode(sanitized_config, encoding))
    return 0
def main(argv):
    parser = argparse.ArgumentParser('Generate a file from a Python script',
                                     add_help=False)
    parser.add_argument('python_script', metavar='python-script', type=str,
                        help='The Python script to run')
    parser.add_argument('method_name', metavar='method-name', type=str,
                        help='The method of the script to invoke')
    parser.add_argument('output_file', metavar='output-file', type=str,
                        help='The file to generate')
    parser.add_argument('dep_file', metavar='dep-file', type=str,
                        help='File to write any additional make dependencies to')
    parser.add_argument('additional_arguments', metavar='arg',
                        nargs=argparse.REMAINDER,
                        help="Additional arguments to the script's main() method")

    args = parser.parse_args(argv)

    script = args.python_script
    # Permit the script to import modules from the same directory in which it
    # resides. The justification for doing this is that if we were invoking
    # the script as:
    #
    #    python script arg1...
    #
    # then importing modules from the script's directory would come for free.
    # Since we're invoking the script in a roundabout way, we provide this
    # bit of convenience.
    sys.path.append(os.path.dirname(script))

    with open(script, 'r') as fh:
        module = imp.load_module('script', fh, script,
                                 ('.py', 'r', imp.PY_SOURCE))
    method = args.method_name
    if not hasattr(module, method):
        print('Error: script "{0}" is missing a {1} method'.format(script, method),
              file=sys.stderr)
        return 1

    ret = 1
    try:
        with FileAvoidWrite(args.output_file) as output:
            ret = module.__dict__[method](output, *args.additional_arguments)
            # The following values indicate a statement of success:
            #  - a set() (see below)
            #  - 0
            #  - False
            #  - None
            #
            # Everything else is an error (so scripts can conveniently |return
            # 1| or similar). If a set is returned, the elements of the set
            # indicate additional dependencies that will be listed in the deps
            # file. Python module imports are automatically included as
            # dependencies.
            if isinstance(ret, set):
                deps = ret
                # The script succeeded, so reset |ret| to indicate that.
                ret = None
            else:
                deps = set()

            # Only write out the dependencies if the script was successful
            if not ret:
                # Add dependencies on any python modules that were imported by
                # the script.
                deps |= set(iter_modules_in_path(buildconfig.topsrcdir,
                                                 buildconfig.topobjdir))
                mk = Makefile()
                mk.create_rule([args.output_file]).add_dependencies(deps)
                with FileAvoidWrite(args.dep_file) as dep_file:
                    mk.dump(dep_file)

        # Even when our file's contents haven't changed, we want to update
        # the file's mtime so make knows this target isn't still older than
        # whatever prerequisite caused it to be built this time around.
        try:
            os.utime(args.output_file, None)
        except:
            print('Error processing file "{0}"'.format(args.output_file),
                  file=sys.stderr)
            traceback.print_exc()
    except IOError as e:
        print('Error opening file "{0}"'.format(e.filename), file=sys.stderr)
        traceback.print_exc()
        return 1

    return ret
def main(argv):
    parser = argparse.ArgumentParser("Generate a file from a Python script", add_help=False)
    parser.add_argument("python_script", metavar="python-script", type=str, help="The Python script to run")
    parser.add_argument("method_name", metavar="method-name", type=str, help="The method of the script to invoke")
    parser.add_argument("output_file", metavar="output-file", type=str, help="The file to generate")
    parser.add_argument(
        "dep_file", metavar="dep-file", type=str, help="File to write any additional make dependencies to"
    )
    parser.add_argument(
        "additional_arguments",
        metavar="arg",
        nargs=argparse.REMAINDER,
        help="Additional arguments to the script's main() method",
    )

    args = parser.parse_args(argv)

    script = args.python_script
    # Permit the script to import modules from the same directory in which it
    # resides. The justification for doing this is that if we were invoking
    # the script as:
    #
    #    python script arg1...
    #
    # then importing modules from the script's directory would come for free.
    # Since we're invoking the script in a roundabout way, we provide this
    # bit of convenience.
    sys.path.append(os.path.dirname(script))

    with open(script, "r") as fh:
        module = imp.load_module("script", fh, script, (".py", "r", imp.PY_SOURCE))
    method = args.method_name
    if not hasattr(module, method):
        print('Error: script "{0}" is missing a {1} method'.format(script, method), file=sys.stderr)
        return 1

    ret = 1
    try:
        with FileAvoidWrite(args.output_file) as output:
            ret = module.__dict__[method](output, *args.additional_arguments)
            # We treat sets as a statement of success. Everything else
            # is an error (so scripts can conveniently |return 1| or
            # similar).
            if isinstance(ret, set) and ret:
                ret |= set(iter_modules_in_path(buildconfig.topsrcdir, buildconfig.topobjdir))
                mk = Makefile()
                mk.create_rule([args.output_file]).add_dependencies(ret)
                with FileAvoidWrite(args.dep_file) as dep_file:
                    mk.dump(dep_file)
                # The script succeeded, so reset |ret| to indicate that.
                ret = None

        # Even when our file's contents haven't changed, we want to update
        # the file's mtime so make knows this target isn't still older than
        # whatever prerequisite caused it to be built this time around.
        try:
            os.utime(args.output_file, None)
        except:
            print('Error processing file "{0}"'.format(args.output_file), file=sys.stderr)
            traceback.print_exc()
    except IOError as e:
        print('Error opening file "{0}"'.format(e.filename), file=sys.stderr)
        traceback.print_exc()
        return 1

    return ret