Merge pull request #1 from google/master

Merge in head from google/anvil-build.
This commit is contained in:
Joshua Harrison 2014-12-02 22:48:27 -08:00
commit d0f6e5e2f6
3 changed files with 221 additions and 170 deletions

View File

@ -116,6 +116,8 @@ class BuildContext(object):
self.stop_on_error = stop_on_error
self.raise_on_error = raise_on_error
self.error_encountered = False
# Build the rule graph
self.rule_graph = graph.RuleGraph(self.project)
@ -154,9 +156,9 @@ class BuildContext(object):
d = self.execute_async(target_rule_names)
self.wait(d)
result = [None]
def _callback():
def _callback(*args, **kwargs):
result[0] = True
def _errback():
def _errback(*args, **kwargs):
result[0] = False
d.add_callback_fn(_callback)
d.add_errback_fn(_errback)
@ -188,126 +190,88 @@ class BuildContext(object):
# Calculate the sequence of rules to execute
rule_sequence = self.rule_graph.calculate_rule_sequence(target_rule_names)
any_failed = [False]
main_deferred = Deferred()
remaining_rules = rule_sequence[:]
in_flight_rules = []
pumping = [False]
def _issue_rule(rule):
def _issue_rule(rule, deferred=None):
"""Issues a single rule into the current execution context.
Updates the in_flight_rules list and pumps when the rule completes.
Args:
rule: Rule to issue.
deferred: Deferred to wait on before executing the rule.
"""
def _rule_callback(*args, **kwargs):
in_flight_rules.remove(rule)
_pump(previous_succeeded=True)
remaining_rules.remove(rule)
def _rule_errback(exception=None, *args, **kwargs):
in_flight_rules.remove(rule)
remaining_rules.remove(rule)
if self.stop_on_error:
self.error_encountered = True
# TODO(benvanik): log result/exception/etc?
if exception: # pragma: no cover
print exception
any_failed[0] = True
_pump(previous_succeeded=False)
in_flight_rules.append(rule)
rule_deferred = self._execute_rule(rule)
# All RuleContexts should be created by the time this method is called.
assert self.rule_contexts[rule.path]
rule_deferred = self.rule_contexts[rule.path].deferred
rule_deferred.add_callback_fn(_rule_callback)
rule_deferred.add_errback_fn(_rule_errback)
def _execute(*args, **kwargs):
self._execute_rule(rule)
def _on_failure(*args, **kwards):
self._execute_rule(rule)
if deferred:
deferred.add_callback_fn(_execute)
deferred.add_errback_fn(_on_failure)
else:
_execute()
return rule_deferred
def _pump(previous_succeeded=True):
"""Attempts to run another rule and signals the main_deferred if done.
def _chain_rule_execution(target_rules):
"""Given a list of target rules, build them and all dependencies.
This method builds the passed in target rules and all dependencies. It
first assembles a list of the dependencies to target rules ordered as:
[dependencies -> target_rules]
It then traverses the list, issuing execute commands for all rules that
do not have dependencies within the list. For all rules that do have
dependencies within the list, a deferred is used to trigger the rule's
execution once all dependencies have completed executing.
Args:
previous_succeeded: Whether the previous rule succeeded.
target_rules: A list of rules to be executed.
Returns:
A deferred that resolves once all target_rules have either executed
successfully or failed.
"""
# If we're already done, gracefully exit
if main_deferred.is_done():
return
issued_rules = []
all_deferreds = []
for rule in target_rules:
# Create the RuleContexts here so that failures can cascade and the
# deferred is accessible by any rules that depend on this one.
rule_ctx = rule.create_context(self)
self.rule_contexts[rule.path] = rule_ctx
# If we failed and we are supposed to stop, gracefully stop by
# killing all future rules
# This is better than terminating immediately, as it allows legit tasks
# to finish
if any_failed[0] and self.stop_on_error:
remaining_rules[:] = []
# TODO(benvanik): better error message
main_deferred.errback()
return
if pumping[0]:
return
pumping[0] = True
# Scan through all remaining rules - if any are unblocked, issue them
to_issue = []
for i in range(0, len(remaining_rules)):
next_rule = remaining_rules[i]
# Ignore if any dependency on any rule before it in the list
skip_rule = False
if i:
for old_rule in remaining_rules[:i]:
if self.rule_graph.has_dependency(next_rule.path, old_rule.path):
# Blocked on previous rule
skip_rule = True
break
if skip_rule:
continue
# Ignore if any dependency on an in-flight rule
for in_flight_rule in in_flight_rules:
if self.rule_graph.has_dependency(next_rule.path,
in_flight_rule.path):
# Blocked on a previous rule, so pass and wait for the next pump
skip_rule = True
break
if skip_rule:
continue
# If here then we found no conflicting rules, queue for running
to_issue.append(next_rule)
# Run all rules that we can
for rule in to_issue:
remaining_rules.remove(rule)
for rule in to_issue:
_issue_rule(rule)
if (not len(remaining_rules) and
not len(in_flight_rules) and
not main_deferred.is_done()):
assert not len(remaining_rules)
# Done!
# TODO(benvanik): better errbacks? some kind of BuildResults?
if not any_failed[0]:
# Only save the cache when we have succeeded
# This causes some stuff to be rebuilt in failure cases, but prevents
# a lot of weirdness when things are partially broken
self.cache.save()
main_deferred.callback()
# Make the execution of the current rule dependent on the execution
# of all rules it depends on.
dependent_deferreds = []
for executable_rule in issued_rules:
if self.rule_graph.has_dependency(rule.path, executable_rule.path):
executable_ctx = self.rule_contexts[executable_rule.path]
dependent_deferreds.append(executable_ctx.deferred)
if dependent_deferreds:
dependent_deferred = async.gather_deferreds(
dependent_deferreds, errback_if_any_fail=True)
all_deferreds.append(_issue_rule(rule, dependent_deferred))
else:
main_deferred.errback()
all_deferreds.append(_issue_rule(rule))
return async.gather_deferreds(all_deferreds, errback_if_any_fail=True)
pumping[0] = False
# Keep the queue pumping
if not len(in_flight_rules) and len(remaining_rules):
_pump()
# Kick off execution (once for each rule as a heuristic for filling the
# pipeline)
for rule in rule_sequence:
_pump()
return main_deferred
return _chain_rule_execution(rule_sequence)
def wait(self, deferreds):
"""Blocks waiting on a list of deferreds until they all complete.
@ -330,10 +294,10 @@ class BuildContext(object):
Returns:
A Deferred that will callback when the rule has completed executing.
"""
assert not self.rule_contexts.has_key(rule.path)
rule_ctx = rule.create_context(self)
self.rule_contexts[rule.path] = rule_ctx
if rule_ctx.check_predecessor_failures():
assert self.rule_contexts.has_key(rule.path)
rule_ctx = self.rule_contexts[rule.path]
if (rule_ctx.check_predecessor_failures() or
self.stop_on_error and self.error_encountered):
return rule_ctx.cascade_failure()
else:
rule_ctx.begin()
@ -647,7 +611,7 @@ class RuleContext(object):
"""Checks all dependencies for failure.
Returns:
True if any dependency has failed.
True if any dependency has failed or been interrupted.
"""
for dep in self.rule.get_dependent_paths():
if util.is_rule_path(dep):
@ -655,7 +619,7 @@ class RuleContext(object):
dep, requesting_module=self.rule.parent_module)
other_rule_ctx = self.build_context.rule_contexts.get(
other_rule.path, None)
if other_rule_ctx.status == Status.FAILED:
if (other_rule_ctx.status == Status.FAILED):
return True
return False
@ -695,6 +659,15 @@ class RuleContext(object):
Returns:
True if no inputs or outputs have changed.
"""
# If -f (force) was passed to the BuildContext, return False.
if self.build_context.force:
return False
# If the rule does not have any source or output files, then nothing can
# be cached. Return False.
if not self.src_paths and not self.all_output_files:
return False
# If any input changed...
if self.file_delta.any_changes():
return False

View File

@ -12,6 +12,7 @@ import os
import unittest2
from anvil import async
from anvil import cache
from anvil.context import *
from anvil.module import *
from anvil.rule import *
@ -152,24 +153,96 @@ class BuildContextTest(FixtureTestCase):
results = ctx.get_rule_results('m:b')
self.assertEqual(results[0], Status.FAILED)
print '*******************************************************'
project = Project(modules=[Module('m', rules=[
FailRule('a'),
SucceedRule('b', deps=[':a'])])])
SucceedRule('b', deps=[':a']),
SucceedRule('c'),
SucceedRule('d', deps=[':c']),
SucceedRule('e', deps=[':c']),
SucceedRule('f', deps=[':d', ':e'])])])
with BuildContext(self.build_env, project, stop_on_error=True) as ctx:
d = ctx.execute_async(['m:b'])
d = ctx.execute_async(['m:b', 'm:f'])
ctx.wait(d)
self.assertErrback(d)
results = ctx.get_rule_results('m:a')
self.assertEqual(results[0], Status.FAILED)
results = ctx.get_rule_results('m:b')
self.assertEqual(results[0], Status.FAILED)
# Because m:a failed and stop_on_error is true, even though m:c is a
# succeed rule, m:d, m:e and m:f should all be FAILED as well.
results = ctx.get_rule_results('m:d')
self.assertEqual(results[0], Status.FAILED)
results = ctx.get_rule_results('m:e')
self.assertEqual(results[0], Status.FAILED)
results = ctx.get_rule_results('m:f')
self.assertEqual(results[0], Status.FAILED)
# TODO(benvanik): test stop_on_error
# TODO(benvanik): test raise_on_error
def testCaching(self):
# TODO(benvanik): test caching and force arg
pass
rule_was_cached = [False]
class OutputRule(Rule):
class _Context(RuleContext):
def begin(self):
super(OutputRule._Context, self).begin()
# Make sure an output path is defined so that caching is meaningful.
self._append_output_paths(['./output'])
rule_was_cached[0] = self._check_if_cached()
self._succeed()
class NoSourceNoOutRule(Rule):
class _Context(RuleContext):
def begin(self):
super(NoSourceNoOutRule._Context, self).begin()
rule_was_cached[0] = self._check_if_cached()
self._succeed()
file_delta = cache.FileDelta()
class TestCache(cache.RuleCache):
def compute_delta(self, rule_path, mode, src_paths):
return file_delta
rule_cache = TestCache()
project = Project(modules=[Module('m', rules=[
OutputRule('a')])])
# With no changed_files in the FileDelta, cached should return true.
with BuildContext(self.build_env, project, rule_cache=rule_cache) as ctx:
file_delta = cache.FileDelta()
d = ctx.execute_sync(['m:a'])
self.assertTrue(rule_was_cached[0])
# With a changed_files entry in the FileDelta, cached should return false.
with BuildContext(self.build_env, project, rule_cache=rule_cache) as ctx:
file_delta = cache.FileDelta()
file_delta.changed_files = ['b']
d = ctx.execute_sync(['m:a'])
self.assertFalse(rule_was_cached[0])
# If any output files were removed, cached should return false.
with BuildContext(self.build_env, project, rule_cache=rule_cache) as ctx:
file_delta = cache.FileDelta()
file_delta.removed_files = ['b']
d = ctx.execute_sync(['m:a'])
self.assertFalse(rule_was_cached[0])
# If -f was passed, cached should return false.
with BuildContext(self.build_env, project, rule_cache=rule_cache,
force=True) as ctx:
file_delta = cache.FileDelta()
file_delta.removed_files = ['b']
d = ctx.execute_sync(['m:a'])
self.assertFalse(rule_was_cached[0])
# If there are no source or output files, then the cache check should be
# short-circuited and the cache check should return false.
project = Project(modules=[Module('m', rules=[
NoSourceNoOutRule('a')])])
with BuildContext(self.build_env, project, rule_cache=rule_cache) as ctx:
file_delta = cache.FileDelta()
file_delta.removed_files = ['b']
d = ctx.execute_sync(['m:a'])
self.assertFalse(rule_was_cached[0])
def testBuild(self):
project = Project(module_resolver=FileModuleResolver(self.root_path))
@ -277,26 +350,29 @@ class RuleContextTest(FixtureTestCase):
project = Project(module_resolver=FileModuleResolver(self.root_path))
build_ctx = BuildContext(self.build_env, project)
rule = project.resolve_rule(':file_input')
d = build_ctx._execute_rule(rule)
self.assertTrue(d.is_done())
rule_outputs = build_ctx.get_rule_outputs(rule)
rule = ':file_input'
resolved = project.resolve_rule(rule)
success = build_ctx.execute_sync([rule])
self.assertTrue(success)
rule_outputs = build_ctx.get_rule_outputs(resolved)
self.assertEqual(
set([os.path.basename(f) for f in rule_outputs]),
set(['a.txt']))
rule = project.resolve_rule(':local_txt')
d = build_ctx._execute_rule(rule)
self.assertTrue(d.is_done())
rule_outputs = build_ctx.get_rule_outputs(rule)
rule = ':local_txt'
resolved = project.resolve_rule(rule)
success = build_ctx.execute_sync([rule])
self.assertTrue(success)
rule_outputs = build_ctx.get_rule_outputs(resolved)
self.assertEqual(
set([os.path.basename(f) for f in rule_outputs]),
set(['a.txt', 'b.txt', 'c.txt']))
rule = project.resolve_rule(':recursive_txt')
d = build_ctx._execute_rule(rule)
self.assertTrue(d.is_done())
rule_outputs = build_ctx.get_rule_outputs(rule)
rule = ':recursive_txt'
resolved = project.resolve_rule(rule)
success = build_ctx.execute_sync([rule])
self.assertTrue(success)
rule_outputs = build_ctx.get_rule_outputs(resolved)
self.assertEqual(
set([os.path.basename(f) for f in rule_outputs]),
set(['a.txt', 'b.txt', 'c.txt', 'd.txt', 'e.txt']))
@ -305,63 +381,67 @@ class RuleContextTest(FixtureTestCase):
project = Project(module_resolver=FileModuleResolver(self.root_path))
build_ctx = BuildContext(self.build_env, project)
rule = project.resolve_rule(':missing_txt')
rule = ':missing_txt'
with self.assertRaises(OSError):
build_ctx._execute_rule(rule)
build_ctx.execute_sync([rule])
rule = project.resolve_rule(':missing_glob_txt')
d = build_ctx._execute_rule(rule)
self.assertTrue(d.is_done())
rule_outputs = build_ctx.get_rule_outputs(rule)
rule = ':missing_glob_txt'
resolved = project.resolve_rule(rule)
success = build_ctx.execute_sync([rule])
self.assertTrue(success)
rule_outputs = build_ctx.get_rule_outputs(resolved)
self.assertEqual(len(rule_outputs), 0)
rule = project.resolve_rule(':local_txt_filter')
d = build_ctx._execute_rule(rule)
self.assertTrue(d.is_done())
rule_outputs = build_ctx.get_rule_outputs(rule)
rule = ':local_txt_filter'
resolved = project.resolve_rule(rule)
success = build_ctx.execute_sync([rule])
self.assertTrue(success)
rule_outputs = build_ctx.get_rule_outputs(resolved)
self.assertEqual(
set([os.path.basename(f) for f in rule_outputs]),
set(['a.txt', 'b.txt', 'c.txt']))
rule = project.resolve_rule(':recursive_txt_filter')
d = build_ctx._execute_rule(rule)
self.assertTrue(d.is_done())
rule_outputs = build_ctx.get_rule_outputs(rule)
rule = ':recursive_txt_filter'
resolved = project.resolve_rule(rule)
success = build_ctx.execute_sync([rule])
self.assertTrue(success)
rule_outputs = build_ctx.get_rule_outputs(resolved)
self.assertEqual(
set([os.path.basename(f) for f in rule_outputs]),
set(['a.txt', 'b.txt', 'c.txt', 'd.txt', 'e.txt']))
rule = project.resolve_rule(':exclude_txt_filter')
d = build_ctx._execute_rule(rule)
self.assertTrue(d.is_done())
rule_outputs = build_ctx.get_rule_outputs(rule)
rule = ':exclude_txt_filter'
resolved = project.resolve_rule(rule)
success = build_ctx.execute_sync([rule])
self.assertTrue(success)
rule_outputs = build_ctx.get_rule_outputs(resolved)
self.assertEqual(
set([os.path.basename(f) for f in rule_outputs]),
set(['dir_2', 'a.txt-a', 'b.txt-b', 'c.txt-c', 'g.not-txt', 'BUILD']))
rule = project.resolve_rule(':include_exclude_filter')
d = build_ctx._execute_rule(rule)
self.assertTrue(d.is_done())
rule_outputs = build_ctx.get_rule_outputs(rule)
rule = ':include_exclude_filter'
resolved = project.resolve_rule(rule)
success = build_ctx.execute_sync([rule])
self.assertTrue(success)
rule_outputs = build_ctx.get_rule_outputs(resolved)
self.assertEqual(
set([os.path.basename(f) for f in rule_outputs]),
set(['a.txt-a', 'b.txt-b']))
rule = project.resolve_rule(':multi_exts')
build_ctx._execute_rule(rule)
rule = project.resolve_rule(':only_a')
d = build_ctx._execute_rule(rule)
self.assertTrue(d.is_done())
rule_outputs = build_ctx.get_rule_outputs(rule)
rule = ':only_a'
resolved = project.resolve_rule(rule)
success = build_ctx.execute_sync([rule])
self.assertTrue(success)
rule_outputs = build_ctx.get_rule_outputs(resolved)
self.assertEqual(
set([os.path.basename(f) for f in rule_outputs]),
set(['a.txt-a']))
rule = project.resolve_rule(':only_ab')
d = build_ctx._execute_rule(rule)
self.assertTrue(d.is_done())
rule_outputs = build_ctx.get_rule_outputs(rule)
rule = ':only_ab'
resolved = project.resolve_rule(rule)
success = build_ctx.execute_sync([rule])
self.assertTrue(success)
rule_outputs = build_ctx.get_rule_outputs(resolved)
self.assertEqual(
set([os.path.basename(f) for f in rule_outputs]),
set(['a.txt-a', 'b.txt-b']))
@ -370,36 +450,33 @@ class RuleContextTest(FixtureTestCase):
project = Project(module_resolver=FileModuleResolver(self.root_path))
build_ctx = BuildContext(self.build_env, project)
rule = project.resolve_rule(':file_input')
d = build_ctx._execute_rule(rule)
self.assertTrue(d.is_done())
rule = ':file_input'
resolved = project.resolve_rule(rule)
success = build_ctx.execute_sync([rule])
self.assertTrue(success)
rule_outputs = build_ctx.get_rule_outputs(rule)
self.assertNotEqual(len(rule_outputs), 0)
rule = project.resolve_rule(':rule_input')
d = build_ctx._execute_rule(rule)
self.assertTrue(d.is_done())
rule = ':rule_input'
resolved = project.resolve_rule(rule)
success = build_ctx.execute_sync([rule])
self.assertTrue(success)
rule_outputs = build_ctx.get_rule_outputs(rule)
self.assertEqual(
set([os.path.basename(f) for f in rule_outputs]),
set(['a.txt']))
rule = project.resolve_rule(':mixed_input')
d = build_ctx._execute_rule(rule)
self.assertTrue(d.is_done())
rule = ':mixed_input'
resolved = project.resolve_rule(rule)
success = build_ctx.execute_sync([rule])
self.assertTrue(success)
rule_outputs = build_ctx.get_rule_outputs(rule)
self.assertEqual(
set([os.path.basename(f) for f in rule_outputs]),
set(['a.txt', 'b.txt']))
rule = project.resolve_rule(':missing_input')
with self.assertRaises(KeyError):
build_ctx._execute_rule(rule)
build_ctx = BuildContext(self.build_env, project)
rule = project.resolve_rule(':rule_input')
with self.assertRaises(RuntimeError):
build_ctx._execute_rule(rule)
build_ctx.execute_sync([':missing_input'])
def _compare_path(self, result, expected):
result = os.path.relpath(result, self.root_path)

View File

@ -151,7 +151,8 @@ class ExecutableTask(Task):
call_args: Arguments to pass to the executable.
env: Additional environment variables.
"""
super(ExecutableTask, self).__init__(build_env, *args, **kwargs)
super(ExecutableTask, self).__init__(
build_env, executable_name, *args, **kwargs)
self.executable_name = executable_name
self.call_args = call_args[:] if call_args else []
self.env = env.copy() if env else {}