Adding preprocess and overlay_view rules

Ben Vanik 2012-10-08 15:38:44 -07:00
parent f8a4b1f208
commit 98c5fcaaff
13 changed files with 503 additions and 221 deletions

View File

@@ -472,6 +472,22 @@ class RuleContext(object):
rel_path = os.path.relpath(module_path, root_path)
return os.path.normpath(os.path.join(base_path, rel_path, name))
def _get_root_path(self, name=None, suffix=None):
"""Gets the path of the given output in the root path.
Always prefer _get_out_path and _get_gen_path.
Args:
name: If a name is provided it will be used instead of the rule name.
suffix: Suffix to add to whatever path is built, such as '.txt' to add
an extension.
Returns:
A full path that can be used to write a file to the proper root path.
"""
base_path = self.build_context.build_env.root_path
return self.__get_target_path(base_path, name=name, suffix=suffix)
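For context, this new helper resolves outputs directly under the project root rather than the build-out/build-gen roots; a minimal sketch of the intended call pattern, mirroring how the overlay_view rule below uses it (illustrative only):
# Illustrative sketch (not part of this commit): inside a RuleContext subclass,
# resolve an output directory under the project root and register it.
output_path = self._get_root_path(name=self.rule.out)
self._ensure_output_exists(output_path)
self._append_output_paths([output_path])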
def _get_out_path(self, name=None, suffix=None):
"""Gets the 'out' path for an output.
If no name is provided then the rule name will be used.

View File

@@ -468,7 +468,7 @@ class JsDependencyGraph(object):
for src_path in src_paths:
dep_file = self.dep_files[src_path]
rel_path = os.path.relpath(dep_file.src_path, base_path)
rel_path = anvil.util.strip_build_paths(rel_path)
#rel_path = anvil.util.strip_build_paths(rel_path)
lines.append('goog.addDependency(\'%s\', %s, %s);' % (
anvil.util.ensure_forwardslashes(rel_path),
dep_file.provides, dep_file.requires))
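For reference, each emitted line follows the Closure Library deps.js format; a small sketch of what the format string above produces (values are made up):
# Illustrative example (made-up values) of one emitted dependency line:
line = 'goog.addDependency(\'%s\', %s, %s);' % (
    'my/app/foo.js', ['my.app.Foo'], ['goog.array'])
# -> goog.addDependency('my/app/foo.js', ['my.app.Foo'], ['goog.array']);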

View File

@@ -15,6 +15,7 @@ import string
from anvil.context import RuleContext
from anvil.rule import Rule, build_rule
from anvil.task import Task, ExecutableTask
import anvil.util
@build_rule('file_set')
@@ -65,18 +66,26 @@ class CopyFilesRule(Rule):
Inputs:
srcs: Source file paths.
out: Optional output path. If none is provided then the main output root
will be used.
flatten_paths: A list of paths to flatten into the root. For example,
pass ['a/'] to flatten 'a/b/c.txt' to 'b/c.txt'.
Outputs:
All of the copied files in the output path.
"""
def __init__(self, name, *args, **kwargs):
def __init__(self, name, out=None, flatten_paths=None, *args, **kwargs):
"""Initializes a copy files rule.
Args:
name: Rule name.
out: Optional output path. If none is provided the main output root is used.
flatten_paths: A list of path prefixes to strip from output paths, e.g. ['a/'].
"""
super(CopyFilesRule, self).__init__(name, *args, **kwargs)
self.out = out
self.flatten_paths = flatten_paths or []
self.flatten_paths = [path.replace('/', os.path.sep)
for path in self.flatten_paths]
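As a hedged sketch, a BUILD rule using the new out and flatten_paths parameters might look like the following (rule and file names are purely illustrative):
# Hypothetical BUILD snippet (illustrative names, not from this commit):
copy_files(
    name='copy_assets',
    srcs=['assets/img/logo.png', 'assets/css/app.css'],
    out='static',
    flatten_paths=['assets/'])
# 'assets/img/logo.png' would be copied to <out root>/static/img/logo.png.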
class _Context(RuleContext):
def begin(self):
@@ -85,7 +94,15 @@ class CopyFilesRule(Rule):
# Get all source -> output paths (and ensure directories exist)
file_pairs = []
for src_path in self.src_paths:
out_path = self._get_out_path_for_src(src_path)
rel_path = os.path.relpath(src_path, self.build_env.root_path)
rel_path = anvil.util.strip_build_paths(rel_path)
for prefix in self.rule.flatten_paths:
rel_path = rel_path.replace(prefix, '')
if self.rule.out:
out_path = os.path.join(self.rule.out, rel_path)
out_path = self._get_out_path(name=out_path)
else:
out_path = self._get_out_path_for_src(rel_path)
self._ensure_output_exists(os.path.dirname(out_path))
self._append_output_paths([out_path])
file_pairs.append((src_path, out_path))
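The path rewriting above can be restated as a standalone helper; a minimal sketch under the same assumptions (strip_build_paths omitted for brevity):
import os

def _flatten_rel_path(src_path, root_path, flatten_paths, out=None):
    # Mirrors the logic above: make the path relative to the root, strip any
    # flattened prefixes, then prepend the optional out directory.
    rel_path = os.path.relpath(src_path, root_path)
    for prefix in flatten_paths:
        rel_path = rel_path.replace(prefix, '')
    return os.path.join(out, rel_path) if out else rel_path

# _flatten_rel_path('/proj/a/b/c.txt', '/proj', ['a' + os.path.sep])
#   -> 'b/c.txt'; with out='static' it becomes 'static/b/c.txt'.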
@@ -177,173 +194,6 @@ class _ConcatFilesTask(Task):
return True
@build_rule('template_files')
class TemplateFilesRule(Rule):
"""Applies simple templating to a set of files.
Processes each source file replacing a list of strings with corresponding
strings.
This uses the Python string templating functionality documented here:
http://docs.python.org/library/string.html#template-strings
Identifiers in the source template should be of the form "${identifier}", each
of which maps to a key in the params dictionary.
In order to prevent conflicts, it is strongly encouraged that a new_extension
value is provided. If a source file has an extension it will be replaced with
the specified one, and files without extensions will have it added.
TODO(benvanik): more advanced template vars? perhaps regex?
Inputs:
srcs: Source file paths.
new_extension: The extension to replace (or add) to all output files, with a
leading dot ('.txt').
params: A dictionary of key-value replacement parameters.
Outputs:
One file for each source file with the templating rules applied.
"""
def __init__(self, name, new_extension=None, params=None, *args, **kwargs):
"""Initializes a file templating rule.
Args:
name: Rule name.
new_extension: Replacement extension ('.txt').
params: A dictionary of key-value replacement parameters.
"""
super(TemplateFilesRule, self).__init__(name, *args, **kwargs)
self.new_extension = new_extension
self.params = params
class _Context(RuleContext):
def begin(self):
super(TemplateFilesRule._Context, self).begin()
# Get all source -> output paths (and ensure directories exist)
file_pairs = []
for src_path in self.src_paths:
out_path = self._get_out_path_for_src(src_path)
if self.rule.new_extension:
out_path = os.path.splitext(out_path)[0] + self.rule.new_extension
self._ensure_output_exists(os.path.dirname(out_path))
self._append_output_paths([out_path])
file_pairs.append((src_path, out_path))
# Skip if cache hit
if self._check_if_cached():
self._succeed()
return
# Async issue templating task
d = self._run_task_async(_TemplateFilesTask(
self.build_env, file_pairs, self.rule.params))
self._chain(d)
class _TemplateFilesTask(Task):
def __init__(self, build_env, file_pairs, params, *args, **kwargs):
super(_TemplateFilesTask, self).__init__(build_env, *args, **kwargs)
self.file_pairs = file_pairs
self.params = params
def execute(self):
for file_pair in self.file_pairs:
with io.open(file_pair[0], 'rt') as f:
template_str = f.read()
template = string.Template(template_str)
result_str = template.substitute(self.params)
with io.open(file_pair[1], 'wt') as f:
f.write(result_str)
return True
@build_rule('strip_comments')
class StripCommentsRule(Rule):
"""Applies simple comment stripping to a set of files.
Processes each source file removing C/C++-style comments.
Note that this is incredibly hacky and may break in all sorts of cases.
In order to prevent conflicts, it is strongly encouraged that a new_extension
value is provided. If a source file has an extension it will be replaced with
the specified one, and files without extensions will have it added.
Inputs:
srcs: Source file paths.
new_extension: The extension to replace (or add) to all output files, with a
leading dot ('.txt').
Outputs:
One file for each source file with the comments removed.
"""
def __init__(self, name, new_extension=None, *args, **kwargs):
"""Initializes a comment stripping rule.
Args:
name: Rule name.
new_extension: Replacement extension ('.txt').
"""
super(StripCommentsRule, self).__init__(name, *args, **kwargs)
self.new_extension = new_extension
class _Context(RuleContext):
def begin(self):
super(StripCommentsRule._Context, self).begin()
# Get all source -> output paths (and ensure directories exist)
file_pairs = []
for src_path in self.src_paths:
out_path = self._get_out_path_for_src(src_path)
if self.rule.new_extension:
out_path = os.path.splitext(out_path)[0] + self.rule.new_extension
self._ensure_output_exists(os.path.dirname(out_path))
self._append_output_paths([out_path])
file_pairs.append((src_path, out_path))
# Skip if cache hit
if self._check_if_cached():
self._succeed()
return
# Async issue stripping task
d = self._run_task_async(_StripCommentsRuleTask(
self.build_env, file_pairs))
self._chain(d)
class _StripCommentsRuleTask(Task):
def __init__(self, build_env, file_pairs, *args, **kwargs):
super(_StripCommentsRuleTask, self).__init__(build_env, *args, **kwargs)
self.file_pairs = file_pairs
def execute(self):
for file_pair in self.file_pairs:
with io.open(file_pair[0], 'rt') as f:
raw_str = f.read()
# Code from Markus Jarderot, posted to stackoverflow
def replacer(match):
s = match.group(0)
if s.startswith('/'):
return ""
else:
return s
pattern = re.compile(
r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"',
re.DOTALL | re.MULTILINE)
result_str = re.sub(pattern, replacer, raw_str)
with io.open(file_pair[1], 'wt') as f:
f.write(result_str)
return True
@build_rule('shell_execute')
class ShellExecuteRule(Rule):
"""Executes a command on the shell.

View File

@@ -158,56 +158,5 @@ class ConcatFilesRuleTest(RuleTestCase):
'1\n2\n3\n4\nxworld!x\n1\n2\n3\n4\n')
class TemplateFilesRuleTest(RuleTestCase):
"""Behavioral tests of the TemplateFilesRule type."""
fixture='core_rules/template_files'
def setUp(self):
super(TemplateFilesRuleTest, self).setUp()
self.build_env = BuildEnvironment(root_path=self.root_path)
def test(self):
project = Project(module_resolver=FileModuleResolver(self.root_path))
with BuildContext(self.build_env, project) as ctx:
self.assertTrue(ctx.execute_sync([
':template_all',
':template_dep_2',
]))
self.assertRuleResultsEqual(ctx,
':template_all', ['a.txt',
'dir/b.txt'],
output_prefix='build-out')
self.assertFileContents(
os.path.join(self.root_path, 'build-out/a.txt'),
'123world456\n')
self.assertFileContents(
os.path.join(self.root_path, 'build-out/dir/b.txt'),
'b123world456\n')
self.assertRuleResultsEqual(ctx,
':template_dep_1', ['a.nfo',
'dir/b.nfo'],
output_prefix='build-out')
self.assertFileContents(
os.path.join(self.root_path, 'build-out/a.nfo'),
'123${arg2}456\n')
self.assertFileContents(
os.path.join(self.root_path, 'build-out/dir/b.nfo'),
'b123${arg2}456\n')
self.assertRuleResultsEqual(ctx,
':template_dep_2', ['a.out',
'dir/b.out'],
output_prefix='build-out')
self.assertFileContents(
os.path.join(self.root_path, 'build-out/a.out'),
'123world!456\n')
self.assertFileContents(
os.path.join(self.root_path, 'build-out/dir/b.out'),
'b123world!456\n')
if __name__ == '__main__':
unittest2.main()

View File

@@ -0,0 +1,102 @@
# Copyright 2012 Google Inc. All Rights Reserved.
"""Merged path view rules for the build system.
"""
__author__ = 'benvanik@google.com (Ben Vanik)'
import os
from anvil.context import RuleContext
from anvil.rule import Rule, build_rule
from anvil.task import Task
import anvil.util
@build_rule('overlay_view')
class OverlayViewRule(Rule):
"""Constructs or updates an view over merged paths.
This uses system symlinks to build a path that contains access to all source
paths as if they existed side-by-side. This only needs to be re-run when
file structure changes, and allows for access to files at their sources
(adding edit-reloadability).
Inputs:
srcs: Source file paths. All of the files that will be available.
out: Optional output name. If none is provided then the rule name will be
used.
flatten_paths: A list of paths to flatten into the root. For example,
pass ['a/'] to flatten 'a/b/c.txt' to 'b/c.txt'.
Outputs:
Merged directory filled with symlinks.
"""
def __init__(self, name, out=None, flatten_paths=None, *args, **kwargs):
"""Initializes an overlay view rule.
Args:
name: Rule name.
out: Optional output name. If none is provided then the rule name will be
used.
flatten_paths: A list of path prefixes to strip from linked paths, e.g. ['a/'].
"""
super(OverlayViewRule, self).__init__(name, *args, **kwargs)
self.out = out
self.flatten_paths = flatten_paths or []
self.flatten_paths = [path.replace('/', os.path.sep)
for path in self.flatten_paths]
class _Context(RuleContext):
def begin(self):
super(OverlayViewRule._Context, self).begin()
# Could, if output exists, only modify added/removed symlinks
# file_delta = self.file_delta
# file_delta.added_files
# file_delta.removed_files
# Ensure output exists
output_path = self._get_root_path(name=self.rule.out)
self._ensure_output_exists(output_path)
self._append_output_paths([output_path])
# Compute the relative path for each file
paths = []
for src_path in self.src_paths:
rel_path = os.path.relpath(src_path, self.build_env.root_path)
rel_path = anvil.util.strip_build_paths(rel_path)
for prefix in self.rule.flatten_paths:
rel_path = rel_path.replace(prefix, '')
paths.append((src_path, rel_path))
# Async issue linking task
d = self._run_task_async(_SymlinkTask(
self.build_env, paths, output_path))
self._chain(d)
class _SymlinkTask(Task):
def __init__(self, build_env, paths, output_path, *args, **kwargs):
super(_SymlinkTask, self).__init__(build_env, *args, **kwargs)
self.paths = paths
self.output_path = output_path
def execute(self):
# Tracks all exists checks on link parent paths
checked_dirs = {}
for path in self.paths:
(src_path, rel_path) = path
link_path = os.path.join(self.output_path, rel_path)
if not os.path.exists(link_path):
# Ensure parent of link path exists
link_parent = os.path.dirname(link_path)
if not checked_dirs.get(link_parent, False):
if not os.path.exists(link_parent):
os.makedirs(link_parent)
checked_dirs[link_parent] = True
os.symlink(src_path, link_path)
return True
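As an illustrative sketch, an overlay_view rule and the resulting link layout might look like this (all names assumed, not from this commit):
# Hypothetical BUILD snippet (illustrative names):
overlay_view(
    name='server_root',
    srcs=[':copy_assets', 'src/index.html'],
    flatten_paths=['src/'])
# Result: a 'server_root' directory of symlinks next to the BUILD module, e.g.
#   server_root/index.html -> src/index.html
# so edits to the sources are visible through the view without a rebuild.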

View File

@@ -0,0 +1,293 @@
# Copyright 2012 Google Inc. All Rights Reserved.
"""Simple preprocessor rules for the build system.
"""
__author__ = 'benvanik@google.com (Ben Vanik)'
import io
import os
import re
import shutil
import string
from anvil.context import RuleContext
from anvil.rule import Rule, build_rule
from anvil.task import Task, ExecutableTask
@build_rule('template_files')
class TemplateFilesRule(Rule):
"""Applies simple templating to a set of files.
Processes each source file replacing a list of strings with corresponding
strings.
This uses the Python string templating functionality documented here:
http://docs.python.org/library/string.html#template-strings
Identifiers in the source template should be of the form "${identifier}", each
of which maps to a key in the params dictionary.
In order to prevent conflicts, it is strongly encouraged that a new_extension
value is provided. If a source file has an extension it will be replaced with
the specified one, and files without extensions will have it added.
TODO(benvanik): more advanced template vars? perhaps regex?
Inputs:
srcs: Source file paths.
new_extension: The extension to replace (or add) to all output files, with a
leading dot ('.txt').
params: A dictionary of key-value replacement parameters.
Outputs:
One file for each source file with the templating rules applied.
"""
def __init__(self, name, new_extension=None, params=None, *args, **kwargs):
"""Initializes a file templating rule.
Args:
name: Rule name.
new_extension: Replacement extension ('.txt').
params: A dictionary of key-value replacement parameters.
"""
super(TemplateFilesRule, self).__init__(name, *args, **kwargs)
self.new_extension = new_extension
self.params = params
class _Context(RuleContext):
def begin(self):
super(TemplateFilesRule._Context, self).begin()
# Get all source -> output paths (and ensure directories exist)
file_pairs = []
for src_path in self.src_paths:
out_path = self._get_out_path_for_src(src_path)
if self.rule.new_extension:
out_path = os.path.splitext(out_path)[0] + self.rule.new_extension
self._ensure_output_exists(os.path.dirname(out_path))
self._append_output_paths([out_path])
file_pairs.append((src_path, out_path))
# Skip if cache hit
if self._check_if_cached():
self._succeed()
return
# Async issue templating task
d = self._run_task_async(_TemplateFilesTask(
self.build_env, file_pairs, self.rule.params))
self._chain(d)
class _TemplateFilesTask(Task):
def __init__(self, build_env, file_pairs, params, *args, **kwargs):
super(_TemplateFilesTask, self).__init__(build_env, *args, **kwargs)
self.file_pairs = file_pairs
self.params = params
def execute(self):
for file_pair in self.file_pairs:
with io.open(file_pair[0], 'rt') as f:
template_str = f.read()
template = string.Template(template_str)
result_str = template.substitute(self.params)
with io.open(file_pair[1], 'wt') as f:
f.write(result_str)
return True
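For clarity, the substitution relies on the standard library string.Template; a self-contained example of the replacement behavior:
# Standard-library behavior demonstrated outside the rule:
import string

template = string.Template('var BUILD_ID = \'${build_id}\'; // ${note}')
print(template.substitute({'build_id': '42', 'note': 'generated'}))
# -> var BUILD_ID = '42'; // generated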
@build_rule('strip_comments')
class StripCommentsRule(Rule):
"""Applies simple comment stripping to a set of files.
Processes each source file removing C/C++-style comments.
Note that this is incredibly hacky and may break in all sorts of cases.
In order to prevent conflicts, it is strongly encouraged that a new_extension
value is provided. If a source file has an extension it will be replaced with
the specified one, and files without extensions will have it added.
Inputs:
srcs: Source file paths.
new_extension: The extension to replace (or add) to all output files, with a
leading dot ('.txt').
Outputs:
One file for each source file with the comments removed.
"""
def __init__(self, name, new_extension=None, *args, **kwargs):
"""Initializes a comment stripping rule.
Args:
name: Rule name.
new_extension: Replacement extension ('.txt').
"""
super(StripCommentsRule, self).__init__(name, *args, **kwargs)
self.new_extension = new_extension
class _Context(RuleContext):
def begin(self):
super(StripCommentsRule._Context, self).begin()
# Get all source -> output paths (and ensure directories exist)
file_pairs = []
for src_path in self.src_paths:
out_path = self._get_out_path_for_src(src_path)
if self.rule.new_extension:
out_path = os.path.splitext(out_path)[0] + self.rule.new_extension
self._ensure_output_exists(os.path.dirname(out_path))
self._append_output_paths([out_path])
file_pairs.append((src_path, out_path))
# Skip if cache hit
if self._check_if_cached():
self._succeed()
return
# Async issue stripping task
d = self._run_task_async(_StripCommentsRuleTask(
self.build_env, file_pairs))
self._chain(d)
class _StripCommentsRuleTask(Task):
def __init__(self, build_env, file_pairs, *args, **kwargs):
super(_StripCommentsRuleTask, self).__init__(build_env, *args, **kwargs)
self.file_pairs = file_pairs
def execute(self):
for file_pair in self.file_pairs:
with io.open(file_pair[0], 'rt') as f:
raw_str = f.read()
# Code from Markus Jarderot, posted to stackoverflow
def replacer(match):
s = match.group(0)
if s.startswith('/'):
return ""
else:
return s
pattern = re.compile(
r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"',
re.DOTALL | re.MULTILINE)
result_str = re.sub(pattern, replacer, raw_str)
with io.open(file_pair[1], 'wt') as f:
f.write(result_str)
return True
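The comment-stripping regex can be exercised on its own; a small self-contained example using the same pattern and replacer as above:
import re

_PATTERN = re.compile(
    r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"',
    re.DOTALL | re.MULTILINE)

def strip_comments(source):
    # String literals match first and are kept; // and /* */ comments are dropped.
    return _PATTERN.sub(
        lambda m: '' if m.group(0).startswith('/') else m.group(0), source)

print(strip_comments('var url = "http://a/b"; // trailing\n/* block */ var x = 1;'))
# -> var url = "http://a/b"; \n var x = 1;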
@build_rule('preprocess')
class PreprocessRule(Rule):
"""Applies simple C-style preprocessing to a set of files.
Processes each source file, handling the built-in preprocessor directives
(#ifdef/#else/#endif/#define/#undef).
Note that this is incredibly hacky and may break in all sorts of cases.
In order to prevent conflicts, it is strongly encouraged that a new_extension
value is provided. If a source file has an extension it will be replaced with
the specified one, and files without extensions will have it added.
Inputs:
srcs: Source file paths.
new_extension: The extension to replace (or add) to all output files, with a
leading dot ('.txt').
defines: A list of symbols defined by default, for example ['DEBUG'].
Outputs:
One file for each source file after preprocessing.
"""
def __init__(self, name, new_extension=None, defines=None, *args, **kwargs):
"""Initializes a preprocessing rule.
Args:
name: Rule name.
new_extension: Replacement extension ('.txt').
defines: A list of defines.
"""
super(PreprocessRule, self).__init__(name, *args, **kwargs)
self.new_extension = new_extension
self.defines = defines[:] if defines else []
class _Context(RuleContext):
def begin(self):
super(PreprocessRule._Context, self).begin()
# Get all source -> output paths (and ensure directories exist)
file_pairs = []
for src_path in self.src_paths:
out_path = self._get_out_path_for_src(src_path)
if self.rule.new_extension:
out_path = os.path.splitext(out_path)[0] + self.rule.new_extension
self._ensure_output_exists(os.path.dirname(out_path))
self._append_output_paths([out_path])
file_pairs.append((src_path, out_path))
# Skip if cache hit
if self._check_if_cached():
self._succeed()
return
# Async issue preprocessing task
d = self._run_task_async(_PreprocessFilesTask(
self.build_env, file_pairs, self.rule.defines))
self._chain(d)
class _PreprocessFilesTask(Task):
def __init__(self, build_env, file_pairs, defines, *args, **kwargs):
super(_PreprocessFilesTask, self).__init__(build_env, *args, **kwargs)
self.file_pairs = file_pairs
self.defines = defines
def execute(self):
for file_pair in self.file_pairs:
with io.open(file_pair[0], 'rt') as f:
source_lines = f.readlines()
result_str = self._preprocess(source_lines, self.defines)
with io.open(file_pair[1], 'wt') as f:
f.write(result_str)
return True
def _preprocess(self, source_lines, global_defines):
# All defines in global + #defined in file
file_defines = set(global_defines)
# A stack of #ifdef scopes - for a given line to be included all must be
# set to true
inclusion_scopes = [True]
target_lines = []
for line in source_lines:
line_included = all(inclusion_scopes)
if line[0] == '#':
line_included = False
if line.startswith('#ifdef '):
value = line[7:].strip()
inclusion_scopes.append(value in file_defines)
elif line.startswith('#else'):
inclusion_scopes[-1] = not inclusion_scopes[-1]
elif line.startswith('#endif'):
inclusion_scopes.pop()
elif line.startswith('#define '):
value = line[8:].strip()
file_defines.add(value)
elif line.startswith('#undef '):
value = line[7:].strip()
file_defines.remove(value)
if line_included:
target_lines.append(line)
# Lines from readlines() keep their trailing newlines, so join directly.
return ''.join(target_lines)
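A sketch of how the new rule might be wired up and what the directive handling does (BUILD names are illustrative, not from this commit):
# Hypothetical BUILD snippet (illustrative names):
preprocess(
    name='config_debug',
    srcs=['src/config.template'],
    new_extension='.js',
    defines=['DEBUG'])

# Given a source file containing:
#   #ifdef DEBUG
#   log('debug build');
#   #else
#   log('release build');
#   #endif
# the DEBUG define keeps the first branch and drops all '#' directive lines;
# with defines=[] the #else branch would be emitted instead.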

View File

@@ -0,0 +1,72 @@
#!/usr/bin/python
# Copyright 2012 Google Inc. All Rights Reserved.
"""Tests for the preprocessor_rules module.
"""
__author__ = 'benvanik@google.com (Ben Vanik)'
import os
import unittest2
from anvil.context import BuildContext, BuildEnvironment, Status
from anvil.project import FileModuleResolver, Project
# Assumes RuleTestCase is exported by anvil.test alongside FixtureTestCase.
from anvil.test import FixtureTestCase, RuleTestCase
from preprocessor_rules import *
class TemplateFilesRuleTest(RuleTestCase):
"""Behavioral tests of the TemplateFilesRule type."""
fixture='core_rules/template_files'
def setUp(self):
super(TemplateFilesRuleTest, self).setUp()
self.build_env = BuildEnvironment(root_path=self.root_path)
def test(self):
project = Project(module_resolver=FileModuleResolver(self.root_path))
with BuildContext(self.build_env, project) as ctx:
self.assertTrue(ctx.execute_sync([
':template_all',
':template_dep_2',
]))
self.assertRuleResultsEqual(ctx,
':template_all', ['a.txt',
'dir/b.txt'],
output_prefix='build-out')
self.assertFileContents(
os.path.join(self.root_path, 'build-out/a.txt'),
'123world456\n')
self.assertFileContents(
os.path.join(self.root_path, 'build-out/dir/b.txt'),
'b123world456\n')
self.assertRuleResultsEqual(ctx,
':template_dep_1', ['a.nfo',
'dir/b.nfo'],
output_prefix='build-out')
self.assertFileContents(
os.path.join(self.root_path, 'build-out/a.nfo'),
'123${arg2}456\n')
self.assertFileContents(
os.path.join(self.root_path, 'build-out/dir/b.nfo'),
'b123${arg2}456\n')
self.assertRuleResultsEqual(ctx,
':template_dep_2', ['a.out',
'dir/b.out'],
output_prefix='build-out')
self.assertFileContents(
os.path.join(self.root_path, 'build-out/a.out'),
'123world!456\n')
self.assertFileContents(
os.path.join(self.root_path, 'build-out/dir/b.out'),
'b123world!456\n')
if __name__ == '__main__':
unittest2.main()