ICU-20300 Changing buildtool namedtuples to polymorphic classes.

This commit is contained in:
Shane Carr 2018-12-13 00:56:13 -08:00 committed by Shane F. Carr
parent 2666d18e54
commit 1b5c721ef2
8 changed files with 347 additions and 453 deletions

View file

@ -9,6 +9,7 @@ from distutils.sysconfig import parse_makefile
from buildtool import *
from buildtool import utils
from buildtool.request_types import *
import sys
@ -164,7 +165,7 @@ def generate_cnvalias(config, glob, common_vars):
SingleExecutionRequest(
name = "cnvalias",
category = "cnvalias",
dep_files = [],
dep_targets = [],
input_files = [input_file],
output_files = [output_file],
tool = IcuTool("gencnval"),
@ -184,7 +185,7 @@ def generate_confusables(config, glob, common_vars):
SingleExecutionRequest(
name = "confusables",
category = "confusables",
dep_files = [DepTarget("cnvalias")],
dep_targets = [DepTarget("cnvalias")],
input_files = [txt1, txt2],
output_files = [cfu],
tool = IcuTool("gencfu"),
@ -205,7 +206,7 @@ def generate_conversion_mappings(config, glob, common_vars):
RepeatedOrSingleExecutionRequest(
name = "conversion_mappings",
category = "conversion_mappings",
dep_files = [],
dep_targets = [],
input_files = input_files,
output_files = output_files,
tool = IcuTool("makeconv"),
@ -226,7 +227,7 @@ def generate_brkitr_brk(config, glob, common_vars):
RepeatedExecutionRequest(
name = "brkitr_brk",
category = "brkitr_rules",
dep_files = [DepTarget("cnvalias")],
dep_targets = [DepTarget("cnvalias")],
input_files = input_files,
output_files = output_files,
tool = IcuTool("genbrk"),
@ -248,7 +249,7 @@ def generate_stringprep(config, glob, common_vars):
RepeatedExecutionRequest(
name = "stringprep",
category = "stringprep",
dep_files = [],
dep_targets = [],
input_files = input_files,
output_files = output_files,
tool = IcuTool("gensprep"),
@ -278,7 +279,7 @@ def generate_brkitr_dictionaries(config, glob, common_vars):
RepeatedExecutionRequest(
name = "dictionaries",
category = "brkitr_dictionaries",
dep_files = [],
dep_targets = [],
input_files = input_files,
output_files = output_files,
tool = IcuTool("gendict"),
@ -302,7 +303,7 @@ def generate_normalization(config, glob, common_vars):
RepeatedExecutionRequest(
name = "normalization",
category = "normalization",
dep_files = [],
dep_targets = [],
input_files = input_files,
output_files = output_files,
tool = IcuTool("icupkg"),
@ -321,7 +322,7 @@ def generate_coll_ucadata(config, glob, common_vars):
SingleExecutionRequest(
name = "coll_ucadata",
category = "coll_ucadata",
dep_files = [],
dep_targets = [],
input_files = [input_file],
output_files = [output_file],
tool = IcuTool("icupkg"),
@ -339,7 +340,7 @@ def generate_unames(config, glob, common_vars):
SingleExecutionRequest(
name = "unames",
category = "unames",
dep_files = [],
dep_targets = [],
input_files = [input_file],
output_files = [output_file],
tool = IcuTool("icupkg"),
@ -358,7 +359,7 @@ def generate_misc(config, glob, common_vars):
RepeatedExecutionRequest(
name = "misc_res",
category = "misc",
dep_files = [],
dep_targets = [],
input_files = input_files,
output_files = output_files,
tool = IcuTool("genrb"),
@ -382,7 +383,7 @@ def generate_curr_supplemental(config, glob, common_vars):
SingleExecutionRequest(
name = "curr_supplemental_res",
category = "curr_supplemental",
dep_files = [],
dep_targets = [],
input_files = [input_file],
output_files = [output_file],
tool = IcuTool("genrb"),
@ -411,7 +412,7 @@ def generate_translit(config, glob, common_vars):
RepeatedOrSingleExecutionRequest(
name = "translit_res",
category = "translit",
dep_files = [],
dep_targets = [],
input_files = input_files,
output_files = output_files,
tool = IcuTool("genrb"),
@ -437,7 +438,7 @@ def generate_tree(
version_var,
source_var,
use_pool_bundle,
dep_files):
dep_targets):
requests = []
category = "%s_tree" % sub_dir
out_prefix = "%s/" % out_sub_dir if out_sub_dir else ""
@ -463,7 +464,7 @@ def generate_tree(
SingleExecutionRequest(
name = pool_target_name,
category = category,
dep_files = dep_files,
dep_targets = dep_targets,
input_files = input_files,
output_files = input_pool_files,
tool = IcuTool("genrb"),
@ -477,7 +478,7 @@ def generate_tree(
}
),
]
dep_files = dep_files + [DepTarget(pool_target_name)]
dep_targets = dep_targets + [DepTarget(pool_target_name)]
else:
use_pool_bundle_option = ""
@ -486,7 +487,7 @@ def generate_tree(
RepeatedOrSingleExecutionRequest(
name = "%s_res" % sub_dir,
category = category,
dep_files = dep_files,
dep_targets = dep_targets,
input_files = input_files,
output_files = output_files,
tool = IcuTool("genrb"),
@ -542,7 +543,7 @@ def generate_tree(
SingleExecutionRequest(
name = "%s_index_res" % sub_dir,
category = "%s_index" % sub_dir,
dep_files = [],
dep_targets = [],
input_files = [index_file_txt],
output_files = [index_res_file],
tool = IcuTool("genrb"),

View file

@ -33,108 +33,3 @@ IcuTool = namedtuple("IcuTool", ["name"])
# Plain namedtuple data types used by the build tool (pre-ICU-20300
# representation). They carry no behavior; helper functions elsewhere
# dispatch on the tuple type.

# A tool expected to be provided by the host system (not built by ICU).
SystemTool = namedtuple("SystemTool", ["name"])

# A reference, by name, to another request that must be built first.
DepTarget = namedtuple("DepTarget", ["name"])

SingleExecutionRequest = namedtuple("SingleExecutionRequest", [
    # Used for identification purposes
    "name",
    # The filter category that applies to this request
    "category",
    # Names of targets (requests) or files that this request depends on;
    # targets are of type DepTarget
    "dep_files",
    # Primary input files
    "input_files",
    # Output files
    "output_files",
    # What tool to use
    "tool",
    # Argument string to pass to the tool with optional placeholders
    "args",
    # Placeholders to substitute into the argument string; if any of these
    # have a list type, the list must be equal in length to input_files
    "format_with"
])

RepeatedExecutionRequest = namedtuple("RepeatedExecutionRequest", [
    # Used for identification purposes
    "name",
    # The filter category that applies to this request
    "category",
    # Names of targets (requests) or files that this request depends on;
    # targets are of type DepTarget
    "dep_files",
    # Primary input files
    "input_files",
    # Output files
    "output_files",
    # What tool to use
    "tool",
    # Argument string to pass to the tool with optional placeholders
    "args",
    # Placeholders to substitute into the argument string for all executions;
    # if any of these have a list type, the list must be equal in length to
    # input_files
    "format_with",
    # Placeholders to substitute into the argument string unique to each
    # iteration; all values must be lists equal in length to input_files
    "repeat_with"
])

# Same fields as RepeatedExecutionRequest; flattened later into either a
# RepeatedExecutionRequest or a SingleExecutionRequest (see utils).
RepeatedOrSingleExecutionRequest = namedtuple("RepeatedOrSingleExecutionRequest", [
    "name",
    "category",
    "dep_files",
    "input_files",
    "output_files",
    "tool",
    "args",
    "format_with",
    "repeat_with"
])

# Request to write the literal `content` string to `output_file`.
PrintFileRequest = namedtuple("PrintFileRequest", [
    "name",
    "output_file",
    "content"
])

# Request to copy `input_file` to `output_file`.
CopyRequest = namedtuple("CopyRequest", [
    "name",
    "input_file",
    "output_file"
])

# Declares a buildsystem variable whose value is `input_files`.
VariableRequest = namedtuple("VariableRequest", [
    "name",
    "input_files"
])

# Emits the list of all output files, as a file and as a variable
# (flattened into PrintFileRequest + VariableRequest).
ListRequest = namedtuple("ListRequest", [
    "name",
    "variable_name",
    "output_file",
    "include_tmp"
])

# Generates a res_index-style index txt file listing `input_files`
# (flattened into a PrintFileRequest).
IndexTxtRequest = namedtuple("IndexTxtRequest", [
    "name",
    "category",
    "input_files",
    "output_file",
    "cldr_version"
])

View file

@ -12,6 +12,7 @@ import sys
from . import *
from . import utils
from .request_types import *
# Note: for this to be a proper abstract class, it should extend abc.ABC.
@ -37,14 +38,21 @@ class Filter(object):
print("Error: Unknown filterType option: %s" % filter_type, file=sys.stderr)
return None
@abstractmethod
def filter(self, request):
    # Default behavior: ask the request to filter its own input files
    # via apply_file_filter(); drop the request entirely if nothing
    # survives, otherwise keep it as-is.
    # NOTE(review): decorated @abstractmethod yet has a concrete body --
    # subclasses must override, but may delegate back to this logic.
    if not request.apply_file_filter(self):
        return []
    # Sanity check: every remaining input file must satisfy match().
    for file in request.all_input_files():
        assert self.match(file)
    return [request]

@abstractmethod
def match(self, file):
    # Return True if `file` should be included by this filter.
    pass
class ExclusionFilter(Filter):
    """Filter that excludes every request and file in its category."""
    def filter(self, request):
        # Drop every request unconditionally.
        return []
    def match(self, file):
        # No file ever matches.
        return False
class WhitelistBlacklistFilter(Filter):
@ -56,139 +64,9 @@ class WhitelistBlacklistFilter(Filter):
self.is_whitelist = False
self.blacklist = json_data["blacklist"]
def filter(self, request):
    # Dispatch on the request type; each handler rebuilds the request
    # keeping only the input files that pass the whitelist/blacklist.
    if isinstance(request, SingleExecutionRequest):
        return self._filter_single(request)
    elif isinstance(request, RepeatedExecutionRequest):
        return self._filter_repeated(request)
    elif isinstance(request, RepeatedOrSingleExecutionRequest):
        return self._filter_repeated_or_single(request)
    elif isinstance(request, IndexTxtRequest):
        return self._filter_index_txt(request)
    else:
        # Assert that no other types are needed
        for file in utils.get_input_files(request):
            file_stem = self._file_to_file_stem(file)
            assert self._should_include(file_stem), request
        return [request]
def _filter_single(self, request):
    # Keep only the input files whose stem passes the filter. Output
    # files are left untouched (a single execution has fixed outputs);
    # list-valued format_with placeholders are trimmed in lockstep.
    new_input_files = []
    new_format_with = defaultdict(utils.SpaceSeparatedList)
    for i in range(len(request.input_files)):
        file_stem = self._file_to_file_stem(request.input_files[i])
        if self._should_include(file_stem):
            new_input_files.append(request.input_files[i])
            for k,v in request.format_with.items():
                if isinstance(v, list):
                    new_format_with[k].append(v[i])
    # Return a new request if there are still >= 1 input files.
    if new_input_files:
        return [
            SingleExecutionRequest(
                name = request.name,
                category = request.category,
                dep_files = request.dep_files,
                input_files = new_input_files,
                output_files = request.output_files,
                tool = request.tool,
                args = request.args,
                # NOTE(review): presumably concat_dicts lets the trimmed
                # lists in new_format_with override the originals; verify.
                format_with = utils.concat_dicts(request.format_with, new_format_with)
            )
        ]
    return []
def _filter_repeated(self, request):
    # Keep only the input files whose stem passes the filter. A repeated
    # request runs once per input file, so output_files and all
    # list-valued placeholders are trimmed at the same indices.
    new_input_files = []
    new_output_files = []
    new_format_with = defaultdict(utils.SpaceSeparatedList)
    new_repeat_with = defaultdict(utils.SpaceSeparatedList)
    for i in range(len(request.input_files)):
        file_stem = self._file_to_file_stem(request.input_files[i])
        if self._should_include(file_stem):
            new_input_files.append(request.input_files[i])
            new_output_files.append(request.output_files[i])
            for k,v in request.format_with.items():
                if isinstance(v, list):
                    new_format_with[k].append(v[i])
            for k,v in request.repeat_with.items():
                # repeat_with values are always per-input-file lists.
                assert isinstance(v, list)
                new_repeat_with[k].append(v[i])
    # Return a new request if there are still >= 1 input files.
    if new_input_files:
        return [
            RepeatedExecutionRequest(
                name = request.name,
                category = request.category,
                dep_files = request.dep_files,
                input_files = new_input_files,
                output_files = new_output_files,
                tool = request.tool,
                args = request.args,
                format_with = utils.concat_dicts(request.format_with, new_format_with),
                repeat_with = utils.concat_dicts(request.repeat_with, new_repeat_with)
            )
        ]
    else:
        return []
def _filter_repeated_or_single(self, request):
    # Identical logic to _filter_repeated, but rebuilds a
    # RepeatedOrSingleExecutionRequest so the repeated-vs-single
    # decision is still deferred to flattening.
    new_input_files = []
    new_output_files = []
    new_format_with = defaultdict(utils.SpaceSeparatedList)
    new_repeat_with = defaultdict(utils.SpaceSeparatedList)
    for i in range(len(request.input_files)):
        file_stem = self._file_to_file_stem(request.input_files[i])
        if self._should_include(file_stem):
            new_input_files.append(request.input_files[i])
            new_output_files.append(request.output_files[i])
            for k,v in request.format_with.items():
                if isinstance(v, list):
                    new_format_with[k].append(v[i])
            for k,v in request.repeat_with.items():
                # repeat_with values are always per-input-file lists.
                assert isinstance(v, list)
                new_repeat_with[k].append(v[i])
    # Return a new request if there are still >= 1 input files.
    if new_input_files:
        return [
            RepeatedOrSingleExecutionRequest(
                name = request.name,
                category = request.category,
                dep_files = request.dep_files,
                input_files = new_input_files,
                output_files = new_output_files,
                tool = request.tool,
                args = request.args,
                format_with = utils.concat_dicts(request.format_with, new_format_with),
                repeat_with = utils.concat_dicts(request.repeat_with, new_repeat_with)
            )
        ]
    else:
        return []
def _filter_index_txt(self, request):
    # Trim the list of locale files to be indexed; the single output
    # index file itself is unchanged.
    new_input_files = []
    for file in request.input_files:
        file_stem = self._file_to_file_stem(file)
        if self._should_include(file_stem):
            new_input_files.append(file)
    # Return a new request if there are still >= 1 input files.
    if new_input_files:
        return [
            IndexTxtRequest(
                name = request.name,
                category = request.category,
                input_files = new_input_files,
                output_file = request.output_file,
                cldr_version = request.cldr_version
            )
        ]
    else:
        return []
def match(self, file):
    # A file matches if its stem passes the whitelist/blacklist rules.
    file_stem = self._file_to_file_stem(file)
    return self._should_include(file_stem)
@classmethod
def _file_to_file_stem(cls, file):
@ -243,12 +121,19 @@ class RegexFilter(WhitelistBlacklistFilter):
return True
def apply_filters(old_requests, config):
def apply_filters(requests, config):
"""Runs the filters and returns a new list of requests."""
filters = _preprocess_filters(old_requests, config)
requests = _apply_file_filters(requests, config)
requests = _apply_resource_filters(requests, config)
return requests
def _apply_file_filters(old_requests, config):
"""Filters out entire files."""
filters = _preprocess_file_filters(old_requests, config)
new_requests = []
for request in old_requests:
category = utils.get_category(request)
category = request.category
if category in filters:
new_requests += filters[category].filter(request)
else:
@ -256,9 +141,9 @@ def apply_filters(old_requests, config):
return new_requests
def _preprocess_filters(requests, config):
def _preprocess_file_filters(requests, config):
all_categories = set(
utils.get_category(request)
request.category
for request in requests
)
all_categories.remove(None)
@ -279,3 +164,8 @@ def _preprocess_filters(requests, config):
if category not in all_categories:
print("Warning: category %s is not known" % category, file=sys.stderr)
return filters
def _apply_resource_filters(old_requests, config):
    """Creates filters for looking within resource bundle files."""
    # Stub: within-file (resource-level) filtering is not implemented
    # yet; requests currently pass through unchanged.
    return old_requests

View file

@ -8,6 +8,7 @@ from __future__ import print_function
from . import *
from .. import *
from .. import utils
from ..request_types import *
def get_gnumake_rules(build_dirs, requests, makefile_vars, **kwargs):
makefile_string = ""
@ -141,7 +142,7 @@ def get_gnumake_rules_helper(request, common_vars, **kwargs):
if isinstance(request, SingleExecutionRequest):
cmd = utils.format_single_request_command(request, cmd_template, common_vars)
dep_files = utils.get_input_files(request)
dep_files = request.all_input_files()
if len(request.output_files) > 1:
# Special case for multiple output files: Makefile rules should have only one

View file

@ -4,6 +4,7 @@
from . import *
from .. import *
from .. import utils
from ..request_types import *
import os
import shutil

View file

@ -0,0 +1,288 @@
# Copyright (C) 2018 and later: Unicode, Inc. and others.
# License & terms of use: http://www.unicode.org/copyright.html
# Python 2/3 Compatibility (ICU-20299)
# TODO(ICU-20301): Remove this.
from __future__ import print_function
from abc import abstractmethod
import copy
import sys
from . import utils
# TODO(ICU-20301): Remove arguments from all instances of super() in this file
# Note: for this to be a proper abstract class, it should extend abc.ABC.
# There is no nice way to do this that works in both Python 2 and 3.
# TODO(ICU-20301): Make this inherit from abc.ABC.
class AbstractRequest(object):
    """Base class for all build requests.

    Subclasses declare their fields as instance attributes in __init__
    before chaining up; any keyword argument naming a declared field is
    copied onto the instance, and unknown keywords are rejected.
    """

    def __init__(self, **kwargs):
        # Used for identification purposes
        self.name = None
        # The filter category that applies to this request
        self.category = None
        self._set_fields(kwargs)

    def _set_fields(self, kwargs):
        # Copy each recognized keyword onto the instance.
        for key, value in list(kwargs.items()):
            if not hasattr(self, key):
                raise ValueError("Unknown argument: %s" % key)
            # Defensive copies so the caller cannot mutate our state later.
            if isinstance(value, list):
                value = copy.copy(value)
            elif isinstance(value, dict):
                value = copy.deepcopy(value)
            setattr(self, key, value)

    def apply_file_filter(self, filter):
        # By default a request survives file filtering untouched.
        return True

    def flatten(self, config, all_requests, common_vars):
        # Non-"meta" requests flatten to just themselves.
        return [self]

    def all_input_files(self):
        return []

    def all_output_files(self):
        return []
class AbstractExecutionRequest(AbstractRequest):
    """Base class for requests that run a tool over input files."""

    def __init__(self, **kwargs):
        # Names of targets (requests) this request depends on;
        # entries are of type DepTarget.
        self.dep_targets = []
        # Concrete dependency files, filled in from dep_targets at
        # flatten time.
        self.dep_files = []
        # Primary input files.
        self.input_files = []
        # Output files; for some subclasses, this must be the same
        # length as input_files.
        self.output_files = []
        # What tool to execute.
        self.tool = None
        # Argument string to pass to the tool with optional placeholders.
        self.args = ""
        # Placeholders to substitute into the argument string; if any of
        # these have a list type, the list must be equal in length to
        # input_files.
        self.format_with = {}
        super(AbstractExecutionRequest, self).__init__(**kwargs)

    def apply_file_filter(self, filter):
        # Delete, in place, every input file rejected by the filter.
        # `kept` ends up equal to the number of surviving input files.
        kept = 0
        while kept < len(self.input_files):
            if not filter.match(self.input_files[kept]):
                self._del_at(kept)
            else:
                kept += 1
        return kept > 0

    def _del_at(self, i):
        # Remove index i from input_files and from every parallel
        # list-valued placeholder.
        del self.input_files[i]
        for value in self.format_with.values():
            if isinstance(value, list):
                del value[i]

    def flatten(self, config, all_requests, common_vars):
        # Resolve named dependencies into concrete files before
        # delegating to the base flatten.
        self._dep_targets_to_files(all_requests)
        return super(AbstractExecutionRequest, self).flatten(config, all_requests, common_vars)

    def _dep_targets_to_files(self, all_requests):
        # Turn each DepTarget into the output files of the request it
        # names; unknown targets only produce a warning.
        if not self.dep_targets:
            return
        for dep_target in self.dep_targets:
            resolved = next(
                (candidate for candidate in all_requests
                 if candidate.name == dep_target.name),
                None
            )
            if resolved is not None:
                self.dep_files += resolved.all_output_files()
            else:
                print("Warning: Unable to find target %s, a dependency of %s" % (
                    dep_target.name,
                    self.name
                ), file=sys.stderr)

    def all_input_files(self):
        return self.dep_files + self.input_files

    def all_output_files(self):
        return self.output_files
class SingleExecutionRequest(AbstractExecutionRequest):
    """Executes its tool a single time over the whole input file list.

    All fields are inherited from AbstractExecutionRequest.
    """

    def __init__(self, **kwargs):
        super(SingleExecutionRequest, self).__init__(**kwargs)
class RepeatedExecutionRequest(AbstractExecutionRequest):
    """Executes its tool once per input file."""

    def __init__(self, **kwargs):
        # Placeholders to substitute into the argument string unique to
        # each iteration; all values must be lists equal in length to
        # input_files.
        self.repeat_with = {}
        super(RepeatedExecutionRequest, self).__init__(**kwargs)

    def _del_at(self, i):
        # Outputs and per-iteration placeholders run parallel to
        # input_files, so trim them at the same index.
        super(RepeatedExecutionRequest, self)._del_at(i)
        del self.output_files[i]
        for value in self.repeat_with.values():
            if isinstance(value, list):
                del value[i]
class RepeatedOrSingleExecutionRequest(AbstractExecutionRequest):
    # A "meta" request: flatten() rewrites it into either a
    # RepeatedExecutionRequest (when parallel execution is enabled in the
    # config) or a SingleExecutionRequest (when it is not).
    def __init__(self, **kwargs):
        # Per-iteration placeholders; forwarded as-is in the repeated
        # case, merged into format_with in the single case.
        self.repeat_with = {}
        super(RepeatedOrSingleExecutionRequest, self).__init__(**kwargs)

    def flatten(self, config, all_requests, common_vars):
        if config.max_parallel:
            # One tool invocation per input file.
            new_request = RepeatedExecutionRequest(
                name = self.name,
                category = self.category,
                dep_targets = self.dep_targets,
                input_files = self.input_files,
                output_files = self.output_files,
                tool = self.tool,
                args = self.args,
                format_with = self.format_with,
                repeat_with = self.repeat_with
            )
        else:
            # One tool invocation over all input files together.
            new_request = SingleExecutionRequest(
                name = self.name,
                category = self.category,
                dep_targets = self.dep_targets,
                input_files = self.input_files,
                output_files = self.output_files,
                tool = self.tool,
                args = self.args,
                # NOTE(review): presumably concat_dicts merges the two
                # placeholder dicts; verify repeat_with keys cannot clash
                # with format_with keys.
                format_with = utils.concat_dicts(self.format_with, self.repeat_with)
            )
        # Delegate so the replacement resolves its own dep_targets.
        return new_request.flatten(config, all_requests, common_vars)

    def _del_at(self, i):
        # Outputs and repeat_with are parallel to input_files.
        super(RepeatedOrSingleExecutionRequest, self)._del_at(i)
        del self.output_files[i]
        for _, v in self.repeat_with.items():
            if isinstance(v, list):
                del v[i]
class PrintFileRequest(AbstractRequest):
    """Request to write a literal string to a single output file."""

    def __init__(self, **kwargs):
        # Destination file.
        self.output_file = None
        # The exact text to write.
        self.content = None
        super(PrintFileRequest, self).__init__(**kwargs)

    def all_output_files(self):
        return [self.output_file]
class CopyRequest(AbstractRequest):
    """Request to copy one input file to one output file."""

    def __init__(self, **kwargs):
        # Source file.
        self.input_file = None
        # Destination file.
        self.output_file = None
        super(CopyRequest, self).__init__(**kwargs)

    def all_input_files(self):
        return [self.input_file]

    def all_output_files(self):
        return [self.output_file]
class VariableRequest(AbstractRequest):
    """Request declaring a buildsystem variable holding input_files."""

    def __init__(self, **kwargs):
        # Files that make up the variable's value.
        self.input_files = []
        super(VariableRequest, self).__init__(**kwargs)

    def all_input_files(self):
        return self.input_files
class ListRequest(AbstractRequest):
    # A "meta" request that emits the set of all output files, both as a
    # printed list file and as a buildsystem variable; flattens into a
    # PrintFileRequest plus a VariableRequest.
    def __init__(self, **kwargs):
        # Name of the variable to declare.
        self.variable_name = None
        # File to which the list is written.
        self.output_file = None
        # Whether the variable should also include tmp-directory outputs.
        self.include_tmp = None
        super(ListRequest, self).__init__(**kwargs)

    def flatten(self, config, all_requests, common_vars):
        # The printed file always lists the non-tmp outputs.
        list_files = list(sorted(utils.get_all_output_files(all_requests)))
        if self.include_tmp:
            # NOTE(review): in this branch the list file itself is not
            # appended -- presumably already covered by
            # get_all_output_files(include_tmp=True); verify.
            variable_files = list(sorted(utils.get_all_output_files(all_requests, include_tmp=True)))
        else:
            # Always include the list file itself
            variable_files = list_files + [self.output_file]
        return PrintFileRequest(
            name = self.name,
            output_file = self.output_file,
            content = "\n".join(file.filename for file in list_files)
        ).flatten(config, all_requests, common_vars) + VariableRequest(
            name = self.variable_name,
            input_files = variable_files
        ).flatten(config, all_requests, common_vars)

    def all_output_files(self):
        return [self.output_file]
class IndexTxtRequest(AbstractRequest):
    # A "meta" request that generates a res_index-style index source file
    # listing the given locale files; flattens into a PrintFileRequest.
    def __init__(self, **kwargs):
        # Locale files to be listed in the index.
        self.input_files = []
        # Destination index txt file.
        self.output_file = None
        # CLDR version string; when falsy, no CLDRVersion entry is emitted.
        self.cldr_version = ""
        super(IndexTxtRequest, self).__init__(**kwargs)

    def apply_file_filter(self, filter):
        # Delete, in place, input files rejected by the filter; keep the
        # request only if at least one file survives.
        i = 0
        while i < len(self.input_files):
            if filter.match(self.input_files[i]):
                i += 1
                continue
            del self.input_files[i]
        return i > 0

    def flatten(self, config, all_requests, common_vars):
        return PrintFileRequest(
            name = self.name,
            output_file = self.output_file,
            content = self._generate_index_file(common_vars)
        ).flatten(config, all_requests, common_vars)

    def _generate_index_file(self, common_vars):
        # Locale name = basename with its last 4 characters removed
        # (assumes a ".txt" extension -- TODO confirm).
        locales = [f.filename[f.filename.rfind("/")+1:-4] for f in self.input_files]
        formatted_version = " CLDRVersion { \"%s\" }\n" % self.cldr_version if self.cldr_version else ""
        formatted_locales = "\n".join([" %s {\"\"}" % v for v in locales])
        # TODO: CLDRVersion is required only in the base file
        # Doubled braces are literal; INDEX_NAME comes from common_vars.
        return ("// Warning this file is automatically generated\n"
            "{INDEX_NAME}:table(nofallback) {{\n"
            "{FORMATTED_VERSION}"
            " InstalledLocales {{\n"
            "{FORMATTED_LOCALES}\n"
            " }}\n"
            "}}").format(
                FORMATTED_VERSION = formatted_version,
                FORMATTED_LOCALES = formatted_locales,
                **common_vars
            )

    def all_input_files(self):
        return self.input_files

    def all_output_files(self):
        return [self.output_file]

View file

@ -63,200 +63,17 @@ def format_repeated_request_command(request, cmd_template, loop_vars, common_var
)
def dep_targets_to_files(this_request, all_requests):
    # Resolve the DepTarget references in this_request.dep_files into the
    # concrete output files of the requests they name.
    # NOTE(review): despite the field name, dep_files holds DepTarget
    # entries here (pre-resolution).
    if not this_request.dep_files:
        return []
    dep_files = []
    for dep_target in this_request.dep_files:
        for request in all_requests:
            if request.name == dep_target.name:
                dep_files += get_output_files(request)
                break
        else:
            # No request with that name was found; warn but continue.
            print("Warning: Unable to find target %s, a dependency of %s" % (
                dep_target.name,
                this_request.name
            ), file=sys.stderr)
    return dep_files
def flatten_requests(raw_requests, config, common_vars):
    """Post-processes "meta" requests into normal requests.

    Affected classes:
    - RepeatedOrSingleExecutionRequest becomes either
      RepeatedExecutionRequest or SingleExecutionRequest
    - ListRequest becomes PrintFileRequest and VariableRequest
    - IndexTxtRequest becomes PrintFileRequest
    """
    flattened_requests = []
    for request in raw_requests:
        if isinstance(request, RepeatedOrSingleExecutionRequest):
            if config.max_parallel:
                # Parallel builds: one tool invocation per input file.
                flattened_requests.append(RepeatedExecutionRequest(
                    name = request.name,
                    category = request.category,
                    dep_files = dep_targets_to_files(
                        request, raw_requests
                    ),
                    input_files = request.input_files,
                    output_files = request.output_files,
                    tool = request.tool,
                    args = request.args,
                    format_with = request.format_with,
                    repeat_with = request.repeat_with
                ))
            else:
                # Serial builds: a single invocation. Note the resolved
                # dependency files are merged into input_files here
                # rather than passed as dep_files.
                flattened_requests.append(SingleExecutionRequest(
                    name = request.name,
                    category = request.category,
                    input_files = request.input_files + dep_targets_to_files(
                        request, raw_requests
                    ),
                    output_files = request.output_files,
                    tool = request.tool,
                    args = request.args,
                    format_with = concat_dicts(request.format_with, request.repeat_with)
                ))
        elif isinstance(request, SingleExecutionRequest):
            # Rebuilt only to resolve DepTargets into concrete files.
            flattened_requests += [
                SingleExecutionRequest(
                    name = request.name,
                    category = request.category,
                    dep_files = dep_targets_to_files(
                        request, raw_requests
                    ),
                    input_files = request.input_files,
                    output_files = request.output_files,
                    tool = request.tool,
                    args = request.args,
                    format_with = request.format_with
                )
            ]
        elif isinstance(request, RepeatedExecutionRequest):
            # Rebuilt only to resolve DepTargets into concrete files.
            flattened_requests += [
                RepeatedExecutionRequest(
                    name = request.name,
                    category = request.category,
                    dep_files = dep_targets_to_files(
                        request, raw_requests
                    ),
                    input_files = request.input_files,
                    output_files = request.output_files,
                    tool = request.tool,
                    args = request.args,
                    format_with = request.format_with,
                    repeat_with = request.repeat_with
                )
            ]
        elif isinstance(request, ListRequest):
            # The printed file always lists the non-tmp outputs.
            list_files = list(sorted(get_all_output_files(raw_requests)))
            if request.include_tmp:
                variable_files = list(sorted(get_all_output_files(raw_requests, include_tmp=True)))
            else:
                # Always include the list file itself
                variable_files = list_files + [request.output_file]
            flattened_requests += [
                PrintFileRequest(
                    name = request.name,
                    output_file = request.output_file,
                    content = "\n".join(file.filename for file in list_files)
                ),
                VariableRequest(
                    name = request.variable_name,
                    input_files = variable_files
                )
            ]
        elif isinstance(request, IndexTxtRequest):
            flattened_requests += [
                PrintFileRequest(
                    name = request.name,
                    output_file = request.output_file,
                    content = generate_index_file(request.input_files, request.cldr_version, common_vars)
                )
            ]
        else:
            # All other request types pass through unchanged.
            flattened_requests.append(request)
    return flattened_requests
def generate_index_file(input_files, cldr_version, common_vars):
    """Build the contents of a res_index-style index txt file.

    input_files: objects with a .filename attribute; the locale name is
        the basename with its last 4 characters (the extension) removed.
    cldr_version: version string; falsy to omit the CLDRVersion entry.
    common_vars: dict providing INDEX_NAME among other substitutions.
    """
    locales = []
    for f in input_files:
        basename = f.filename[f.filename.rfind("/")+1:]
        locales.append(basename[:-4])
    if cldr_version:
        formatted_version = " CLDRVersion { \"%s\" }\n" % cldr_version
    else:
        formatted_version = ""
    formatted_locales = "\n".join(" %s {\"\"}" % locale for locale in locales)
    # TODO: CLDRVersion is required only in the base file
    # Doubled braces are literal; INDEX_NAME comes from common_vars.
    template = (
        "// Warning this file is automatically generated\n"
        "{INDEX_NAME}:table(nofallback) {{\n"
        "{FORMATTED_VERSION}"
        " InstalledLocales {{\n"
        "{FORMATTED_LOCALES}\n"
        " }}\n"
        "}}"
    )
    return template.format(
        FORMATTED_VERSION = formatted_version,
        FORMATTED_LOCALES = formatted_locales,
        **common_vars
    )
def get_input_files(request):
    """Return all input files of `request`, including resolved dep files."""
    if isinstance(request, (SingleExecutionRequest,
                            RepeatedExecutionRequest,
                            RepeatedOrSingleExecutionRequest)):
        # Execution requests consume their dependencies as inputs too.
        return request.dep_files + request.input_files
    if isinstance(request, IndexTxtRequest):
        return request.input_files
    if isinstance(request, CopyRequest):
        return [request.input_file]
    if isinstance(request, (PrintFileRequest, VariableRequest, ListRequest)):
        return []
    # No other request types exist.
    assert False
def get_output_files(request):
    """Return all output files of `request` as a list."""
    if isinstance(request, (SingleExecutionRequest,
                            RepeatedExecutionRequest,
                            RepeatedOrSingleExecutionRequest)):
        return request.output_files
    if isinstance(request, VariableRequest):
        # Variables produce no files.
        return []
    if isinstance(request, (PrintFileRequest, CopyRequest, ListRequest,
                            IndexTxtRequest)):
        return [request.output_file]
    # No other request types exist.
    assert False
def get_category(request):
    """Return the filter category of `request`, or None if it has none."""
    # Only these request types carry a category field.
    categorized_types = (
        SingleExecutionRequest,
        RepeatedExecutionRequest,
        RepeatedOrSingleExecutionRequest,
        IndexTxtRequest,
    )
    if isinstance(request, categorized_types):
        return request.category
    return None
def flatten_requests(requests, config, common_vars):
    """Flatten every "meta" request into concrete requests.

    Each request's flatten() may expand it into several requests (or
    rewrite it); the results are concatenated in order.
    """
    flattened = []
    for request in requests:
        flattened.extend(request.flatten(config, requests, common_vars))
    return flattened
def get_all_output_files(requests, include_tmp=False):
files = []
for request in requests:
files += get_output_files(request)
files += request.all_output_files()
# Filter out all files but those in OUT_DIR if necessary.
# It is also easy to filter for uniqueness; do it right now and return.

View file

@ -4,6 +4,7 @@
from distutils.sysconfig import parse_makefile
from buildtool import *
from buildtool.request_types import *
def generate(config, glob, common_vars):