xref: /aosp_15_r20/external/bazelbuild-rules_android/rules/processing_pipeline.bzl (revision 9e965d6fece27a77de5377433c2f7e6999b8cc0b)
# Copyright 2020 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Common implementation for processing pipelines."""

# Keys used in the target context dictionary — and attribute names looked up
# on each processor's result value in _run — for the accumulated list of
# providers to propagate and the accumulated validation outputs.
PROVIDERS = "providers"
VALIDATION_OUTPUTS = "validation_outputs"

# TODO(djwhang): When a provider type can be retrieved from a Starlark provider
# ProviderInfo is necessary. Once this is possible, processor methods can have a
# uniform method signature foo(ctx, target_ctx) where we can pull the provider
# off the target_ctx using the provider type.
#
# Yes, this effectively leads to producing a build rule like system within a
# build rule, rather than resorting to rule based composition.

# Wrapper each processor returns: a registration name, the wrapped provider,
# and optional runfiles to fold into the rule's DefaultInfo.
ProviderInfo = provider(
    "Stores metadata about the actual Starlark provider returned.",
    fields = {
        "name": "The type of the provider",
        "value": "The actual provider",
        "runfiles": "Runfiles to pass to the DefaultInfo provider",
    },
)

# Internal provider bundling a rule implementation as data: the ordered
# processors to execute plus the finalize function that assembles the
# providers to propagate. Consumed by _run.
_ProcessingPipelineInfo = provider(
    # Fixed grammar in the doc string: "functions that form", not "forms".
    "Stores functions that form a rule's implementation.",
    fields = dict(
        processors = "Ordered dictionary of processing functions.",
        finalize = "Function to form the final providers to propagate.",
    ),
)

def _make_processing_pipeline(processors = None, finalize = None):
    """Creates the combined processing pipeline.

    Args:
      processors: Ordered dictionary of processing functions. Defaults to an
        empty dictionary. A None sentinel is used instead of a mutable dict()
        default so each call gets a fresh, unshared (and unfrozen) dict.
      finalize: Function to form the final providers to propagate.

    Returns:
      A _ProcessingPipelineInfo provider.
    """
    return _ProcessingPipelineInfo(
        processors = processors if processors != None else {},
        finalize = finalize,
    )

def _run(ctx, java_package, processing_pipeline):
    """Runs the processing pipeline and populates the target context.

    Args:
      ctx: The context.
      java_package: The java package resolved from the target's path
        or the custom_package attr.
      processing_pipeline: The _ProcessingPipelineInfo provider for this target.

    Returns:
      The output of the _ProcessingPipelineInfo.finalize function.
    """
    target_ctx = {
        "java_package": java_package,
        PROVIDERS: [],
        VALIDATION_OUTPUTS: [],
        "runfiles": ctx.runfiles(),
    }

    for processor in processing_pipeline.processors.values():
        info = processor(ctx, **target_ctx)
        if not info:
            continue

        # Each processor registers at most one uniquely named entry.
        if info.name in target_ctx:
            fail("%s provider already registered in target context" % info.name)
        target_ctx[info.name] = info.value

        # Accumulate the providers and validation outputs the result exposes.
        target_ctx[PROVIDERS].extend(getattr(info.value, PROVIDERS, []))
        target_ctx[VALIDATION_OUTPUTS].extend(getattr(info.value, VALIDATION_OUTPUTS, []))

        if hasattr(info, "runfiles") and info.runfiles:
            target_ctx["runfiles"] = target_ctx["runfiles"].merge(info.runfiles)

    return processing_pipeline.finalize(ctx, **target_ctx)

def _prepend(processors, **new_processors):
    """Prepends processors in a given processing pipeline.

    Args:
      processors: The dictionary representing the processing pipeline.
      **new_processors: The processors to add where the key represents the
        name of the processor and value is the function pointer to the new
        processor.

    Returns:
      A dictionary which represents the new processing pipeline.
    """

    # Seed the result with the new entries, then fold the existing pipeline
    # back in so the original processors iterate after the prepended ones.
    pipeline = dict(new_processors)
    pipeline.update(processors)
    return pipeline

def _append(processors, **new_processors):
    """Appends processors in a given processing pipeline.

    Args:
      processors: The dictionary representing the processing pipeline.
      **new_processors: The processors to append where the key represents the
        name of the processor and value is the function pointer to the new
        processor.

    Returns:
      A dictionary which represents the new processing pipeline.
    """

    # Copy the existing pipeline, then merge in the new processors so they
    # iterate last (an existing key keeps its position but takes the new value).
    pipeline = dict(processors)
    pipeline.update(new_processors)
    return pipeline

def _replace(processors, **new_processors):
    """Replace processors in a given processing pipeline.

    Args:
      processors: The dictionary representing the processing pipeline.
      **new_processors: The processors to override where the key represents the
        name of the processor and value is the function pointer to the new
        processor.

    Returns:
      A dictionary which represents the new processing pipeline.
    """

    # Validate up front: only existing processors may be overridden.
    for name in new_processors.keys():
        if name not in processors:
            fail("Error, %s not found, unable to override." % name)

    # NOTE: Overwriting an existing value does not break iteration order.
    # However, if a new processor is being added that needs to be injected
    # between other processors, the processing pipeline dictionary will need
    # to be recreated.
    pipeline = dict(processors)
    pipeline.update(new_processors)
    return pipeline

# Public API of this module: a struct mapping the exported names to the
# private implementations above.
processing_pipeline = struct(
    make_processing_pipeline = _make_processing_pipeline,
    run = _run,
    prepend = _prepend,
    append = _append,
    replace = _replace,
)