Changes from all commits
22 commits
e3b0080  Add a bunch of tests for typechecker behaviour (cormacrelf, Nov 14, 2025)
32c7802  Follow users' type annotations (cormacrelf, Nov 14, 2025)
4e1a4f2  Add a list of binding errors to Bindings (cormacrelf, Nov 19, 2025)
8044ab1  Disallow type annotations anywhere other than first declaration (cormacrelf, Nov 16, 2025)
f18f0a9  New parse error for `x[0]: int = ...`, `x.y: int = ...` (cormacrelf, Nov 15, 2025)
b007c5c  Remove vestigial option wrapping in BindingsCollect::visit_def (cormacrelf, Nov 15, 2025)
103e524  Add DefP::visit_header (cormacrelf, Nov 19, 2025)
4f5213b  Add TypingOrInternalError::into_eval_exception (cormacrelf, Nov 19, 2025)
ec7aa4a  Add TypingOrInternalError::into_error (cormacrelf, Nov 17, 2025)
1343650  Type check every function and top-level scope separately (cormacrelf, Nov 16, 2025)
cd8861d  Use recursive function type checker in the compiler (cormacrelf, Nov 16, 2025)
9314cb5  Use recursive function type checker in AstModule's typechecker too (cormacrelf, Nov 17, 2025)
839483c  Delete dead code post recursive function type checker (cormacrelf, Nov 17, 2025)
8900d7a  Add test for complex binding type checking (cormacrelf, Nov 17, 2025)
024103d  Type-check `x: str = ""; x, _ = [1, 2]` and other complex bindings (cormacrelf, Nov 17, 2025)
5bfc249  Implement has_attr for NativeFunction (cormacrelf, Nov 17, 2025)
d7bf850  Plumb through starlark(as_type) to the typechecker (cormacrelf, Nov 17, 2025)
b949c37  Add a type for a test (cormacrelf, Nov 17, 2025)
4fdb3f2  Fix TyUser not forwarding bin_op (cormacrelf, Nov 12, 2025)
b17fffd  Return the actual callable for user types (cormacrelf, Nov 6, 2025)
55ba4c3  impl StarlarkValue::typechecker_ty for UserProviderGen (cormacrelf, Nov 10, 2025)
df6a9ef  Fix types in prelude for new, stricter typechecker behaviour (cormacrelf, Nov 17, 2025)
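Taken together, the typechecker commits above change which Starlark programs are accepted: user annotations are now followed, an annotation may appear only on a name's first declaration, annotating a subscript or attribute target is a parse error, and destructuring assignments are checked against earlier annotations. A minimal Starlark sketch of that behaviour, written for illustration from the commit messages rather than taken from the PR's tests:

    # Annotations are followed by the typechecker.
    x: str = ""

    # Each of the lines below is now rejected (shown commented out):
    # x: str = "again"   # annotation allowed only on the first declaration of `x`
    # y[0]: int = 1      # parse error: annotation on a subscript target
    # y.z: int = 1       # parse error: annotation on an attribute target
    # x, _ = [1, 2]      # complex binding checked: `x` is annotated `str`, `1` is `int`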
@@ -157,6 +157,7 @@ pub(crate) struct UserProviderCallableData {
/// Type id of provider callable instance.
pub(crate) ty_provider_type_instance_id: TypeInstanceId,
pub(crate) fields: IndexMap<String, UserProviderField, StarlarkHasherSmallPromoteBuilder>,
pub(crate) ty_provider: Ty,
}

/// Initialized after the name is assigned to the provider.
@@ -387,6 +388,7 @@ impl<'v> StarlarkValue<'v> for UserProviderCallable {
provider_id,
fields: self.fields.clone(),
ty_provider_type_instance_id,
ty_provider: ty_provider.dupe(),
}),
ty_provider,
ty_callable,
@@ -148,6 +148,10 @@ where
fn provide(&'v self, demand: &mut Demand<'_, 'v>) {
demand.provide_value::<&dyn ProviderLike>(self);
}

fn typechecker_ty(&self) -> Option<Ty> {
Some(self.callable.ty_provider.dupe())
}
}

impl<'v, V: ValueLike<'v>> serde::Serialize for UserProviderGen<'v, V> {
@@ -14,10 +14,6 @@ use buck2_interpreter_for_build::interpreter::testing::Tester;
fn test_attr_display() -> buck2_error::Result<()> {
let mut tester = Tester::new().unwrap();
tester.run_starlark_bzl_test(r#"
def assert_eq(a, b):
if a != b:
fail(a + " != " + b)

assert_eq(repr(attrs.bool(default = True)), "attrs.bool(default=True)")
assert_eq(repr(attrs.string()), "attrs.string()")
assert_eq(repr(attrs.list(attrs.string())), "attrs.list(attrs.string())")
6 changes: 3 additions & 3 deletions prelude/android/android_binary_native_library_rules.bzl
@@ -232,13 +232,13 @@ def get_android_binary_native_library_info(
if native_library_merge_non_asset_libs:
mergemap_cmd.add(cmd_args("--merge-non-asset-libs"))
native_library_merge_dir = ctx.actions.declare_output("merge_sequence_output")
native_library_merge_map = native_library_merge_dir.project("merge.map")
native_library_merge_map_file = native_library_merge_dir.project("merge.map")
split_groups_map = native_library_merge_dir.project("split_groups.map")
mergemap_cmd.add(cmd_args(native_library_merge_dir.as_output(), format = "--output={}"))
ctx.actions.run(mergemap_cmd, category = "compute_mergemap", allow_cache_upload = True)
enhance_ctx.debug_output("compute_merge_sequence", native_library_merge_dir)

dynamic_inputs.append(native_library_merge_map)
dynamic_inputs.append(native_library_merge_map_file)
dynamic_inputs.append(split_groups_map)

mergemap_gencode_jar = None
@@ -268,7 +268,7 @@ def get_android_binary_native_library_info(
# When changing this dynamic_output, the workflow is a lot better if you compute the module graph once and
# then set it as the binary's precomputed_apk_module_graph attr.
if native_library_merge_sequence:
merge_map_by_platform = artifacts[native_library_merge_map].read_json()
merge_map_by_platform = artifacts[native_library_merge_map_file].read_json()
split_groups = artifacts[split_groups_map].read_json()
native_library_merge_debug_outputs["merge_sequence_output"] = native_library_merge_dir
elif native_library_merge_map:
8 changes: 5 additions & 3 deletions prelude/android/android_manifest.bzl
@@ -47,11 +47,13 @@ def generate_android_manifest(
])

if not manifests:
manifests = []
manifests_args = []
elif isinstance(manifests, TransitiveSet):
manifests = manifests.project_as_args("artifacts", ordering = "bfs")
manifests_args = manifests.project_as_args("artifacts", ordering = "bfs")
else:
manifests_args = manifests

library_manifest_paths_file = argfile(actions = ctx.actions, name = "{}/library_manifest_paths_file".format(module_name), args = manifests)
library_manifest_paths_file = argfile(actions = ctx.actions, name = "{}/library_manifest_paths_file".format(module_name), args = manifests_args)

generate_manifest_cmd.add(["--library-manifests-list", library_manifest_paths_file])

4 changes: 2 additions & 2 deletions prelude/android/android_providers.bzl
@@ -296,7 +296,7 @@ def merge_android_packageable_info(
AndroidBuildConfigInfoTSet,
)

deps = _get_transitive_set(
deps_tset = _get_transitive_set(
actions,
filter(None, [dep.deps for dep in android_packageable_deps]),
DepsInfo(
@@ -334,7 +334,7 @@ def merge_android_packageable_info(
return AndroidPackageableInfo(
target_label = label.raw_target(),
build_config_infos = build_config_infos,
deps = deps,
deps = deps_tset,
manifests = manifests,
prebuilt_native_library_dirs = prebuilt_native_library_dirs,
resource_infos = resource_infos,
10 changes: 3 additions & 7 deletions prelude/apple/apple_library.bzl
@@ -630,13 +630,9 @@ def _get_link_style_sub_targets_and_providers(
)

if get_apple_stripped_attr_value_with_default_fallback(ctx):
if False:
# TODO(nga): `output.unstripped` is never `None`.
def unknown():
pass

output = unknown()
expect(output.unstripped != None, "Expecting unstripped output to be non-null when stripping is enabled.")
# TODO(nga): `output.unstripped` is never `None`.
unstripped: None | typing.Any = output.unstripped
expect(unstripped != None, "Expecting unstripped output to be non-null when stripping is enabled.")
dsym_executable = output.unstripped
else:
dsym_executable = output.default
10 changes: 5 additions & 5 deletions prelude/apple/swift/swift_compilation.bzl
@@ -876,7 +876,7 @@ def _compile_with_argsfile(

allow_cache_upload, action_execution_attributes = _get_action_properties(ctx, toolchain, cacheable, build_swift_incrementally, explicit_modules_enabled)

argsfile, output_file_map = compile_with_argsfile(
argsfile, output_file_map_artifact = compile_with_argsfile(
ctx = ctx,
category = category,
shared_flags = shared_flags,
@@ -903,9 +903,9 @@

if extension:
# Swift correctly handles relative paths and we can utilize the relative argsfile for Xcode.
return CompileArgsfiles(relative = {extension: argsfile}, xcode = {extension: argsfile}), output_file_map
return CompileArgsfiles(relative = {extension: argsfile}, xcode = {extension: argsfile}), output_file_map_artifact
else:
return None, output_file_map
return None, output_file_map_artifact

def _get_action_properties(
ctx: AnalysisContext,
@@ -1154,10 +1154,10 @@ def _add_swift_module_map_args(
cmd: cmd_args,
is_macro: bool):
module_name = get_module_name(ctx)
sdk_swiftmodule_deps_tset = [sdk_swiftmodule_deps_tset] if sdk_swiftmodule_deps_tset else []
sdk_swiftmodule_deps_tsets = [sdk_swiftmodule_deps_tset] if sdk_swiftmodule_deps_tset else []
all_deps_tset = ctx.actions.tset(
SwiftCompiledModuleTset,
children = _get_swift_paths_tsets(is_macro, ctx.attrs.deps + getattr(ctx.attrs, "exported_deps", [])) + [pcm_deps_tset, sdk_deps_tset] + sdk_swiftmodule_deps_tset,
children = _get_swift_paths_tsets(is_macro, ctx.attrs.deps + getattr(ctx.attrs, "exported_deps", [])) + [pcm_deps_tset, sdk_deps_tset] + sdk_swiftmodule_deps_tsets,
)
swift_module_map_artifact = write_swift_module_map_with_deps(
ctx,
8 changes: 4 additions & 4 deletions prelude/artifact_tset.bzl
@@ -60,7 +60,7 @@ def make_artifact_tset(
)

# As a convenience for our callers, filter our `None` children.
children = [c._tset for c in children if c._tset != None]
children_ = [c._tset for c in children if c._tset != None]

# Build list of all non-child values.
values = []
@@ -69,15 +69,15 @@
values.extend(infos)

# If there's no children or artifacts, return `None`.
if not values and not children:
if not values and not children_:
return ArtifactTSet()

# We only build a `_ArtifactTSet` if there's something to package.
kwargs = {}
if values:
kwargs["value"] = values
if children:
kwargs["children"] = children
if children_:
kwargs["children"] = children_
return ArtifactTSet(
_tset = actions.tset(_ArtifactTSet, **kwargs),
)
10 changes: 5 additions & 5 deletions prelude/cfg/modifier/cfg_constructor.bzl
@@ -67,15 +67,15 @@ def cfg_constructor_pre_constraint_analysis(
Returns `(refs, PostConstraintAnalysisParams)`, where `refs` is a list of fully qualified configuration
targets we need providers for.
"""
package_modifiers = package_modifiers or []
package_modifiers_1 = package_modifiers or []
target_modifiers = target_modifiers or []

# Convert JSONs back to TaggedModifiers
package_modifiers = [json_to_tagged_modifiers(modifier_json) for modifier_json in package_modifiers]
tagged_package_modifiers: list[TaggedModifiers] = [json_to_tagged_modifiers(modifier_json) for modifier_json in package_modifiers_1]

# Filter PACKAGE modifiers based on rule name.
# This only filters out PACKAGE modifiers from `extra_cfg_modifiers_per_rule` argument of `set_cfg_modifiers` function.
package_modifiers = [tagged_modifiers for tagged_modifiers in package_modifiers if tagged_modifiers.rule_name == None or tagged_modifiers.rule_name == rule_name]
tagged_package_modifiers = [tagged_modifiers for tagged_modifiers in tagged_package_modifiers if tagged_modifiers.rule_name == None or tagged_modifiers.rule_name == rule_name]

# Resolve all aliases in CLI modifiers
cli_modifiers = [resolved_modifier for modifier in cli_modifiers for resolved_modifier in resolve_alias(modifier, aliases)]
@@ -85,7 +85,7 @@
if buckconfig_backed_modifiers:
refs.append(buckconfig_backed_modifiers)

for tagged_modifiers in package_modifiers:
for tagged_modifiers in tagged_package_modifiers:
for modifier in tagged_modifiers.modifiers:
refs.extend(modifier_to_refs(modifier, tagged_modifiers.location))
for modifier in target_modifiers:
@@ -95,7 +95,7 @@

return refs, PostConstraintAnalysisParams(
legacy_platform = legacy_platform,
package_modifiers = package_modifiers,
package_modifiers = tagged_package_modifiers,
target_modifiers = target_modifiers,
cli_modifiers = cli_modifiers,
extra_data = extra_data,
2 changes: 1 addition & 1 deletion prelude/cxx/cxx_library.bzl
@@ -1434,7 +1434,7 @@ def _form_library_outputs(
outputs = {}
solibs = {}
link_infos = {}
providers = []
providers: list[Provider] = []
sanitizer_runtime_files = []
gcno_files = []

4 changes: 2 additions & 2 deletions prelude/cxx/groups.bzl
@@ -183,11 +183,11 @@ def _parse_filter(entry: str) -> GroupFilterInfo:
if target_regex != None:
regex_expr = regex("^{}$".format(target_regex), fancy = True)

def matches_regex(_r, t, _labels):
def matches_regex2(_r, t, _labels):
return regex_expr.match(str(t.raw_target()))

return GroupFilterInfo(
matches = matches_regex,
matches = matches_regex2,
info = {"target_regex": str(regex_expr)},
)

11 changes: 7 additions & 4 deletions prelude/cxx/link.bzl
@@ -531,7 +531,10 @@ def _anon_cxx_link(
if generates_split_debug(cxx_toolchain):
split_debug_output = anon_link_target.artifact("split_debug_output")

output = ctx.actions.assert_short_path(anon_link_target.artifact("output"), short_path = output)
output_artifact = ctx.actions.assert_short_path(
anon_link_target.artifact("output"),
short_path = output,
)

external_debug_info = link_external_debug_info(
ctx = ctx,
@@ -541,12 +544,12 @@

# The anon target API doesn't allow us to return the list of artifacts for
# sanitizer runtime, so it has be computed here
sanitizer_runtime_args = cxx_sanitizer_runtime_arguments(ctx, cxx_toolchain, output)
sanitizer_runtime_args = cxx_sanitizer_runtime_arguments(ctx, cxx_toolchain, output_artifact)

return CxxLinkResult(
linked_object = LinkedObject(
output = output,
unstripped_output = output,
output = output_artifact,
unstripped_output = output_artifact,
dwp = dwp,
external_debug_info = external_debug_info,
),
6 changes: 3 additions & 3 deletions prelude/cxx/shared_library_interface.bzl
@@ -23,14 +23,14 @@ def _shared_library_interface(
linker_info = get_cxx_toolchain_info(ctx).linker_info
args = cmd_args(linker_info.mk_shlib_intf[RunInfo])
args.add(shared_lib)
output = ctx.actions.declare_output(output)
args.add(output.as_output())
output_artifact = ctx.actions.declare_output(output)
args.add(output_artifact.as_output())
ctx.actions.run(
args,
category = "generate_shared_library_interface",
identifier = identifier,
)
return output
return output_artifact

_InterfaceInfo = provider(fields = {
"artifact": provider_field(typing.Any, default = None), # "artifact"
4 changes: 2 additions & 2 deletions prelude/erlang/erlang_application.bzl
@@ -317,8 +317,8 @@ def _link_src_dir(ctx: AnalysisContext, *, extra_srcs: list[Artifact]) -> Artifa
if ctx.attrs.app_src:
srcs[ctx.attrs.app_src.basename] = ctx.attrs.app_src

for extra_srcs in extra_srcs:
srcs[extra_srcs.basename] = extra_srcs
for extra_src in extra_srcs:
srcs[extra_src.basename] = extra_src

return ctx.actions.symlinked_dir(paths.join(erlang_build.utils.BUILD_DIR, "src"), srcs)

7 changes: 4 additions & 3 deletions prelude/http_archive/unarchive.bzl
@@ -215,17 +215,18 @@ def unarchive(
if needs_strip_prefix:
ctx.actions.copy_dir(output.as_output(), script_output.project(strip_prefix), has_content_based_path = has_content_based_path)

sub_targets_dict: dict[str, list[Provider]] = {}
if type(sub_targets) == type([]):
sub_targets = {
sub_targets_dict = {
path: [DefaultInfo(default_output = output.project(path))]
for path in sub_targets
}
elif type(sub_targets) == type({}):
sub_targets = {
sub_targets_dict = {
name: [DefaultInfo(default_outputs = [output.project(path) for path in paths])]
for name, paths in sub_targets.items()
}
else:
fail("sub_targets must be a list or dict")

return output, sub_targets
return output, sub_targets_dict
8 changes: 4 additions & 4 deletions prelude/jvm/cd_jar_creator_util.bzl
@@ -119,17 +119,17 @@ def define_output_paths(actions: AnalysisActions, prefix: [str, None], label: La
)

def encode_output_paths(label: Label, paths: OutputPaths, target_type: TargetType) -> struct:
paths = struct(
paths_value = struct(
classesDir = paths.classes.as_output(),
outputJarDirPath = cmd_args(paths.jar.as_output(), parent = 1),
annotationPath = paths.annotations.as_output(),
outputJarPath = paths.jar.as_output(),
)

return struct(
libraryPaths = paths if target_type == TargetType("library") else None,
sourceAbiPaths = paths if target_type == TargetType("source_abi") else None,
sourceOnlyAbiPaths = paths if target_type == TargetType("source_only_abi") else None,
libraryPaths = paths_value if target_type == TargetType("library") else None,
sourceAbiPaths = paths_value if target_type == TargetType("source_abi") else None,
sourceOnlyAbiPaths = paths_value if target_type == TargetType("source_only_abi") else None,
libraryTargetFullyQualifiedName = base_qualified_name(label),
)

6 changes: 3 additions & 3 deletions prelude/linking/link_info.bzl
@@ -1039,9 +1039,9 @@ LinkCommandDebugOutput = record(

# NB: Debug output is _not_ transitive over deps, so tsets are not used here.
LinkCommandDebugOutputInfo = provider(
fields = [
"debug_outputs", # ["LinkCommandDebugOutput"]
],
fields = {
"debug_outputs": provider_field(list[LinkCommandDebugOutput]),
},
)

UnstrippedLinkOutputInfo = provider(fields = {
10 changes: 5 additions & 5 deletions prelude/linking/linkable_graph.bzl
@@ -190,15 +190,15 @@ def create_linkable_node(
)
if not linker_flags:
linker_flags = LinkerFlags()
deps = linkable_deps(deps)
exported_deps = linkable_deps(exported_deps)
ldeps = linkable_deps(deps)
lexported_deps = linkable_deps(exported_deps)
return LinkableNode(
labels = ctx.attrs.labels,
preferred_linkage = preferred_linkage,
default_link_strategy = default_link_strategy,
deps = deps,
exported_deps = exported_deps,
all_deps = deps + exported_deps,
deps = ldeps,
exported_deps = lexported_deps,
all_deps = ldeps + lexported_deps,
link_infos = link_infos,
shared_libs = shared_libs,
can_be_asset = can_be_asset,