Compare commits

...

2 Commits

SHA1        Message         Date
7bbbd49976  Fix signals     2025-11-05 15:50:25 -08:00
ae8a9fa894  Type functions  2025-11-05 15:50:25 -08:00
4 changed files with 485 additions and 301 deletions


@@ -1991,7 +1991,7 @@ class BuiltinVariable(VariableTracker):
            # If the object implements a __getitem__ method, iter(...) will call obj.__getitem__()
            # with an integer argument starting at 0, until __getitem__ raises IndexError
            ret = variables.UserFunctionVariable(
-                polyfills.builtins.iter_
+                polyfills.builtins.iter_  # type: ignore[arg-type]
            ).call_function(tx, [obj, *args], {})
            if args:

File diff suppressed because it is too large


@@ -586,7 +586,7 @@ class FilterVariable(IteratorVariable):
            else:
                res = self.fn.call_function(tx, [item], {})
                pred_res = variables.UserFunctionVariable(
-                    polyfills.predicate
+                    polyfills.predicate  # type: ignore[arg-type]
                ).call_function(tx, [res], {})
            if pred_res.as_python_constant():
                return item


@@ -472,7 +472,12 @@ class TorchCtxManagerClassVariable(BaseTorchVariable):
            )
        elif self.value is torch.nn.attention.sdpa_kernel.__wrapped__:  # type: ignore[attr-defined]
            name_to_arg_map = bind_args_cached(
-                self.value, tx, self.source, args, kwargs
+                # pyrefly: ignore[bad-argument-type]
+                self.value,
+                tx,
+                self.source,
+                args,
+                kwargs,
            )
            backends = name_to_arg_map["backends"].as_python_constant()
            set_priority = name_to_arg_map["set_priority"].as_python_constant()
@@ -1349,7 +1354,7 @@ class TorchInGraphFunctionVariable(BaseTorchVariable):
            packed_input_vt = TupleVariable.build(
                tx, (TupleVariable.build(tx, args), ConstDictVariable.build(tx, kwargs))
            )
-            out_vt = variables.UserFunctionVariable(tree_flatten).call_function(
+            out_vt = variables.UserFunctionVariable(tree_flatten).call_function(  # type: ignore[arg-type]
                tx, [packed_input_vt], {}
            )
            assert isinstance(out_vt, TupleVariable) and len(out_vt.items) == 2
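
All four hunks add error-code-scoped suppressions rather than bare ignores: `# type: ignore[arg-type]` for mypy and `# pyrefly: ignore[bad-argument-type]` for Pyrefly. Below is a minimal sketch of how a scoped `# type: ignore[arg-type]` silences one specific mypy diagnostic on one line while leaving other checks active; the names here are illustrative and are not taken from the PyTorch files above.

```python
from typing import Callable


def apply_twice(fn: Callable[[int], int], x: int) -> int:
    # Expects a callable that both takes and returns int.
    return fn(fn(x))


def scale(x: float) -> float:
    # Returns float, so it does not satisfy Callable[[int], int].
    return x * 2.0


# mypy reports this call with error code [arg-type]; the scoped ignore
# suppresses only that code on this line, so unrelated errors still surface.
result = apply_twice(scale, 3)  # type: ignore[arg-type]
print(result)  # 12.0
```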