What precisely is required for isinstance checks to succeed against data-Protocols?
#1941
-
The following example fails at runtime:

from typing import Protocol, runtime_checkable # python 3.13.2
from torch import nn # v2.6.0
@runtime_checkable
class HasWeight(Protocol):
    """Structural type: any object exposing a ``weight`` attribute.

    NOTE(review): with ``runtime_checkable``, ``isinstance`` only checks
    for the *presence* of ``weight`` via attribute lookup — the
    ``nn.Parameter`` annotation is not verified at runtime.
    """

    weight: nn.Parameter
# Instantiate a real torch module; Linear exposes a `weight` attribute.
model = nn.Linear(3, 4)
# The attribute is reachable by name...
assert hasattr(model, "weight") # ✅
# ...and has the expected type...
assert isinstance(model.weight, nn.Parameter) # ✅
# ...it is recorded in __static_attributes__ (Python 3.13+)...
assert "weight" in model.__static_attributes__ # ✅
# ...and appears in the annotations mapping.
assert "weight" in model.__annotations__ # ✅
assert isinstance(model, HasWeight) # ❌

It seems the culprit is a combination of factors. I am trying to replicate this from scratch, but even with some non-static indirection it still works — I haven't figured out what exactly makes torch break the isinstance check here.

from typing import Any, Protocol, runtime_checkable
@runtime_checkable
class Proto(Protocol):
    """Structural type: any object exposing a ``param`` attribute.

    The ``float`` annotation is not enforced at runtime — ``isinstance``
    only checks that ``param`` is present on the instance or its class.
    """

    param: float
class Impl:
    """Minimal stand-in for a torch-``nn.Module``-like class that routes
    float attribute assignment through ``register_parameter`` rather than
    a plain ``__setattr__`` — used to probe why torch breaks the
    runtime-checkable Protocol ``isinstance`` check.
    """

    param: float

    def __init__(self) -> None:
        # Deliberately bypasses the normal assignment path (no
        # `self.param = ...`), mimicking torch's registration style.
        self.register_parameter("param", 1.0)

    def __setattr__(self, name: str, value: Any) -> None:
        # Floats are validated/registered; everything else is stored
        # via the default object machinery.
        if isinstance(value, float):
            self.register_parameter(name, value)
        else:
            super().__setattr__(name, value)

    def register_parameter(self, name: str, value: float) -> None:
        # Unlike torch, this still ends up writing into the instance
        # __dict__ (via object.__setattr__), so attribute lookup — and
        # hence the Protocol isinstance check — keeps working.
        # NOTE(review): `assert` is stripped under -O; demo code only.
        assert value >= 0.0, "Negative param not allowed"
        super().__setattr__(name, value)
# Driver: unlike the torch example above, this from-scratch replica
# passes the runtime-checkable Protocol isinstance check.
model = Impl()
# The class-level annotation is visible...
assert "param" in model.__annotations__ # ✅
# ...but "param" is never assigned via a plain `self.param = ...` statement,
# so it is absent from __static_attributes__ (Python 3.13+)...
assert "param" not in model.__static_attributes__ # ✅
# ...yet isinstance succeeds, because the value lands in __dict__.
assert isinstance(model, Proto) # ✅
Beta Was this translation helpful? Give feedback.
Replies: 2 comments 1 reply
-
So, the issue seems to be that torch does not write an entry into `__dict__`, but uses different dictionaries for buffers / parameters / sub-modules.
Beta Was this translation helpful? Give feedback.
-
The implementation of runtime-checkable protocols indeed uses `hasattr`-style attribute lookups to check for member presence.
Beta Was this translation helpful? Give feedback.
So, the issue seems to be that torch does not write an entry into `__dict__`, but uses different dictionaries for buffers / parameters / sub-modules.