xref: /aosp_15_r20/external/pytorch/test/test_subclass.py (revision da0073e96a02ea20f0ac840b70461e3646d07c45)
# Owner(s): ["module: nn"]
2*da0073e9SAndroid Build Coastguard Worker
3*da0073e9SAndroid Build Coastguard Workerimport tempfile
4*da0073e9SAndroid Build Coastguard Workerfrom copy import deepcopy
5*da0073e9SAndroid Build Coastguard Workerfrom functools import partial
6*da0073e9SAndroid Build Coastguard Workerfrom unittest import expectedFailure
7*da0073e9SAndroid Build Coastguard Worker
8*da0073e9SAndroid Build Coastguard Workerimport torch
9*da0073e9SAndroid Build Coastguard Workerfrom torch import nn
10*da0073e9SAndroid Build Coastguard Workerfrom torch.nn.modules.lazy import LazyModuleMixin
11*da0073e9SAndroid Build Coastguard Workerfrom torch.nn.utils.parametrize import (
12*da0073e9SAndroid Build Coastguard Worker    register_parametrization,
13*da0073e9SAndroid Build Coastguard Worker    remove_parametrizations,
14*da0073e9SAndroid Build Coastguard Worker)
15*da0073e9SAndroid Build Coastguard Workerfrom torch.testing._internal.common_subclass import (
16*da0073e9SAndroid Build Coastguard Worker    DiagTensorBelow,
17*da0073e9SAndroid Build Coastguard Worker    subclass_db,
18*da0073e9SAndroid Build Coastguard Worker)
19*da0073e9SAndroid Build Coastguard Workerfrom torch.testing._internal.common_utils import (
20*da0073e9SAndroid Build Coastguard Worker    TestCase,
21*da0073e9SAndroid Build Coastguard Worker    instantiate_parametrized_tests,
22*da0073e9SAndroid Build Coastguard Worker    parametrize,
23*da0073e9SAndroid Build Coastguard Worker    run_tests,
24*da0073e9SAndroid Build Coastguard Worker    skipIfTorchDynamo,
25*da0073e9SAndroid Build Coastguard Worker    subtest,
26*da0073e9SAndroid Build Coastguard Worker)
27*da0073e9SAndroid Build Coastguard Workerfrom torch.testing._internal.logging_tensor import LoggingTensor
28*da0073e9SAndroid Build Coastguard Workerfrom torch.utils._pytree import tree_map
29*da0073e9SAndroid Build Coastguard Worker
# The current test methodology in this file is to test a variety of real use cases
# with a set of fully-fledged tensor subclasses. In the future, this may change
# to more narrowly specify toy subclasses for each of the specific invariants under
# test, avoiding the need to maintain the set of fully-fledged tensor subclasses.
34*da0073e9SAndroid Build Coastguard Worker
35*da0073e9SAndroid Build Coastguard Worker
36*da0073e9SAndroid Build Coastguard Worker# Decorator for parametrizing tests across the various tensor classes.
37*da0073e9SAndroid Build Coastguard Workerparametrize_tensor_cls = parametrize("tensor_cls", [
38*da0073e9SAndroid Build Coastguard Worker    subtest(tensor_cls, name=info.name) for tensor_cls, info in subclass_db.items()])
39*da0073e9SAndroid Build Coastguard Worker
40*da0073e9SAndroid Build Coastguard Worker
class TestSubclass(TestCase):
    """Tests that tensor subclasses interoperate correctly with nn.Parameter,
    deepcopy, serialization, modules, parametrization, and lazy modules."""

    def _create_tensor(self, tensor_cls):
        """Build a small (size-3) instance of ``tensor_cls`` via its registered factory."""
        return subclass_db[tensor_cls].create_fn(3)

    @parametrize_tensor_cls
    @parametrize("tensor_requires_grad", [False, True])
    def test_param_invariants(self, tensor_cls, tensor_requires_grad):
        """nn.Parameter construction honors requires_grad and doesn't mutate its input."""
        x = self._create_tensor(tensor_cls).requires_grad_(tensor_requires_grad)
        param = nn.Parameter(x, requires_grad=(not tensor_requires_grad))

        self.assertIsInstance(param, nn.Parameter)
        # Ensure requires_grad passed to Parameter's constructor takes precedence.
        self.assertEqual(param.requires_grad, not tensor_requires_grad)

        # Ensure original tensor is not mutated by Parameter construction.
        self.assertNotIsInstance(x, nn.Parameter)
        self.assertEqual(x.requires_grad, tensor_requires_grad)

        class UninitializedParam(nn.Parameter):
            pass

        # Wrapping must not produce an instance of an unrelated Parameter subclass.
        self.assertNotIsInstance(param, UninitializedParam)

    @skipIfTorchDynamo()
    @parametrize_tensor_cls
    @parametrize("as_param", [False, True])
    def test_deepcopy(self, tensor_cls, as_param):
        """deepcopy preserves value, subclass type, and (optionally) parameter-ness."""
        x = self._create_tensor(tensor_cls)
        if as_param:
            x = nn.Parameter(x)
        x_copy = deepcopy(x)
        self.assertEqual(x, x_copy)
        self.assertEqual(x.__class__, x_copy.__class__)
        self.assertIsNot(x, x_copy)
        self.assertIsInstance(x_copy, tensor_cls)
        if as_param:
            # Deepcopy should preserve both custom type and "parameter-ness".
            self.assertIsInstance(x_copy, nn.Parameter)

    @parametrize_tensor_cls
    @parametrize("as_param", [False, True])
    def test_serialization(self, tensor_cls, as_param):
        """torch.save/load round-trips preserve subclass type and parameter-ness."""
        with tempfile.TemporaryFile() as f:
            x = self._create_tensor(tensor_cls)
            if as_param:
                x = nn.Parameter(x)
            torch.save(x, f)
            f.seek(0)
            # The subclass must be allowlisted for (weights_only) loading.
            with torch.serialization.safe_globals([tensor_cls]):
                x_loaded = torch.load(f)

            self.assertEqual(x, x_loaded)
            self.assertIsNot(x, x_loaded)
            self.assertIsInstance(x_loaded, tensor_cls)
            if as_param:
                # Serialization should preserve both custom type and "parameter-ness".
                self.assertIsInstance(x_loaded, nn.Parameter)

    @skipIfTorchDynamo("Visible only with functorch as functorch monkeypatches tensor str")
    @parametrize_tensor_cls
    @parametrize("as_param", [False, True])
    def test_repr(self, tensor_cls, as_param):
        """repr mentions the subclass name exactly once, and 'Parameter' iff wrapped."""
        x = self._create_tensor(tensor_cls)
        if as_param:
            x = nn.Parameter(x)
        str_repr = x.__repr__()
        if tensor_cls is not torch.Tensor:
            self.assertEqual(str_repr.count(f"{tensor_cls.__name__}("), 1)
        self.assertEqual(str_repr.count("Parameter"), 1 if as_param else 0)

    @parametrize_tensor_cls
    @parametrize("as_param", [False, True])
    def test_type_propagation(self, tensor_cls, as_param):
        """Subclass type propagates through ops (when closed under them); parameter-ness never does."""
        x = self._create_tensor(tensor_cls)
        if as_param:
            x = nn.Parameter(x)

        # Call the add operator to produce an output tensor.
        output = x + self._create_tensor(torch.Tensor)

        # Custom type should be propagated across operations if closed under the op, but
        # "parameter-ness" should not be.
        if subclass_db[tensor_cls].closed_under_ops:
            self.assertIsInstance(output, tensor_cls)
        else:
            self.assertIsInstance(output, torch.Tensor)
        self.assertNotIsInstance(output, nn.Parameter)

    @parametrize_tensor_cls
    def test_module_optimization(self, tensor_cls):
        """Subclass params work in Parameter / ParameterList / ParameterDict through a full
        forward / backward / optimizer step."""
        create_fn = partial(self._create_tensor, tensor_cls)

        class MyModule(nn.Module):
            def __init__(self) -> None:
                super().__init__()
                self.p1 = nn.Parameter(create_fn())

                self.p_list = nn.ParameterList([create_fn() for _ in range(3)])
                self.p_list.append(create_fn())

                self.p_dict = nn.ParameterDict({
                    'foo': create_fn(),
                    'bar': create_fn(),
                })
                self.p_dict['baz'] = create_fn()

                with torch.no_grad():
                    nn.init.normal_(self.p1)
                    for p in self.p_list:
                        nn.init.uniform_(p)
                    for p in self.p_dict.values():
                        nn.init.uniform_(p)

            def forward(self, x):
                out = self.p1 + x
                for p in self.p_list:
                    out = p + out

                for v in self.p_dict.values():
                    out = v + out

                return out

        m = MyModule()
        # 1 (p1) + 4 (p_list) + 3 (p_dict) parameters total.
        self.assertEqual(len(m.state_dict()), 8)

        optimizer = torch.optim.SGD(m.parameters(), lr=0.1)
        m(create_fn()).sum().backward(torch.tensor(1))
        optimizer.step()

    @parametrize_tensor_cls
    @parametrize("leave_parametrized", [False, True])
    def test_parametrization(self, tensor_cls, leave_parametrized):
        """Subclass params survive register/remove_parametrization and propagate through forward."""
        # TODO: Either implement set_() properly for these tensor subclasses or apply a
        # more general fix to avoid the need for special set_() handling. For now, skip
        # testing these as they're expected to fail.
        if tensor_cls in [LoggingTensor, DiagTensorBelow]:
            return

        create_fn = partial(self._create_tensor, tensor_cls)

        class MyModule(nn.Module):
            def __init__(self) -> None:
                super().__init__()
                self.weight = nn.Parameter(create_fn())

            def forward(self, x):
                return self.weight + x

        class MyParametrization(nn.Module):
            def forward(self, X):
                return -X

        m = MyModule()
        self.assertEqual(len(m.state_dict()), 1)
        register_parametrization(m, 'weight', MyParametrization())
        self.assertIsInstance(m.weight, tensor_cls)
        output = m(self._create_tensor(torch.Tensor))
        self.assertIsInstance(output, tensor_cls)
        remove_parametrizations(m, 'weight', leave_parametrized=leave_parametrized)

    # Lazy modules with custom tensors are not supported yet.
    @expectedFailure
    @parametrize_tensor_cls
    def test_lazy_module(self, tensor_cls):
        """Materializing an UninitializedParameter from a subclass input should yield the subclass."""
        if tensor_cls is torch.Tensor:
            self.fail('dummy fail for base tensor until the test passes for subclasses')

        class MyLazyModule(LazyModuleMixin, nn.Module):
            def __init__(self) -> None:
                super().__init__()
                self.param = nn.UninitializedParameter()

            def initialize_parameters(self, input) -> None:  # type: ignore[override]
                if self.has_uninitialized_params():
                    with torch.no_grad():
                        self.param.materialize(input.shape)
                        nn.init.uniform_(self.param)

            def forward(self, x):
                return self.param + x

        m = MyLazyModule()
        self.assertTrue(m.has_uninitialized_params())
        output = m(self._create_tensor(tensor_cls))
        self.assertFalse(m.has_uninitialized_params())
        self.assertIsInstance(m.param, tensor_cls)

    def test_non_rewrapping_torch_dispatch_subclass_as_parameter_throws_for_detach(self):
        """A subclass whose __torch_dispatch__ returns plain tensors can't be a Parameter,
        since detach() must return an instance of the same type."""

        # Define a subclass that does not rewrap for any function in its __torch_dispatch__ impl.
        class NonRewrappingTensor(torch.Tensor):
            @staticmethod
            def __new__(
                cls, t: torch.Tensor
            ):
                r = super()._make_wrapper_subclass(
                    cls, t.shape, dtype=t.dtype, requires_grad=t.requires_grad, device=t.device)
                return r

            def __init__(self, t) -> None:
                self.tensor: torch.Tensor = t

            @classmethod
            def __torch_dispatch__(cls, func, types, args=(), kwargs=None):
                # Guard against the declared default: unpacking tree_map(unwrap, None)
                # with ** would raise TypeError.
                kwargs = kwargs or {}

                def unwrap(e) -> torch.Tensor:
                    if isinstance(e, NonRewrappingTensor):
                        return e.tensor
                    else:
                        return e

                r = func(*tree_map(unwrap, args), **tree_map(unwrap, kwargs))
                # Return an unwrapped tensor no longer of original subclass type.
                return r

        with self.assertRaisesRegex(RuntimeError, r"requires that detach\(\) returns an instance of the same type"):
            # Result intentionally discarded; construction itself should raise.
            nn.Parameter(NonRewrappingTensor(torch.randn(3)))

    def test_tensor_subclass_storage_data_accesses_throw(self):
        """Storage metadata access is allowed on wrapper subclasses, but data access throws."""
        x = torch.ones(2)
        x_log = LoggingTensor(x)
        # Accessing storage on a tensor subclass is valid...
        storage = x_log.untyped_storage()
        # ...and this includes accessing metadata on the storage.
        storage.size()
        # But storage methods that access data will throw.
        with self.assertRaisesRegex(RuntimeError, "on an invalid python storage"):
            storage.data_ptr()
        with self.assertRaisesRegex(RuntimeError, "on an invalid python storage"):
            storage.resize_(0)
        with self.assertRaisesRegex(RuntimeError, "on an invalid python storage"):
            storage.copy_(storage)
        with self.assertRaisesRegex(RuntimeError, "on an invalid python storage"):
            storage.fill_(0)
        with self.assertRaisesRegex(RuntimeError, "on an invalid python storage"):
            storage._write_file("file")
280*da0073e9SAndroid Build Coastguard Worker
281*da0073e9SAndroid Build Coastguard Worker
# Expand the @parametrize decorators on TestSubclass into concrete test methods.
instantiate_parametrized_tests(TestSubclass)

if __name__ == '__main__':
    run_tests()
286