|
4 | 4 | # This source code is licensed under the BSD-style license found in the |
5 | 5 | # LICENSE file in the root directory of this source tree. |
6 | 6 |
|
| 7 | +import tempfile |
7 | 8 | import unittest |
8 | 9 |
|
9 | 10 | import executorch.exir as exir |
|
41 | 42 |     prepare_fx, |
42 | 43 | ) |
43 | 44 |
|
| 45 | +from typing import Tuple |
| 46 | + |
| 47 | +import torch |
| 48 | +from executorch import exir |
| 49 | +from executorch.backends.xnnpack.partition.xnnpack_partitioner import ( |
| 50 | +    XnnpackFloatingPointPartitioner, |
| 51 | +) |
| 52 | +from executorch.exir import ( |
| 53 | +    EdgeCompileConfig, |
| 54 | +    EdgeProgramManager, |
| 55 | +    to_edge_transform_and_lower, |
| 56 | +) |
44 | 57 |
|
45 | 58 | class TestXnnQnnBackends(unittest.TestCase): |
46 | 59 |     def test_add_xnnpack_and_dqlinear_qnn(self): |
@@ -132,3 +145,42 @@ def forward(self, x, y): |
132 | 145 |         self.assertTrue( |
133 | 146 |             torch.allclose(model_output[0], ref_output, atol=1e-03, rtol=1e-03) |
134 | 147 |         ) |
| 148 | + |
| 149 | +    def test_serde(self): |
| 150 | +        # Small module that computes blank log-probabilities from joint encodings |
| 151 | +        class BlankLogProbsModule(torch.nn.Module): |
| 152 | +            def __init__(self) -> None: |
| 153 | +                super().__init__() |
| 154 | +                self.linear = torch.nn.Linear(768, 1) |
| 155 | +                self.log_sigmoid = torch.nn.LogSigmoid() |
| 156 | + |
| 157 | +            def forward(self, joint_encodings: torch.Tensor) -> torch.Tensor: |
| 158 | +                tanh_out = torch.tanh(joint_encodings) |
| 159 | +                linear_out = self.linear(tanh_out) |
| 160 | +                blank_output = self.log_sigmoid(linear_out) |
| 161 | +                return blank_output |
| 162 | + |
| 163 | +        def get_blank_logprobs_inputs_fn() -> Tuple[torch.Tensor, ...]: |
| 164 | +            """ |
| 165 | +            Get an example input tuple for BlankLogProbsModule. |
| 166 | +            """ |
| 167 | +            return (torch.randn(1, 1, 1, 768),) |
| 168 | + |
| 169 | +        model = BlankLogProbsModule() |
| 170 | +        # Get the inputs for the logprobs function |
| 171 | +        logprobs_fake_inputs = get_blank_logprobs_inputs_fn() |
| 172 | + |
| 173 | +        # Export and partition |
| 174 | +        aten_prog = torch.export.export(model, logprobs_fake_inputs, strict=True) |
| 175 | +        partitioned_prog: EdgeProgramManager = to_edge_transform_and_lower( |
| 176 | +            aten_prog, |
| 177 | +            partitioner=[XnnpackFloatingPointPartitioner()], |
| 178 | +            compile_config=EdgeCompileConfig(_check_ir_validity=False, _use_edge_ops=True), |
| 179 | +        ) |
| 181 | +        with tempfile.NamedTemporaryFile(suffix=".pt2") as f: |
| 182 | +            exir.save(partitioned_prog.exported_program(), f.name) |
| 183 | +            f.seek(0) |
| 184 | +            loaded_model = exir.load(f.name) |
| 185 | + |
| 186 | +        self.assertTrue(torch.allclose(model(*logprobs_fake_inputs), loaded_model.module()(*logprobs_fake_inputs))) |
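
Beyond the numerical check above, it can be useful to confirm that the lowered graph itself survives the save/load round trip. The snippet below is a minimal standalone sketch, not part of this change: it reuses the APIs the diff relies on (`torch.export.export`, `to_edge_transform_and_lower`, `exir.save`/`exir.load`, `XnnpackFloatingPointPartitioner`), while `SmallModule` and the `executorch_call_delegate` node check are illustrative assumptions about how delegated submodules usually appear after lowering; the reported count may be zero if the partitioner claims nothing in this toy graph.

```python
# Standalone sketch (assumed APIs as used in the diff above); SmallModule and the
# executorch_call_delegate check are illustrative, not part of the change.
import tempfile

import torch
from executorch import exir
from executorch.backends.xnnpack.partition.xnnpack_partitioner import (
    XnnpackFloatingPointPartitioner,
)
from executorch.exir import EdgeCompileConfig, to_edge_transform_and_lower


class SmallModule(torch.nn.Module):
    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return torch.nn.functional.logsigmoid(x + 1.0)


example_inputs = (torch.randn(1, 1, 1, 768),)

# Export, lower with the XNNPACK partitioner, then round-trip through a .pt2 file.
prog = to_edge_transform_and_lower(
    torch.export.export(SmallModule(), example_inputs, strict=True),
    partitioner=[XnnpackFloatingPointPartitioner()],
    compile_config=EdgeCompileConfig(_check_ir_validity=False),
)

with tempfile.NamedTemporaryFile(suffix=".pt2") as f:
    exir.save(prog.exported_program(), f.name)
    loaded = exir.load(f.name)

# Delegated submodules typically show up as executorch_call_delegate call sites
# (assumption about the lowered graph's structure).
delegate_calls = [
    n
    for n in loaded.graph_module.graph.nodes
    if n.op == "call_function" and "executorch_call_delegate" in str(n.target)
]
print(f"delegate call sites after reload: {len(delegate_calls)}")
print(torch.allclose(SmallModule()(*example_inputs), loaded.module()(*example_inputs)))
```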