10 files changed: +145 -0

Adding op_tanh to the XNNPACK operators registration list:

    op_static_constant_pad,
    op_static_resize_bilinear_2d,
    op_sub,
+   op_tanh,
    op_to_copy,
)
New file: the tanh node visitor.

# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

from typing import Dict

import torch
from executorch.backends.xnnpack.operators.node_visitor import (
    NodeVisitor,
    register_node_visitor,
)
from executorch.backends.xnnpack.serialization.xnnpack_graph_schema import (
    XNNGraph,
    XNNTanh,
    XNode,
)
from executorch.backends.xnnpack.utils.utils import get_input_node


@register_node_visitor
class TanhVisitor(NodeVisitor):
    target = "aten.tanh.default"

    def __init__(self, *args) -> None:
        super().__init__(*args)

    def define_node(
        self,
        node: torch.fx.Node,
        xnn_graph: XNNGraph,
        vals_to_ids: Dict[torch.fx.Node, int],
        debug_handle: int,
    ) -> None:
        self.define_nodes_tensor_inputs_outputs(node, xnn_graph, vals_to_ids)

        # input
        input_id = vals_to_ids[get_input_node(node, 0)]

        # output
        output_id = vals_to_ids[node]

        ser_node = XNode(
            xnode_union=XNNTanh(
                input_id=input_id,
                output_id=output_id,
                flags=0,
            ),
            debug_handle=debug_handle,
        )
        xnn_graph.xnodes.append(ser_node)
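For context (not part of the diff): the visitor's target string is the ATen op that torch.export records for torch.tanh. A minimal sketch of inspecting that, assuming current torch.export APIs:

import torch

class TanhModule(torch.nn.Module):
    def forward(self, x):
        return torch.tanh(x)

# torch.export captures torch.tanh as a call_function node whose target is
# torch.ops.aten.tanh.default, i.e. the "aten.tanh.default" string the
# visitor above registers for.
ep = torch.export.export(TanhModule(), (torch.randn(4),))
print(ep.graph_module.graph)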
Exposing TanhConfig in the partitioner config imports and enabling it in the list of default configs:

    SoftmaxConfig,
    SquareRootConfig,
    SubConfig,
+   TanhConfig,
    UpsampleBilinear2dConfig,
)
from executorch.backends.xnnpack.partition.config.node_configs import (

    PreluConfig,
    ReciprocalSquareRootConfig,
    ReLUConfig,
+   TanhConfig,
    # SDPAConfig, TODO: D60553559: preserving SDPA for fairseq fails
    SigmoidConfig,
    SliceCopyConfig,
371371 return [ConfigPrecisionType .FP32 ]
372372
373373
374+ class TanhConfig (GenericNodePartitionerConfig ):
375+ target_name = "tanh.default"
376+
377+ def supported_precision_types (self ) -> List [ConfigPrecisionType ]:
378+ return [ConfigPrecisionType .FP32 ]
379+
380+
374381class MeanDimConfig (GenericNodePartitionerConfig ):
375382 target_name = "mean.dim"
376383
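With TanhConfig enabled, the XNNPACK partitioner claims fp32 tanh nodes for delegation. A rough sketch of the end-to-end lowering flow, assuming the standard ExecuTorch entry points (XnnpackPartitioner, to_edge_transform_and_lower); exact import paths may vary by release:

import torch
from executorch.backends.xnnpack.partition.xnnpack_partitioner import XnnpackPartitioner
from executorch.exir import to_edge_transform_and_lower

class TanhModule(torch.nn.Module):
    def forward(self, x):
        return torch.tanh(x)

ep = torch.export.export(TanhModule(), (torch.randn(20),))

# Nodes matched by configs such as TanhConfig are grouped into partitions
# and handed to the XNNPACK delegate during lowering.
lowered = to_edge_transform_and_lower(ep, partitioner=[XnnpackPartitioner()])
et_program = lowered.to_executorch()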
Adding the edge tanh op to the supported-ops list used by the partitioner:

    exir_ops.edge.aten.rsqrt.default,
    exir_ops.edge.aten.log.default,
    exir_ops.edge.aten.gelu.default,
+   exir_ops.edge.aten.tanh.default,
]

SUPPORTED_MODULES = [
Runtime compiler: defining the tanh node in the XNNPACK subgraph and registering it in the node dispatch table:

@@ -1513,6 +1513,36 @@ Error defineGeluNode(
  return Error::Ok;
}

+/*
+Define serialized tanh node into the subgraph, using the remapped ids
+to map the serialized ids, to the new ids generated when defining the
+tensor value
+*/
+Error defineTanhNode(
+    xnn_subgraph_t subgraph_ptr,
+    const std::unordered_map<uint32_t, uint32_t>& remapped_ids,
+    const NodePtr node,
+    const fb_xnnpack::XNNGraph* graph) noexcept {
+  MAYBE_UNUSED(graph);
+
+  auto graph_node = node->xnode_union_as_XNNTanh();
+
+  xnn_status status = xnn_define_tanh(
+      subgraph_ptr,
+      remapped_ids.at(graph_node->input_id()),
+      remapped_ids.at(graph_node->output_id()),
+      graph_node->flags());
+
+  ET_CHECK_OR_RETURN_ERROR(
+      status == xnn_status_success,
+      Internal,
+      "Failed to create tanh node %i with code: %s",
+      node->debug_handle(),
+      xnn_status_to_string(status));
+
+  return Error::Ok;
+}
+
/*
Define serialized ceiling node into the subgraph, using the remapped ids
to map the serialized ids, to the new ids generated when defining the

@@ -2078,6 +2108,7 @@ DefineNodeFunc getDefineNodeFunc(fb_xnnpack::XNodeUnion nodeType) {
    _DEFINE(Hardswish)
    _DEFINE(LeakyReLU)
    _DEFINE(Log)
+   _DEFINE(Tanh)
    _DEFINE(Maximum)
    _DEFINE(Negate)
    _DEFINE(Square)
Serialization schema (flatbuffers): XNNTanh joins the XNodeUnion as a standard one-input/one-output node:

@@ -145,6 +145,7 @@ union XNodeUnion {
  XNNReciprocalSquareRoot: _XNNNode1x1,
  XNNLog: _XNNNode1x1,
  XNNGelu: _XNNNode1x1,
+ XNNTanh: _XNNNode1x1,
}

union XValueUnion {
The same addition in the second copy of the schema:

@@ -141,6 +141,7 @@ union XNodeUnion {
  XNNReciprocalSquareRoot: _XNNNode1x1,
  XNNLog: _XNNNode1x1,
  XNNGelu: _XNNNode1x1,
+ XNNTanh: _XNNNode1x1,
}

union XValueUnion {
Python-side serialization: the XNNTanh dataclass and its registration in the node union:

@@ -319,6 +319,11 @@ class XNNLog(XNNNode1x1):
    pass


+@dataclass
+class XNNTanh(XNNNode1x1):
+    pass
+
+
@dataclass
class XNNMaximum(XNNNode2x1):
    pass

@@ -391,6 +396,7 @@ class XNNScaledDotProductAttention:
    XNNReciprocalSquareRoot,
    XNNLog,
    XNNGelu,
+   XNNTanh,
]

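XNNTanh adds no fields of its own: it inherits the single-input/single-output layout (input_id, output_id, flags) from XNNNode1x1, mirroring the _XNNNode1x1 table in the flatbuffer schema above. A small construction sketch with hypothetical tensor ids (in the visitor these come from vals_to_ids):

from executorch.backends.xnnpack.serialization.xnnpack_graph_schema import (
    XNNTanh,
    XNode,
)

# Hypothetical ids 0 and 1; flags mirrors the 0 the visitor passes.
tanh_node = XNode(
    xnode_union=XNNTanh(input_id=0, output_id=1, flags=0),
    debug_handle=0,
)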
New file: tests for tanh lowering through the XNNPACK delegate.

# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

import unittest

import torch
from executorch.backends.xnnpack.test.tester import Tester


class TestTanh(unittest.TestCase):
    def setUp(self):
        torch._dynamo.reset()

    class Tanh(torch.nn.Module):
        def __init__(self):
            super().__init__()

        def forward(self, x):
            return torch.tanh(x)

    def run_tanh_test(self, inputs):
        (
            Tester(self.Tanh(), inputs)
            .export()
            .check_count({"torch.ops.aten.tanh.default": 1})
            .to_edge_transform_and_lower()
            .check_count({"torch.ops.higher_order.executorch_call_delegate": 1})
            .check_not(["executorch_exir_dialects_edge__ops_aten_tanh_default"])
            .to_executorch()
            .serialize()
            .run_method_and_compare_outputs()
        )

    def test_fp16_tanh(self):
        inputs = (torch.randn(20).to(torch.float16),)
        self.run_tanh_test(inputs)

    def test_fp32_tanh(self):
        inputs = (torch.randn(20),)
        self.run_tanh_test(inputs)
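The Tester pipeline asserts that the aten tanh op is present after export, that lowering leaves a single delegate call and no edge-dialect tanh op behind, and that the delegate's outputs match eager execution. Additional input shapes could reuse the same helper; a hypothetical rank-4 case, for illustration only:

    def test_fp32_tanh_4d(self):
        inputs = (torch.randn(2, 3, 4, 5),)
        self.run_tanh_test(inputs)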