|
2 | 2 | #include "THSNN.h"
|
3 | 3 |
|
4 | 4 | #include <torch/nn/init.h>
|
5 |
| - |
6 |
| -NNModule THSNN_CELU_ctor(const double alpha, const bool inplace, NNAnyModule* outAsAnyModule) |
7 |
| -{ |
8 |
| - CATCH_RETURN_NNModule( |
9 |
| - auto opts = torch::nn::CELUOptions().alpha(alpha).inplace(inplace); |
10 |
| - res = create_module<torch::nn::CELUImpl>(opts, outAsAnyModule); |
11 |
| - ); |
12 |
| -} |
13 |
| - |
14 |
| -Tensor THSNN_CELU_forward(const NNModule module, const Tensor tensor) |
15 |
| -{ |
16 |
| - CATCH_TENSOR((*module)->as<torch::nn::CELU>()->forward(*tensor)); |
17 |
| -} |
18 |
| - |
19 |
| -NNModule THSNN_ELU_ctor(const double alpha, const bool inplace, NNAnyModule* outAsAnyModule) |
20 |
| -{ |
21 |
| - CATCH_RETURN_NNModule( |
22 |
| - auto opts = torch::nn::ELUOptions().alpha(alpha).inplace(inplace); |
23 |
| - res = create_module<torch::nn::ELUImpl>(opts, outAsAnyModule); |
24 |
| - ); |
25 |
| -} |
26 |
| - |
27 |
| -Tensor THSNN_ELU_forward(const NNModule module, const Tensor tensor) |
28 |
| -{ |
29 |
| - CATCH_TENSOR((*module)->as<torch::nn::ELU>()->forward(*tensor)); |
30 |
| -} |
31 |
| - |
32 |
// Builds a torch::nn::GELU module (no configurable options).
// outAsAnyModule: optional out-parameter — presumably receives the module
// wrapped as an AnyModule (see create_module in THSNN.h) — TODO confirm.
// CATCH_RETURN_NNModule is the project's error-capture macro; `res` is
// presumably declared by the macro itself.
NNModule THSNN_GELU_ctor(NNAnyModule* outAsAnyModule)
{
    CATCH_RETURN_NNModule(
        res = create_module<torch::nn::GELUImpl>(outAsAnyModule);
    );
}

// Runs the GELU module's forward pass on `tensor`.
Tensor THSNN_GELU_forward(const NNModule module, const Tensor tensor)
{
    CATCH_TENSOR((*module)->as<torch::nn::GELU>()->forward(*tensor));
}
43 |
| - |
44 |
| -NNModule THSNN_GLU_ctor(const int64_t dim, NNAnyModule* outAsAnyModule) |
45 |
| -{ |
46 |
| - CATCH_RETURN_NNModule( |
47 |
| - auto opts = torch::nn::GLUOptions().dim(dim); |
48 |
| - res = create_module<torch::nn::GLUImpl>(opts, outAsAnyModule); |
49 |
| - ); |
50 |
| -} |
51 |
| - |
52 |
| -Tensor THSNN_GLU_forward(const NNModule module, const Tensor tensor) |
53 |
| -{ |
54 |
| - CATCH_TENSOR((*module)->as<torch::nn::GLU>()->forward(*tensor)); |
55 |
| -} |
56 |
| - |
57 |
| -NNModule THSNN_Hardshrink_ctor(const double lambda, NNAnyModule* outAsAnyModule) |
58 |
| -{ |
59 |
| - CATCH_RETURN_NNModule( |
60 |
| - auto opts = torch::nn::HardshrinkOptions(lambda); |
61 |
| - res = create_module<torch::nn::HardshrinkImpl>(opts, outAsAnyModule); |
62 |
| - ); |
63 |
| -} |
64 |
| - |
65 |
| -Tensor THSNN_Hardshrink_forward(const NNModule module, const Tensor tensor) |
66 |
| -{ |
67 |
| - CATCH_TENSOR((*module)->as<torch::nn::Hardshrink>()->forward(*tensor)); |
68 |
| -} |
69 |
| - |
70 |
| -NNModule THSNN_Hardtanh_ctor(const double min_val, const double max_val, const bool inplace, NNAnyModule* outAsAnyModule) |
71 |
| -{ |
72 |
| - CATCH_RETURN_NNModule( |
73 |
| - auto opts = torch::nn::HardtanhOptions() |
74 |
| - .min_val(min_val) |
75 |
| - .max_val(max_val) |
76 |
| - .inplace(inplace); |
77 |
| - res = create_module<torch::nn::HardtanhImpl>(opts, outAsAnyModule); |
78 |
| - ); |
79 |
| -} |
80 |
| - |
81 |
| -Tensor THSNN_Hardtanh_forward(const NNModule module, const Tensor tensor) |
82 |
| -{ |
83 |
| - CATCH_TENSOR((*module)->as<torch::nn::Hardtanh>()->forward(*tensor)); |
84 |
| -} |
85 |
| - |
86 |
| - |
87 |
| -NNModule THSNN_LeakyReLU_ctor(const double negative_sloope, const bool inplace, NNAnyModule* outAsAnyModule) |
88 |
| -{ |
89 |
| - CATCH_RETURN_NNModule( |
90 |
| - auto opts = torch::nn::LeakyReLUOptions().negative_slope(negative_sloope).inplace(inplace); |
91 |
| - res = create_module<torch::nn::LeakyReLUImpl>(opts, outAsAnyModule); |
92 |
| - ); |
93 |
| -} |
94 |
| - |
95 |
| -Tensor THSNN_LeakyReLU_forward(const NNModule module, const Tensor tensor) |
96 |
| -{ |
97 |
| - CATCH_TENSOR((*module)->as<torch::nn::LeakyReLU>()->forward(*tensor)); |
98 |
| -} |
99 |
| - |
100 |
| -NNModule THSNN_LogSoftmax_ctor(int64_t dim, NNAnyModule* outAsAnyModule) |
101 |
| -{ |
102 |
| - CATCH_RETURN_NNModule( |
103 |
| - auto opts = torch::nn::LogSoftmaxOptions(dim); |
104 |
| - res = create_module<torch::nn::LogSoftmaxImpl>(opts, outAsAnyModule); |
105 |
| - ); |
106 |
| -} |
107 |
| - |
108 |
| -Tensor THSNN_LogSoftmax_forward(const NNModule module, const Tensor tensor) |
109 |
| -{ |
110 |
| - CATCH_TENSOR((*module)->as<torch::nn::LogSoftmax>()->forward(*tensor)); |
111 |
| -} |
112 |
| - |
113 |
// Builds a torch::nn::Mish module (no configurable options).
// outAsAnyModule: optional out-parameter — presumably receives the module
// wrapped as an AnyModule (see create_module in THSNN.h) — TODO confirm.
NNModule THSNN_Mish_ctor(NNAnyModule* outAsAnyModule)
{
    CATCH_RETURN_NNModule(
        res = create_module<torch::nn::MishImpl>(outAsAnyModule);
    );
}

// Runs the Mish module's forward pass on `tensor`.
Tensor THSNN_Mish_forward(const NNModule module, const Tensor tensor)
{
    CATCH_TENSOR((*module)->as<torch::nn::Mish>()->forward(*tensor));
}
124 |
| - |
125 |
| -NNModule THSNN_PReLU_ctor(const int64_t nparams, const double init, NNAnyModule* outAsAnyModule) |
126 |
| -{ |
127 |
| - CATCH_RETURN_NNModule( |
128 |
| - auto opts = torch::nn::PReLUOptions().num_parameters(nparams).init(init); |
129 |
| - res = create_module<torch::nn::PReLUImpl>(opts, outAsAnyModule); |
130 |
| - ); |
131 |
| -} |
132 |
| - |
133 |
| -Tensor THSNN_PReLU_forward(const NNModule module, const Tensor tensor) |
134 |
| -{ |
135 |
| - CATCH_TENSOR((*module)->as<torch::nn::PReLU>()->forward(*tensor)); |
136 |
| -} |
137 |
| - |
138 |
| -Tensor THSNN_PReLU_weight(const NNModule module) |
139 |
| -{ |
140 |
| - return get_weight<torch::nn::PReLU>(module); |
141 |
| -} |
142 |
| - |
143 |
| -void THSNN_PReLU_set_weight(const NNModule module, const Tensor weight) |
144 |
| -{ |
145 |
| - set_weight<torch::nn::PReLU>(module, weight); |
146 |
| -} |
147 |
| - |
148 |
| -NNModule THSNN_ReLU_ctor(bool inplace, NNAnyModule* outAsAnyModule) |
149 |
| -{ |
150 |
| - CATCH_RETURN_NNModule( |
151 |
| - auto opts = torch::nn::ReLUOptions(inplace); |
152 |
| - res = create_module<torch::nn::ReLUImpl>(opts, outAsAnyModule); |
153 |
| - ); |
154 |
| -} |
155 |
| - |
156 |
| -Tensor THSNN_ReLU_forward(const NNModule module, const Tensor tensor) |
157 |
| -{ |
158 |
| - CATCH_TENSOR((*module)->as<torch::nn::ReLU>()->forward(*tensor)); |
159 |
| -} |
160 |
| - |
161 |
| -NNModule THSNN_RReLU_ctor(const double lower, const double upper, const bool inplace, NNAnyModule* outAsAnyModule) |
162 |
| -{ |
163 |
| - CATCH_RETURN_NNModule( |
164 |
| - auto opts = torch::nn::RReLUOptions().lower(lower).upper(upper).inplace(inplace); |
165 |
| - res = create_module<torch::nn::RReLUImpl>(opts, outAsAnyModule); |
166 |
| - ); |
167 |
| -} |
168 |
| - |
169 |
| -Tensor THSNN_RReLU_forward(const NNModule module, const Tensor tensor) |
170 |
| -{ |
171 |
| - CATCH_TENSOR((*module)->as<torch::nn::RReLU>()->forward(*tensor)); |
172 |
| -} |
173 |
| - |
174 |
| -NNModule THSNN_ReLU6_ctor(bool inplace, NNAnyModule* outAsAnyModule) |
175 |
| -{ |
176 |
| - CATCH_RETURN_NNModule( |
177 |
| - auto opts = torch::nn::ReLU6Options(inplace); |
178 |
| - res = create_module<torch::nn::ReLU6Impl>(opts, outAsAnyModule); |
179 |
| - ); |
180 |
| -} |
181 |
| - |
182 |
| -Tensor THSNN_ReLU6_forward(const NNModule module, const Tensor tensor) |
183 |
| -{ |
184 |
| - CATCH_TENSOR((*module)->as<torch::nn::ReLU6>()->forward(*tensor)); |
185 |
| -} |
186 |
| - |
187 |
| -NNModule THSNN_SELU_ctor(bool inplace, NNAnyModule* outAsAnyModule) |
188 |
| -{ |
189 |
| - CATCH_RETURN_NNModule( |
190 |
| - auto opts = torch::nn::SELUOptions(inplace); |
191 |
| - res = create_module<torch::nn::SELUImpl>(opts, outAsAnyModule); |
192 |
| - ); |
193 |
| -} |
194 |
| - |
195 |
| -Tensor THSNN_SELU_forward(const NNModule module, const Tensor tensor) |
196 |
| -{ |
197 |
| - CATCH_TENSOR((*module)->as<torch::nn::SELU>()->forward(*tensor)); |
198 |
| -} |
199 |
| - |
200 |
// Builds a torch::nn::Sigmoid module (no configurable options).
// outAsAnyModule: optional out-parameter — presumably receives the module
// wrapped as an AnyModule (see create_module in THSNN.h) — TODO confirm.
NNModule THSNN_Sigmoid_ctor(NNAnyModule* outAsAnyModule)
{
    CATCH_RETURN_NNModule(
        res = create_module<torch::nn::SigmoidImpl>(outAsAnyModule);
    );
}

// Runs the Sigmoid module's forward pass on `tensor`.
Tensor THSNN_Sigmoid_forward(const NNModule module, const Tensor tensor)
{
    CATCH_TENSOR((*module)->as<torch::nn::Sigmoid>()->forward(*tensor));
}
211 |
| - |
212 |
// Builds a torch::nn::SiLU module (no configurable options).
// outAsAnyModule: optional out-parameter — presumably receives the module
// wrapped as an AnyModule (see create_module in THSNN.h) — TODO confirm.
NNModule THSNN_SiLU_ctor(NNAnyModule* outAsAnyModule)
{
    CATCH_RETURN_NNModule(
        res = create_module<torch::nn::SiLUImpl>(outAsAnyModule);
    );
}

// Runs the SiLU module's forward pass on `tensor`.
Tensor THSNN_SiLU_forward(const NNModule module, const Tensor tensor)
{
    CATCH_TENSOR((*module)->as<torch::nn::SiLU>()->forward(*tensor));
}
223 |
| - |
224 |
// Builds a torch::nn::Softmax2d module (no configurable options).
// outAsAnyModule: optional out-parameter — presumably receives the module
// wrapped as an AnyModule (see create_module in THSNN.h) — TODO confirm.
NNModule THSNN_Softmax2d_ctor(NNAnyModule* outAsAnyModule)
{
    CATCH_RETURN_NNModule(
        res = create_module<torch::nn::Softmax2dImpl>(outAsAnyModule);
    );
}

// Runs the Softmax2d module's forward pass on `tensor`.
Tensor THSNN_Softmax2d_forward(const NNModule module, const Tensor tensor)
{
    CATCH_TENSOR((*module)->as<torch::nn::Softmax2d>()->forward(*tensor));
}
235 |
| - |
236 |
| -NNModule THSNN_Softmax_ctor(const int64_t dim, NNAnyModule* outAsAnyModule) |
237 |
| -{ |
238 |
| - CATCH_RETURN_NNModule( |
239 |
| - auto opts = torch::nn::SoftmaxOptions(dim); |
240 |
| - res = create_module<torch::nn::SoftmaxImpl>(opts, outAsAnyModule); |
241 |
| - ); |
242 |
| -} |
243 |
| - |
244 |
| -Tensor THSNN_Softmax_forward(const NNModule module, const Tensor tensor) |
245 |
| -{ |
246 |
| - CATCH_TENSOR((*module)->as<torch::nn::Softmax>()->forward(*tensor)); |
247 |
| -} |
248 |
| - |
249 |
| -NNModule THSNN_Softmin_ctor(const int64_t dim, NNAnyModule* outAsAnyModule) |
250 |
| -{ |
251 |
| - CATCH_RETURN_NNModule( |
252 |
| - auto opts = torch::nn::SoftminOptions(dim); |
253 |
| - res = create_module<torch::nn::SoftminImpl>(opts, outAsAnyModule); |
254 |
| - ); |
255 |
| -} |
256 |
| - |
257 |
| -Tensor THSNN_Softmin_forward(const NNModule module, const Tensor tensor) |
258 |
| -{ |
259 |
| - CATCH_TENSOR((*module)->as<torch::nn::Softmin>()->forward(*tensor)); |
260 |
| -} |
261 |
| - |
262 |
| -NNModule THSNN_Softplus_ctor(const double beta, const double threshold, NNAnyModule* outAsAnyModule) |
263 |
| -{ |
264 |
| - CATCH_RETURN_NNModule( |
265 |
| - auto opts = torch::nn::SoftplusOptions().beta(beta).threshold(threshold); |
266 |
| - res = create_module<torch::nn::SoftplusImpl>(opts, outAsAnyModule); |
267 |
| - ); |
268 |
| -} |
269 |
| - |
270 |
| -Tensor THSNN_Softplus_forward(const NNModule module, const Tensor tensor) { |
271 |
| - CATCH_TENSOR((*module)->as<torch::nn::Softplus>()->forward(*tensor)); |
272 |
| -} |
273 |
| - |
274 |
| -NNModule THSNN_Softshrink_ctor(const double lambda, NNAnyModule* outAsAnyModule) |
275 |
| -{ |
276 |
| - CATCH_RETURN_NNModule( |
277 |
| - auto opts = torch::nn::SoftshrinkOptions().lambda(lambda); |
278 |
| - res = create_module<torch::nn::SoftshrinkImpl>(opts, outAsAnyModule); |
279 |
| - ); |
280 |
| -} |
281 |
| - |
282 |
| -Tensor THSNN_Softshrink_forward(const NNModule module, const Tensor tensor) { |
283 |
| - CATCH_TENSOR((*module)->as<torch::nn::Softshrink>()->forward(*tensor)); |
284 |
| -} |
285 |
| - |
286 |
| -NNModule THSNN_Softsign_ctor(NNAnyModule* outAsAnyModule) |
287 |
| -{ |
288 |
| - CATCH_RETURN_NNModule( |
289 |
| - res = create_module<torch::nn::SoftsignImpl>(outAsAnyModule); |
290 |
| - ); |
291 |
| -} |
292 |
| - |
293 |
| -Tensor THSNN_Softsign_forward(const NNModule module, const Tensor tensor) { |
294 |
| - CATCH_TENSOR((*module)->as<torch::nn::Softsign>()->forward(*tensor)); |
295 |
| -} |
296 |
| - |
297 |
// Builds a torch::nn::Tanh module (no configurable options).
// outAsAnyModule: optional out-parameter — presumably receives the module
// wrapped as an AnyModule (see create_module in THSNN.h) — TODO confirm.
NNModule THSNN_Tanh_ctor(NNAnyModule* outAsAnyModule)
{
    CATCH_RETURN_NNModule(
        res = create_module<torch::nn::TanhImpl>(outAsAnyModule);
    );
}

// Runs the Tanh module's forward pass on `tensor`.
Tensor THSNN_Tanh_forward(const NNModule module, const Tensor tensor)
{
    CATCH_TENSOR((*module)->as<torch::nn::Tanh>()->forward(*tensor));
}
308 |
| - |
309 |
| -NNModule THSNN_Tanhshrink_ctor(NNAnyModule* outAsAnyModule) |
310 |
| -{ |
311 |
| - CATCH_RETURN_NNModule( |
312 |
| - res = create_module<torch::nn::TanhshrinkImpl>(outAsAnyModule); |
313 |
| - ); |
314 |
| -} |
315 |
| - |
316 |
| -Tensor THSNN_Tanhshrink_forward(const NNModule module, const Tensor tensor) { |
317 |
| - CATCH_TENSOR((*module)->as<torch::nn::Tanhshrink>()->forward(*tensor)); |
318 |
| -} |
319 |
| - |
320 |
| -NNModule THSNN_Threshold_ctor(const double threshold, const double value, const bool inplace, NNAnyModule* outAsAnyModule) |
321 |
| -{ |
322 |
| - CATCH_RETURN_NNModule( |
323 |
| - auto opts = torch::nn::ThresholdOptions(threshold, value).inplace(inplace); |
324 |
| - res = create_module<torch::nn::ThresholdImpl>(opts, outAsAnyModule); |
325 |
| - ); |
326 |
| -} |
327 |
| - |
328 |
| -Tensor THSNN_Threshold_forward(const NNModule module, const Tensor tensor) { |
329 |
| - CATCH_TENSOR((*module)->as<torch::nn::Threshold>()->forward(*tensor)); |
330 |
| -} |
331 |
| - |
0 commit comments