Skip to content

Commit e736128

Browse files
committed
add tapas masklm models
1 parent 7475562 commit e736128

30 files changed

+2935
-0
lines changed
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
8e2f2f1b2cd29f1d0f726b2818b4ae3aac430fe15412a65835665131fc87436e
Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
{
2+
"framework": "torch",
3+
"num_devices_required": 1,
4+
"num_nodes_required": 1,
5+
"dynamic": true
6+
}

samples/transformers-auto-model/google/tapas-base-masklm/input_meta.py

Whitespace-only changes.

samples/transformers-auto-model/google/tapas-base-masklm/input_tensor_constraints.py

Whitespace-only changes.
Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
import torch
2+
3+
from torch import device
4+
5+
6+
class GraphModule(torch.nn.Module):
    """Dynamo-traced graph that flattens segment indices for a batch of size 1.

    Given per-row segment ``indices`` and the number of segments per row, it
    shifts each row's indices by ``row * num_segments`` (a no-op here, since
    the traced batch size is the constant 1), flattens them to 1-D, and also
    produces the total segment count and a ``[-1]`` reshape-target shape.

    Returns (in this order):
        flattened_shape   -- int64 tensor ``[-1]``, the target view shape.
        total_segments    -- ``num_segments * batch_size`` as a CPU tensor.
        flat_indices      -- the offset-shifted indices, viewed as 1-D, on CPU.
    """

    def forward(
        self,
        s0: torch.SymInt,
        L_index_indices: torch.Tensor,
        L_index_num_segments: torch.Tensor,
    ):
        # The trace baked the batch shape in as [1]; its product is scalar 1.
        batch_size = torch.prod(torch.tensor([1]))

        # Row b of the batch is offset by b * num_segments; with batch_size
        # 1 this is just the scalar 0, broadcast against the indices below.
        row_ids = torch.arange(start=0, end=batch_size, device=device(type="cpu"))
        offsets = (row_ids * L_index_num_segments).view((1,)).unsqueeze(-1)

        # Shift every index into its row's segment range, then flatten.
        flat_indices = (offsets + L_index_indices).view(-1)
        total_segments = L_index_num_segments * batch_size

        # Normalize both results onto the CPU device (as_tensor is a no-op
        # for tensors already there).
        flat_indices = torch.as_tensor(flat_indices, device=device(type="cpu"))
        total_segments = torch.as_tensor(total_segments, device=device(type="cpu"))

        # Build the view target [-1]: a [-1] head concatenated with an
        # empty int64 tail (the tail carried dynamic dims in the trace).
        head = torch.as_tensor([-1], dtype=torch.int64)
        tail = torch.as_tensor((), dtype=torch.int64)
        flattened_shape = torch.cat([head, tail], dim=0)

        return (flattened_shape, total_segments, flat_indices)

0 commit comments

Comments
 (0)