Issue using GENConv #7871
-
I created a graph dataset with: data = Data(x=torch.from_numpy(features), y=torch.from_numpy(y), edge_index=edge_index.t().contiguous(), edge_attr=edge_dis.unsqueeze(1), pos=pos). My data has the following dimensions:
I got error about node and edge feature dimensions are not matching: assert x[0].size(-1) == edge_attr.size(-1) However, I did change both my node feature and edge attr to the same dimension using a linear layer, I also printed x[0].size(-1) and edge_attr.size(-1) and they show the same value. It seems I got this error whenever my GENConv has different in_channels size and out_channels size. I can't figure out why. Here is my code: class DeeperGCN(torch.nn.Module):
def __init__(self, hidden_channels, num_layers):
    """Deep residual GCN: `num_layers` res+ GENConv blocks followed by a
    final GENConv head projecting to 3 output classes.

    Args:
        hidden_channels: width of the node/edge embeddings.
        num_layers: number of stacked DeepGCNLayer blocks.
    """
    super(DeeperGCN, self).__init__()
    # Project raw node (5-dim) and edge (1-dim) features to hidden_channels.
    self.node_encoder = Linear(5, hidden_channels)
    self.edge_encoder = Linear(1, hidden_channels)
    self.layers = torch.nn.ModuleList()
    for i in range(1, num_layers + 1):
        # edge_dim must be set explicitly: without it GENConv asserts
        # x.size(-1) == edge_attr.size(-1) internally and fails whenever
        # in_channels != out_channels (the error reported in this thread).
        conv = GENConv(hidden_channels, hidden_channels, aggr='softmax',
                       t=1.0, learn_t=True, num_layers=2, norm='layer',
                       edge_dim=hidden_channels)
        norm = LayerNorm(hidden_channels, elementwise_affine=True)
        act = LeakyReLU(inplace=True)
        layer = DeepGCNLayer(conv, norm, act, block='res+')
        self.layers.append(layer)
    # Final projection to 3 classes; edge_dim again matches the *encoded*
    # edge_attr width (hidden_channels), not the output width (3).
    self.conv2 = GENConv(hidden_channels, 3, aggr='softmax',
                         t=1.0, learn_t=True, num_layers=2, norm='layer',
                         edge_dim=hidden_channels)
def forward(self, data):
    """Run the network on a PyG `data` object.

    Args:
        data: graph batch exposing `x`, `edge_index`, `edge_attr`.

    Returns:
        Per-node logits of shape (num_nodes, 3).
    """
    # y and pos are not used by the forward pass, so only unpack what we need.
    x, edge_index, edge_attr = data.x, data.edge_index, data.edge_attr
    out = self.node_encoder(x)
    edge_attr = self.edge_encoder(edge_attr)
    # Iterate over *all* layers: the original `self.layers[1:]` silently
    # skipped the first block, leaving its parameters unused. (The official
    # DeeperGCN example slices [1:] only because it applies layer 0's conv
    # separately beforehand — that step was missing here.)
    for layer in self.layers:
        out = layer(out, edge_index, edge_attr)
    out = self.conv2(out, edge_index, edge_attr)
    return out
Beta Was this translation helpful? Give feedback.
Replies: 1 comment 1 reply
-
Hi @loisbijing,
Beta Was this translation helpful? Give feedback.
Hi @loisbijing,
The issue is that `edge_dim` is not set. Set the `edge_dim` attribute to `hidden_channels` while initialising `GENConv`.