Replies: 1 comment 3 replies
-
|
Beta Was this translation helpful? Give feedback.
3 replies
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
Uh oh!
There was an error while loading. Please reload this page.
Uh oh!
There was an error while loading. Please reload this page.
-
I had data with edge_index of shape torch.Size([50]) and data.x of shape torch.Size([100, 172]), where 100 is the number of nodes and 172 is the number of features. When I use this:
`class Net(torch.nn.Module):
def __init__(self, hidden_channels, in_channel, out_channel):
super(Net, self).__init__()
self.conv1 = GraphConv(in_channel, hidden_channels)
self.conv2 = GraphConv(hidden_channels, hidden_channels)
self.conv3 = GraphConv(hidden_channels, hidden_channels)
self.lin = torch.nn.Linear(1 * hidden_channels, out_channel)
I got this error:
ValueError Traceback (most recent call last)
Cell In [1], line 216
214 labels_batch_torch = torch.from_numpy(labels_batch).float().to(device)
215 data_b, id_new_value_old = shuffleData(source_embedding)
--> 216 pred = decoder(source_embedding, source_embedding.edge_idxs, data_b.edge_idxs, lam , id_new_value_old).sigmoid()
217 #pred = torch.flatten(pred)
218 labels_batch_torch = torch.stack((1 - labels_batch_torch, labels_batch_torch), dim=-1)
File ~/opt/anaconda3/envs/M1Max/lib/python3.8/site-packages/torch/nn/modules/module.py:1190, in Module._call_impl(self, *input, **kwargs)
1186 # If we don't have any hooks, we want to skip the rest of the logic in
1187 # this function, and just call forward.
1188 if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks
1189 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1190 return forward_call(*input, **kwargs)
1191 # Do not call functions when jit is used
1192 full_backward_hooks, non_full_backward_hooks = [], []
File ~/Documents/Codes/tgn-master 3 copy 2/utils/utils.py:21, in Net.forward(self, x0, edge_index, edge_index_b, lam, id_new_value_old)
19 print(edge_index_b.shape)
20 print("x0.shape",x0.shape)
---> 21 x1 = self.conv1(x0, edge_index, x0)
22 print("x1.shape",x1.shape)
23 x1 = F.relu(x1)
File ~/opt/anaconda3/envs/M1Max/lib/python3.8/site-packages/torch/nn/modules/module.py:1190, in Module._call_impl(self, *input, **kwargs)
1186 # If we don't have any hooks, we want to skip the rest of the logic in
1187 # this function, and just call forward.
1188 if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks
1189 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1190 return forward_call(*input, **kwargs)
1191 # Do not call functions when jit is used
1192 full_backward_hooks, non_full_backward_hooks = [], []
File ~/Documents/Codes/tgn-master 3 copy 2/graph_conv.py:27, in GraphConv.forward(self, x, edge_index, x_cen)
25 def forward(self, x, edge_index, x_cen):
26 h = torch.matmul(x, self.weight)
---> 27 aggr_out = self.propagate(edge_index, size=None, h=h, edge_weight=None)
28 return aggr_out + self.lin(x_cen)
File ~/opt/anaconda3/envs/M1Max/lib/python3.8/site-packages/torch_geometric/nn/conv/message_passing.py:392, in MessagePassing.propagate(self, edge_index, size, **kwargs)
389 if res is not None:
390 edge_index, size, kwargs = res
--> 392 size = self.check_input(edge_index, size)
394 # Run "fused" message and aggregation (if applicable).
395 if is_sparse(edge_index) and self.fuse and not self.explain:
File ~/opt/anaconda3/envs/M1Max/lib/python3.8/site-packages/torch_geometric/nn/conv/message_passing.py:205, in MessagePassing.check_input(self, edge_index, size)
202 raise ValueError(f"Expected 'edge_index' to be of integer "
203 f"type (got '{edge_index.dtype}')")
204 if edge_index.dim() != 2:
--> 205 raise ValueError(f"Expected 'edge_index' to be two-dimensional"
206 f" (got {edge_index.dim()} dimensions)")
207 if edge_index.size(0) != 2:
208 raise ValueError(f"Expected 'edge_index' to have size '2' in "
209 f"the first dimension (got "
210 f"'{edge_index.size(0)}')")
ValueError: Expected 'edge_index' to be two-dimensional (got 1 dimensions)
Beta Was this translation helpful? Give feedback.
All reactions