Skip to content

Commit

Permalink
Disallow add_self_loops=True in GCNConv when normalize=False (#8210)
Browse files Browse the repository at this point in the history

Fix #6251

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Matthias Fey <[email protected]>
  • Loading branch information
3 people authored Oct 20, 2023
1 parent 8b883d6 commit cd6d1f2
Show file tree
Hide file tree
Showing 3 changed files with 20 additions and 5 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).

### Changed

- Disallow the usage of `add_self_loops=True` in `GCNConv(normalize=False)` ([#8210](https://github.com/pyg-team/pytorch_geometric/pull/8210))
- Disable device asserts during `torch_geometric.compile` ([#8220](https://github.com/pyg-team/pytorch_geometric/pull/8220))

### Deprecated
Expand Down
8 changes: 6 additions & 2 deletions test/nn/conv/test_gcn_conv.py
Original file line number Diff line number Diff line change
Expand Up @@ -99,10 +99,14 @@ def test_static_gcn_conv():
assert out.size() == (3, 4, 32)


def test_gcn_conv_norm():
def test_gcn_conv_error():
    # A GCNConv configured with normalize=False cannot honor
    # add_self_loops=True (no on-the-fly normalization is applied, so
    # added self-loops would be meaningless); the constructor must
    # reject this combination with a ValueError.
    with pytest.raises(ValueError, match="does not support adding self-loops"):
        GCNConv(16, 32, normalize=False, add_self_loops=True)


def test_gcn_conv_flow():
x = torch.randn(4, 16)
edge_index = torch.tensor([[0, 0, 0], [1, 2, 3]])
row, col = edge_index

conv = GCNConv(16, 32, flow="source_to_target")
out1 = conv(x, edge_index)
Expand Down
16 changes: 13 additions & 3 deletions torch_geometric/nn/conv/gcn_conv.py
Original file line number Diff line number Diff line change
Expand Up @@ -145,9 +145,11 @@ class GCNConv(MessagePassing):
This parameter should only be set to :obj:`True` in transductive
learning scenarios. (default: :obj:`False`)
add_self_loops (bool, optional): If set to :obj:`False`, will not add
self-loops to the input graph. (default: :obj:`True`)
self-loops to the input graph. By default, self-loops will be added
in case :obj:`normalize` is set to :obj:`True`, and not added
otherwise. (default: :obj:`None`)
normalize (bool, optional): Whether to add self-loops and compute
symmetric normalization coefficients on the fly.
symmetric normalization coefficients on-the-fly.
(default: :obj:`True`)
bias (bool, optional): If set to :obj:`False`, the layer will not learn
an additive bias. (default: :obj:`True`)
Expand All @@ -170,14 +172,22 @@ def __init__(
out_channels: int,
improved: bool = False,
cached: bool = False,
add_self_loops: bool = True,
add_self_loops: Optional[bool] = None,
normalize: bool = True,
bias: bool = True,
**kwargs,
):
kwargs.setdefault('aggr', 'add')
super().__init__(**kwargs)

if add_self_loops is None:
add_self_loops = normalize

if add_self_loops and not normalize:
raise ValueError(f"'{self.__class__.__name__}' does not support "
f"adding self-loops to the graph when no "
f"on-the-fly normalization is applied")

self.in_channels = in_channels
self.out_channels = out_channels
self.improved = improved
Expand Down

0 comments on commit cd6d1f2

Please sign in to comment.