Commit f632ec0
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Oct 12, 2023
1 parent: c56a93b
Showing 2 changed files with 5 additions and 7 deletions.
benchmark/ops/softmax.py (7 changes: 3 additions & 4 deletions)
@@ -1,11 +1,11 @@
 import argparse
+from time import perf_counter as timestamp
 
 import torch
-import pyg_lib
-
-from time import perf_counter as timestamp
 from torch_geometric.utils import segment
 
+import pyg_lib
+
 
 def softmax_reference_ptr(src, ptr, dim=0):
     dim = dim + src.dim() if dim < 0 else dim
@@ -66,4 +66,3 @@ def measure_perf(impl_func, ptr, out_grad, num_warmups, num_steps, backward):
 print(f'pyg_lib forward: {t_fwd:.4f}s')
 if args.backward:
     print(f'pyg_lib backward: {t_bwd:.4f}s')
-
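For context, softmax_reference_ptr is the pure-PyTorch baseline this benchmark times pyg_lib against. Only its signature and first line are visible in the hunk above; the body below is a hypothetical sketch (not the repository's code) of a numerically stable, ptr-segmented softmax built on torch_geometric.utils.segment, assuming dim=0 for the repeat_interleave broadcast:

import torch
from torch_geometric.utils import segment

def softmax_reference_ptr(src, ptr, dim=0):
    # Sketch only: assumes dim == 0 and a CSR-style ptr of segment boundaries.
    dim = dim + src.dim() if dim < 0 else dim
    counts = ptr.diff()  # number of rows in each segment
    # Subtract the per-segment maximum for numerical stability.
    src_max = segment(src, ptr, reduce='max').repeat_interleave(counts, dim=dim)
    out = (src - src_max).exp()
    out_sum = segment(out, ptr, reduce='sum').repeat_interleave(counts, dim=dim)
    return out / out_sum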
pyg_lib/ops/__init__.py (5 changes: 2 additions & 3 deletions)
@@ -352,9 +352,8 @@ def forward(
 
     @staticmethod
     def backward(ctx, out_grad: Tensor) -> Tuple[Union[Tensor, int]]:
         out, index, ptr = ctx.saved_tensors
-        in_grad = torch.ops.pyg.softmax_backward(
-            out, out_grad, index, ptr, ctx.num_nodes, ctx.dim
-        )
+        in_grad = torch.ops.pyg.softmax_backward(out, out_grad, index, ptr,
+                                                 ctx.num_nodes, ctx.dim)
 
         return in_grad, None, None, None, None
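The rewrapped call is purely cosmetic: torch.ops.pyg.softmax_backward still receives the same arguments. For reference, the segment-wise softmax gradient such a kernel computes has the standard closed form dx_i = y_i * (dy_i - sum_j y_j * dy_j). A hypothetical pure-PyTorch equivalent (not pyg_lib's actual kernel, and the helper name is invented) could look like:

import torch
from torch_geometric.utils import segment

def softmax_backward_reference(out, out_grad, ptr, dim=0):
    # For y = softmax(x) within each segment delimited by ptr:
    #   dL/dx_i = y_i * (dL/dy_i - sum_j y_j * dL/dy_j)
    prod = out * out_grad
    seg_sum = segment(prod, ptr, reduce='sum')
    seg_sum = seg_sum.repeat_interleave(ptr.diff(), dim=dim)
    return prod - out * seg_sum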

