Commit

[pre-commit.ci] pre-commit autoupdate (#110)
* [pre-commit.ci] pre-commit autoupdate

updates:
- [github.com/psf/black: 23.12.1 → 24.1.1](psf/black@23.12.1...24.1.1)

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
pre-commit-ci[bot] authored Jan 29, 2024
1 parent cbfcdd8 commit c0d9df8
Showing 5 changed files with 19 additions and 13 deletions.
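
Note on the auto fixes: black 24.1 is the first release to use black's 2024 stable style, which among other changes parenthesizes conditional expressions that have to split across lines and re-wraps long chained assignments. That is what produces the Python diffs below; they are formatting-only and do not change behavior. A minimal sketch of the conditional-expression pattern (the names `groups` and `fallback` are invented for illustration, not taken from the repository):

    # Both spellings are equivalent; black 24's 2024 style adds the inner
    # parentheses when the conditional expression has to span several lines,
    # as in the diffs below.
    groups = [[1, 2], [], [3]]
    fallback = [0]

    # Bare conditional expression (how black 23.x left multi-line cases):
    old_style = [vals if len(vals) else fallback for vals in groups]

    # Parenthesized conditional expression (how black 24.x formats them):
    new_style = [(vals if len(vals) else fallback) for vals in groups]

    assert old_style == new_style == [[1, 2], [0], [3]]

Running `pre-commit run --all-files` with the updated hook reproduces the same reformatting locally.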
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml

@@ -12,7 +12,7 @@ repos:
         additional_dependencies: [toml]
         exclude: docs/tutorials
   - repo: https://github.com/psf/black
-    rev: "23.12.1"
+    rev: "24.1.1"
     hooks:
       - id: black-jupyter
   - repo: https://github.com/kynan/nbstripout
8 changes: 5 additions & 3 deletions python/celerite2/pymc/ops.py

@@ -130,9 +130,11 @@ def perform(self, node, inputs, outputs):
     def grad(self, inputs, gradients):
         outputs = self(*inputs)
         grads = (
-            pt.zeros_like(outputs[n])
-            if isinstance(b.type, pytensor.gradient.DisconnectedType)
-            else b
+            (
+                pt.zeros_like(outputs[n])
+                if isinstance(b.type, pytensor.gradient.DisconnectedType)
+                else b
+            )
             for n, b in enumerate(gradients[: len(self.spec["outputs"])])
         )
         return self.rev_op(*chain(inputs, outputs, grads))
6 changes: 3 additions & 3 deletions python/celerite2/pymc3/distribution.py

@@ -21,9 +21,9 @@ class CeleriteNormal(Continuous):
     def __init__(self, gp, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.gp = gp
-        self.mean = (
-            self.median
-        ) = self.mode = self.gp.mean_value + tt.zeros_like(self.gp._t)
+        self.mean = self.median = self.mode = (
+            self.gp.mean_value + tt.zeros_like(self.gp._t)
+        )

     def random(self, point=None, size=None):
         if size is None:
8 changes: 5 additions & 3 deletions python/celerite2/pymc3/ops.py

@@ -130,9 +130,11 @@ def perform(self, node, inputs, outputs):
     def grad(self, inputs, gradients):
         outputs = self(*inputs)
         grads = (
-            tt.zeros_like(outputs[n])
-            if isinstance(b.type, theano.gradient.DisconnectedType)
-            else b
+            (
+                tt.zeros_like(outputs[n])
+                if isinstance(b.type, theano.gradient.DisconnectedType)
+                else b
+            )
             for n, b in enumerate(gradients[: len(self.spec["outputs"])])
         )
         return self.rev_op(*chain(inputs, outputs, grads))
8 changes: 5 additions & 3 deletions python/celerite2/pymc3/terms.py

@@ -218,9 +218,11 @@ def get_coefficients(self):
         dc.append(tt.flatten(dj[:, None] + dk[None, :]))

         return [
-            tt.concatenate(vals, axis=0)
-            if len(vals)
-            else tt.zeros(0, dtype=self.dtype)
+            (
+                tt.concatenate(vals, axis=0)
+                if len(vals)
+                else tt.zeros(0, dtype=self.dtype)
+            )
             for vals in (ar, cr, ac, bc, cc, dc)
         ]
