Skip to content

Commit

Permalink
working
Browse files Browse the repository at this point in the history
  • Loading branch information
ThomasHelfer committed Feb 3, 2024
1 parent b2d0f7e commit 7b53e52
Showing 1 changed file with 5 additions and 4 deletions.
9 changes: 5 additions & 4 deletions learn_error.py
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,7 @@ def forward(self, x):
optimizerBFGS = torch.optim.LBFGS(
net.parameters(), lr=0.1
) # Use LBFGS sometimes; it really can work magic, though it's a bit of a diva
optimizerADAM = torch.optim.Adam(net.parameters(), lr=0.00001)
optimizerADAM = torch.optim.Adam(net.parameters(), lr=0.000001)

# Define the ratio for the split (e.g., 80% train, 20% test)
train_ratio = 0.8
Expand Down Expand Up @@ -225,8 +225,10 @@ def __call__(self, output: torch.tensor, dummy: torch.tensor) -> torch.tensor:
my_loss = Hamiltonian_loss(oneoverdx)

# Note: it will slow down significantly with BFGS steps, they are 10x slower, just be aware!
ADAMsteps = 400 # Will perform # steps of ADAM steps and then switch over to BFGS-L
n_steps = 600 # Total amount of steps
ADAMsteps = (
    10000  # Will perform this many ADAM steps and then switch over to L-BFGS
)
n_steps = 10000  # Total number of steps

net.train()
net.to(device)
Expand All @@ -250,7 +252,6 @@ def __call__(self, output: torch.tensor, dummy: torch.tensor) -> torch.tensor:
:, :25, diff - 1 : -diff - 1, diff - 1 : -diff - 1, diff - 1 : -diff - 1
]
batchcounter += 1
print(y_batch.shape)

# This is needed for LBFGS
def closure():
Expand Down

0 comments on commit 7b53e52

Please sign in to comment.