Use Adam optimizer in estimate
funkey committed Jan 12, 2024
1 parent 5ace529 commit 6e10b3c
Showing 1 changed file with 2 additions and 2 deletions.
blinx/estimate.py: 4 changes (2 additions & 2 deletions)
@@ -5,7 +5,7 @@
 from tqdm import tqdm
 
 from .hyper_parameters import HyperParameters
-from .optimizer import create_optimizer
+from .optimizer import create_adam_optimizer
 from .parameter_ranges import ParameterRanges
 from .parameters import Parameters
 
@@ -152,7 +152,7 @@ def estimate_parameters(traces, y, parameter_ranges, hyper_parameters):
 
     # create an optimizer, which will be shared between all optimizations
 
-    optimizer = create_optimizer(grad_func, hyper_parameters)
+    optimizer = create_adam_optimizer(grad_func, hyper_parameters)
 
     # create optimizer states for each trace and parameter guess
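The body of create_adam_optimizer is not part of this diff. For context only, below is a minimal sketch of what such a factory could look like, assuming an optax-style Adam optimizer under JAX and a grad_func that returns both the objective value and its gradients. The attribute name step_sizes, the step signature, and the returned (init, step) pair are illustrative assumptions, not the actual blinx implementation.

# Hypothetical sketch, not the code referenced by this commit.
# Assumes hyper_parameters exposes a learning-rate field (here called
# `step_sizes`) and that grad_func(parameters, trace, y) returns
# (value, gradients) -- both are assumptions for illustration.

import optax


def create_adam_optimizer(grad_func, hyper_parameters):
    """Return (init, step) closures around an optax Adam optimizer."""

    optimizer = optax.adam(learning_rate=hyper_parameters.step_sizes)

    def init(parameters):
        # one optimizer state per trace and parameter guess
        return optimizer.init(parameters)

    def step(parameters, opt_state, trace, y):
        # gradients of the objective w.r.t. the current parameters
        value, grads = grad_func(parameters, trace, y)
        updates, opt_state = optimizer.update(grads, opt_state, parameters)
        parameters = optax.apply_updates(parameters, updates)
        return parameters, opt_state, value

    return init, step

Under these assumptions, the single optimizer object can be shared across all optimizations (as the comment in the diff notes), while each trace and parameter guess keeps its own opt_state.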
