Skip to content

Commit

Permalink
Merge pull request #382 from python-adaptive/easy-runner-goals
Browse files Browse the repository at this point in the history
Add loss_goal, npoints_goal, and an auto_goal function and use it in the runners
  • Loading branch information
basnijholt authored Nov 23, 2022
2 parents ae44fae + b5f6f26 commit 28d4c35
Show file tree
Hide file tree
Showing 33 changed files with 458 additions and 146 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ repos:
rev: 5.10.1
hooks:
- id: isort
- repo: https://gitlab.com/pycqa/flake8
- repo: https://github.com/pycqa/flake8
rev: 3.9.2
hooks:
- id: flake8
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ def peak(x, a=0.01):


learner = Learner1D(peak, bounds=(-1, 1))
runner = Runner(learner, goal=lambda l: l.loss() < 0.01)
runner = Runner(learner, loss_goal=0.01)
runner.live_info()
runner.live_plot()
```
Expand Down
3 changes: 2 additions & 1 deletion adaptive/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
from contextlib import suppress

from adaptive import learner, runner, utils
from adaptive._version import __version__
from adaptive.learner import (
AverageLearner,
Expand All @@ -22,6 +21,8 @@
)
from adaptive.runner import AsyncRunner, BlockingRunner, Runner

from adaptive import learner, runner, utils # isort:skip

__all__ = [
"learner",
"runner",
Expand Down
14 changes: 13 additions & 1 deletion adaptive/learner/data_saver.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ def _to_key(x):
return tuple(x.values) if x.values.size > 1 else x.item()


class DataSaver:
class DataSaver(BaseLearner):
"""Save extra data associated with the values that need to be learned.
Parameters
Expand Down Expand Up @@ -50,6 +50,18 @@ def new(self) -> DataSaver:
"""Return a new `DataSaver` with the same `arg_picker` and `learner`."""
return DataSaver(self.learner.new(), self.arg_picker)

@copy_docstring_from(BaseLearner.ask)
def ask(self, *args, **kwargs):
    # Pure delegation: forward the call unchanged to the wrapped learner.
    # Defined explicitly (rather than relying on __getattr__) so the
    # BaseLearner abstract method is concretely overridden on DataSaver.
    return self.learner.ask(*args, **kwargs)

@copy_docstring_from(BaseLearner.loss)
def loss(self, *args, **kwargs):
    # Pure delegation: forward the call unchanged to the wrapped learner.
    # Defined explicitly (rather than relying on __getattr__) so the
    # BaseLearner abstract method is concretely overridden on DataSaver.
    return self.learner.loss(*args, **kwargs)

@copy_docstring_from(BaseLearner.remove_unfinished)
def remove_unfinished(self, *args, **kwargs):
    # Pure delegation: forward the call unchanged to the wrapped learner.
    # Defined explicitly (rather than relying on __getattr__) so the
    # BaseLearner abstract method is concretely overridden on DataSaver.
    return self.learner.remove_unfinished(*args, **kwargs)

def __getattr__(self, attr: str) -> Any:
    # Fallback only: __getattr__ is invoked when normal lookup fails, so any
    # attribute not defined on DataSaver itself is resolved on the wrapped
    # learner. NOTE(review): raises AttributeError via getattr if the learner
    # lacks the attribute too — presumably the intended behavior; confirm.
    return getattr(self.learner, attr)

Expand Down
Loading

0 comments on commit 28d4c35

Please sign in to comment.