Skip to content

Commit 24814f1

Browse files
committed
rework example
1 parent 5e1d9cd commit 24814f1

3 files changed

Lines changed: 24 additions & 20 deletions

File tree

adaptive/learner/average_learner1D.py

Lines changed: 4 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -98,6 +98,10 @@ def total_samples(self):
9898
"""Returns the total number of samples"""
9999
return sum(self._number_samples.values())
100100

101+
@property
102+
def min_samples_per_point(self):
103+
return min(self._number_samples.values())
104+
101105
def ask(self, n, tell_pending=True):
102106
"""Return 'n' points that are expected to maximally reduce the loss."""
103107
# If some point is undersampled, resample it

docs/source/tutorial/tutorial.AverageLearner1D.rst

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -28,7 +28,7 @@ First, we define the (noisy) function to be sampled. Note that the parameter
2828

2929
.. jupyter-execute::
3030

31-
def f(x, sigma=0, peak_width=0.05, offset=-0.5):
31+
def noisy_peak(x, sigma=0, peak_width=0.05, offset=-0.5):
3232
y = x ** 3 - x + 3 * peak_width ** 2 / (peak_width ** 2 + (x - offset) ** 2)
3333
noise = np.random.normal(0, sigma)
3434
return y + noise
@@ -38,14 +38,14 @@ This is how the function looks in the absence of noise:
3838
.. jupyter-execute::
3939

4040
xs = np.linspace(-2, 2, 500)
41-
ys = f(xs, sigma=0)
41+
ys = noisy_peak(xs, sigma=0)
4242
hv.Path((xs, ys))
4343

4444
And an example of a single realization of the noisy function:
4545

4646
.. jupyter-execute::
4747

48-
ys = [f(x, sigma=1) for x in xs]
48+
ys = [noisy_peak(x, sigma=1) for x in xs]
4949
hv.Path((xs, ys))
5050

5151
To obtain an estimate of the mean value of the function at each point ``x``, we
@@ -57,7 +57,7 @@ We start by initializing a 1D average learner:
5757

5858
.. jupyter-execute::
5959

60-
learner = adaptive.AverageLearner1D(partial(f, sigma=1), bounds=(-2, 2))
60+
learner = adaptive.AverageLearner1D(partial(noisy_peak, sigma=1), bounds=(-2, 2))
6161

6262
As with other types of learners, we need to initialize a runner with a certain
6363
goal to run our learner. In this case, we set 10000 samples as the goal (the

example-notebook.ipynb

Lines changed: 16 additions & 16 deletions
Original file line number | Diff line number | Diff line change
@@ -312,15 +312,10 @@
312312
"metadata": {},
313313
"outputs": [],
314314
"source": [
315-
"def f(x, sigma=0, peak_width=0.05, offset=-0.5, wait=False):\n",
316-
" from time import sleep\n",
317-
" from random import random\n",
318-
"\n",
319-
" if wait:\n",
320-
" sleep(random())\n",
321-
"\n",
322-
" function = x ** 3 - x + 3 * peak_width ** 2 / (peak_width ** 2 + (x - offset) ** 2)\n",
323-
" return function + np.random.normal(0, sigma)"
315+
"def noisy_peak(x, sigma=0, peak_width=0.05, offset=-0.5):\n",
316+
" y = x ** 3 - x + 3 * peak_width ** 2 / (peak_width ** 2 + (x - offset) ** 2)\n",
317+
" noise = np.random.normal(0, sigma)\n",
318+
" return y + noise"
324319
]
325320
},
326321
{
@@ -336,9 +331,9 @@
336331
"metadata": {},
337332
"outputs": [],
338333
"source": [
339-
"import matplotlib.pyplot as plt\n",
340-
"x = np.linspace(-2,2,500)\n",
341-
"plt.plot(x, f(x, sigma=0));"
334+
"xs = np.linspace(-2, 2, 500)\n",
335+
"ys = noisy_peak(xs, sigma=0)\n",
336+
"hv.Path((xs, ys))"
342337
]
343338
},
344339
{
@@ -354,7 +349,8 @@
354349
"metadata": {},
355350
"outputs": [],
356351
"source": [
357-
"plt.plot(x, [f(xi, sigma=1) for xi in x]);"
352+
"ys = [noisy_peak(x, sigma=1) for x in xs]\n",
353+
"hv.Path((xs, ys))"
358354
]
359355
},
360356
{
@@ -370,10 +366,14 @@
370366
"metadata": {},
371367
"outputs": [],
372368
"source": [
373-
"learner = adaptive.AverageLearner1D(function=partial(f, sigma=1), bounds=(-2,2))\n",
369+
"learner = adaptive.AverageLearner1D(partial(noisy_peak, sigma=1), bounds=(-2, 2))\n",
370+
"\n",
371+
"\n",
372+
"def goal(l):\n",
373+
" return l.total_samples >= 10_000 and l.min_samples_per_point >= 20\n",
374+
"\n",
374375
"\n",
375-
"runner = adaptive.Runner(learner, goal=lambda l: l.total_samples >= 10000 \n",
376-
" and min(l._number_samples.values()) >= 20)\n",
376+
"runner = adaptive.Runner(learner, goal=goal)\n",
377377
"runner.live_info()"
378378
]
379379
},

0 commit comments

Comments (0)