We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
1 parent 6db3e75 · commit fb457d5 · Copy full SHA for fb457d5
5 files changed
adaptive/learner/base_learner.py
@@ -118,7 +118,7 @@ def remove_unfinished(self):
118
pass
119
120
@abc.abstractmethod
121
- def loss(self, real=True):
+ def loss(self, real: bool = True):
122
"""Return the loss for the current state of the learner.
123
124
Parameters
@@ -130,7 +130,7 @@ def loss(self, real=True):
130
"""
131
132
133
- def ask(self, n, tell_pending=True):
+ def ask(self, n: int, tell_pending: bool = True):
134
"""Choose the next 'n' points to evaluate.
135
136
adaptive/learner/learner2D.py
@@ -598,7 +598,9 @@ def _fill_stack(
598
599
return points_new, losses_new
600
601
- def ask(self, n: int, tell_pending: bool = True) -> Tuple[np.ndarray, np.ndarray]:
+ def ask(
602
+ self, n: int, tell_pending: bool = True
603
+ ) -> Tuple[List[Tuple[float, float]], List[float]]:
604
# Even if tell_pending is False we add the point such that _fill_stack
605
# will return new points, later we remove these points if needed.
606
points = list(self._stack.keys())
adaptive/learner/learnerND.py
@@ -31,7 +31,7 @@ def to_list(inp: float) -> List[float]:
31
return [inp]
32
33
34
-def volume(simplex: List[Tuple[float, float]], ys: None = None) -> float:
+def volume(simplex: Simplex, ys: None = None) -> float:
35
# Notice the parameter ys is there so you can use this volume method as
36
# as loss function
37
matrix = np.subtract(simplex[:-1], simplex[-1], dtype=float)
@@ -69,7 +69,7 @@ def uniform_loss(simplex: np.ndarray, values: np.ndarray, value_scale: float) ->
69
return volume(simplex)
70
71
72
-def std_loss(simplex: np.ndarray, values: np.ndarray, value_scale: float) -> np.ndarray:
+def std_loss(simplex: Simplex, values: np.ndarray, value_scale: float) -> np.ndarray:
73
74
Computes the loss of the simplex based on the standard deviation.
75
adaptive/learner/triangulation.py
@@ -52,7 +52,7 @@ def fast_norm(v: Union[Tuple[float, ...], ndarray]) -> float:
52
53
def fast_2d_point_in_simplex(
54
point: Point, simplex: SimplexPoints, eps: float = 1e-8
55
-) -> Union[np.bool_, bool]:
+) -> Union[bool, np.bool_]:
56
(p0x, p0y), (p1x, p1y), (p2x, p2y) = simplex
57
px, py = point
58
@@ -68,7 +68,7 @@ def fast_2d_point_in_simplex(
68
def point_in_simplex(
if len(point) == 2:
return fast_2d_point_in_simplex(point, simplex, eps)
@@ -417,7 +417,7 @@ def get_reduced_simplex(
417
418
419
self, point: Point, simplex: Simplex, eps: float = 1e-8
420
- ) -> Union[np.bool_, bool]:
+ ) -> Union[bool, np.bool_]:
421
vertices = self.get_vertices(simplex)
422
return point_in_simplex(point, vertices, eps)
423
adaptive/utils.py
@@ -3,9 +3,9 @@
3
import gzip
4
import os
5
import pickle
6
-from contextlib import contextmanager
+from contextlib import _GeneratorContextManager, contextmanager
7
from itertools import product
8
-from typing import Any, Callable, Dict, Iterator, Sequence
+from typing import Any, Callable, Dict, Sequence
9
10
from atomicwrites import AtomicWriter
11
@@ -17,7 +17,7 @@ def named_product(**items: Dict[str, Sequence[Any]]):
17
18
19
@contextmanager
20
-def restore(*learners) -> Iterator[None]:
+def restore(*learners) -> _GeneratorContextManager:
21
states = [learner.__getstate__() for learner in learners]
22
try:
23
yield
0 commit comments